
// Source listing artifact: toolkit.plugins.packagemanagers.bazel-package-manager 38.0.0 StarlarkParser.kt (Maven / Gradle / Ivy)
/*
* Copyright (C) 2024 The ORT Project Authors (see )
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
* License-Filename: LICENSE
*/
// The initial version of the code in this file was generated by GPT-4. The chat log that includes the generated code
// can be found at https://chat.openai.com/share/6cc4f4e1-9ac8-4eb7-8dd0-fd8476a38537
package org.ossreviewtoolkit.plugins.packagemanagers.bazel
/**
 * The metadata of a Bazel module as declared by the directives in a `MODULE.bazel` file.
 */
internal data class ModuleMetadata(
    /** The `module()` directive, or null if the file does not contain one. */
    val module: ModuleDirective? = null,

    /** The `bazel_dep()` directives declaring the module's direct dependencies. */
    val dependencies: List<BazelDepDirective>
)
/**
 * The properties of a `module()` directive in a `MODULE.bazel` file.
 */
internal data class ModuleDirective(
    // The name of the module.
    val name: String,
    // The version of the module.
    val version: String,
    // The compatibility level of the module; defaults to 0 when absent (see Parser.parseModuleDirective).
    val compatibilityLevel: Int
)
/**
 * The properties of a `bazel_dep()` directive in a `MODULE.bazel` file.
 */
internal data class BazelDepDirective(
    // The name of the dependency.
    val name: String,
    // The version of the dependency.
    val version: String,
    // True if the directive has `dev_dependency = True` (see Parser.parseBazelDepDirective).
    val devDependency: Boolean
)
/**
 * The types of tokens produced by the [Lexer] for the subset of Starlark handled here.
 */
internal enum class TokenType {
    IDENTIFIER, STRING, NUMBER, BOOLEAN, EQUALS, COMMA, LPAREN, RPAREN, LBRACKET, RBRACKET, EOF
}
/** A single lexed token: its [type] and the raw [value] text it was created from. */
internal data class Token(val type: TokenType, val value: String)
/**
 * A minimal lexer for the subset of Starlark used in `MODULE.bazel` files. It turns [input] into a
 * stream of [Token]s via [nextToken], skipping whitespace and `#` comments and silently dropping
 * any character it does not recognize.
 */
internal class Lexer(private val input: String) {
    private var pos = 0
    private val length = input.length

    /** Return the character [offset] positions ahead without consuming it, or NUL at end of input. */
    private fun peek(offset: Int = 0): Char = if (pos + offset < length) input[pos + offset] else '\u0000'

    private fun advance(steps: Int = 1) {
        pos += steps
    }

    private fun isEOF(): Boolean = pos >= length

    private fun isDigit(c: Char): Boolean = c in '0'..'9'

    // Note: '_' counts as alphabetic so identifiers like "dev_dependency" lex as a single token.
    private fun isAlpha(c: Char): Boolean = c in 'a'..'z' || c in 'A'..'Z' || c == '_'

    private fun isMultilineString(): Boolean = peek() == '"' && peek(1) == '"' && peek(2) == '"'

    /**
     * Tokenize a single- or triple-quoted string literal starting at the current position. The
     * returned token value excludes the surrounding quotes. Escape sequences are not interpreted.
     */
    private fun tokenizeString(): Token {
        if (isMultilineString()) {
            advance(3) // Skip opening """.

            val start = pos
            while (!isMultilineString() && !isEOF()) {
                advance()
            }

            val str = input.substring(start, pos)
            advance(3) // Skip closing """.
            return Token(TokenType.STRING, str)
        } else {
            advance()

            val start = pos
            while (peek() != '"' && !isEOF()) {
                advance()
            }

            val str = input.substring(start, pos)
            advance() // Skip closing quote.
            return Token(TokenType.STRING, str)
        }
    }

    /** Tokenize an identifier, mapping the (case-insensitive) words "true" / "false" to booleans. */
    private fun tokenizeIdentifier(): Token {
        val start = pos

        // isAlpha() already accepts '_', so no separate check for it is required here.
        while (isAlpha(peek()) || isDigit(peek())) {
            advance()
        }

        val str = input.substring(start, pos)
        val lower = str.lowercase()

        if (lower == "true" || lower == "false") {
            return Token(TokenType.BOOLEAN, lower)
        }

        return Token(TokenType.IDENTIFIER, str)
    }

    /** Tokenize a run of decimal digits as an (unsigned, integral) number. */
    private fun tokenizeNumber(): Token {
        val start = pos
        while (isDigit(peek())) {
            advance()
        }

        return Token(TokenType.NUMBER, input.substring(start, pos))
    }

    /** Return the next token from the input, or an [TokenType.EOF] token when input is exhausted. */
    fun nextToken(): Token {
        while (!isEOF()) {
            when (val c = peek()) {
                '#' -> { // Skip comment until end of line.
                    while (peek() != '\n' && !isEOF()) {
                        advance()
                    }
                }

                ' ', '\n', '\r', '\t' -> advance()

                '=' -> {
                    advance()
                    return Token(TokenType.EQUALS, "=")
                }

                '(' -> {
                    advance()
                    return Token(TokenType.LPAREN, "(")
                }

                ')' -> {
                    advance()
                    return Token(TokenType.RPAREN, ")")
                }

                '[' -> {
                    advance()
                    return Token(TokenType.LBRACKET, "[")
                }

                ']' -> {
                    advance()
                    return Token(TokenType.RBRACKET, "]")
                }

                ',' -> {
                    advance()
                    return Token(TokenType.COMMA, ",")
                }

                '"' -> {
                    return tokenizeString()
                }

                else -> {
                    if (isAlpha(c)) {
                        return tokenizeIdentifier()
                    } else if (isDigit(c)) {
                        return tokenizeNumber()
                    } else {
                        advance() // Skip anything else to avoid an endless loop.
                    }
                }
            }
        }

        return Token(TokenType.EOF, "")
    }
}
/**
 * A recursive descent parser for the subset of Starlark used in `MODULE.bazel` files. Only the
 * `module()` and `bazel_dep()` directives are interpreted; all other input is skipped.
 */
internal class Parser(input: String) {
    private val lexer = Lexer(input)
    private var currentToken: Token = lexer.nextToken()

    /**
     * Consume the current token, requiring it to be of the given [type].
     *
     * @throws IllegalArgumentException if the current token has a different type.
     */
    private fun eat(type: TokenType) {
        require(currentToken.type == type) { "Unexpected token: ${currentToken.type}, expected: $type" }
        currentToken = lexer.nextToken()
    }

    /**
     * Parse a `key = value` argument. Scalar values (string / number / boolean) are returned as
     * their raw text; list values are skipped and represented by the placeholder value "TODO".
     */
    private fun parseKeyValue(): Pair<String, String> {
        val key = currentToken.value
        eat(TokenType.IDENTIFIER)
        eat(TokenType.EQUALS)

        var value = currentToken.value
        if (currentToken.type in setOf(TokenType.STRING, TokenType.NUMBER, TokenType.BOOLEAN)) {
            eat(currentToken.type)
        } else if (currentToken.type == TokenType.LBRACKET) { // Skip lists for now.
            // Also stop at EOF: otherwise an unterminated list would make eat(EOF) loop forever.
            while (currentToken.type != TokenType.RBRACKET && currentToken.type != TokenType.EOF) {
                eat(currentToken.type)
            }

            eat(TokenType.RBRACKET)
            value = "TODO"
        }

        return key to value
    }

    /**
     * Parse the parenthesized `key = value` argument list of a directive into a map, consuming the
     * directive name, the parentheses and all arguments.
     */
    private fun parseDirectiveParams(): Map<String, String> {
        eat(TokenType.IDENTIFIER)
        eat(TokenType.LPAREN)

        val params = mutableMapOf<String, String>()
        while (currentToken.type != TokenType.RPAREN) {
            val (key, value) = parseKeyValue()
            params[key] = value
            if (currentToken.type == TokenType.COMMA) eat(TokenType.COMMA)
        }

        eat(TokenType.RPAREN)
        return params
    }

    /**
     * Parse a `bazel_dep()` directive.
     *
     * @throws IllegalArgumentException if the mandatory `name` or `version` argument is missing.
     */
    private fun parseBazelDepDirective(): BazelDepDirective {
        val params = parseDirectiveParams()

        return BazelDepDirective(
            name = requireNotNull(params["name"]) { "Missing name in 'bazel_dep' directive" },
            version = requireNotNull(params["version"]) { "Missing version in 'bazel_dep' directive" },
            devDependency = params["dev_dependency"]?.toBoolean() == true
        )
    }

    /**
     * Parse a `module()` directive. The compatibility level defaults to 0 when absent.
     *
     * @throws IllegalArgumentException if the mandatory `name` or `version` argument is missing.
     */
    private fun parseModuleDirective(): ModuleDirective {
        val params = parseDirectiveParams()

        return ModuleDirective(
            name = requireNotNull(params["name"]) { "Missing name in 'module' directive" },
            version = requireNotNull(params["version"]) { "Missing version in 'module' directive" },
            compatibilityLevel = params["compatibility_level"]?.toInt() ?: 0
        )
    }

    /**
     * Parse the whole input, collecting the `module()` directive (the last one wins if several are
     * present) and all `bazel_dep()` directives. Anything else is skipped.
     */
    fun parse(): ModuleMetadata {
        val dependencies = mutableListOf<BazelDepDirective>()
        var moduleDirective: ModuleDirective? = null

        while (currentToken.type != TokenType.EOF) {
            when (currentToken.value) {
                "module" -> moduleDirective = parseModuleDirective()
                "bazel_dep" -> dependencies.add(parseBazelDepDirective())
                else -> eat(currentToken.type) // Skip unknown tokens.
            }
        }

        return ModuleMetadata(
            module = moduleDirective,
            dependencies = dependencies
        )
    }
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy