diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..37bb5b4
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,52 @@
+.DS_Store
+*.[56789ao]
+*.a[56789o]
+*.so
+*.pyc
+._*
+.nfs.*
+[56789a].out
+*~
+*.orig
+*.rej
+*.exe
+.*.swp
+core
+*.cgo*.go
+*.cgo*.c
+_cgo_*
+_obj
+_test
+_testmain.go
+/VERSION.cache
+/bin/
+/build.out
+/doc/articles/wiki/*.bin
+/goinstall.log
+/last-change
+/misc/cgo/life/run.out
+/misc/cgo/stdio/run.out
+/misc/cgo/testso/main
+/pkg/
+/src/*.*/
+/src/cmd/cgo/zdefaultcc.go
+/src/cmd/dist/dist
+/src/cmd/go/internal/cfg/zdefaultcc.go
+/src/cmd/go/internal/cfg/zosarch.go
+/src/cmd/internal/objabi/zbootstrap.go
+/src/go/build/zcgo.go
+/src/go/doc/headscan
+/src/runtime/internal/sys/zversion.go
+/src/unicode/maketables
+/test.out
+/test/garbage/*.out
+/test/pass.out
+/test/run.out
+/test/times.out
+
+# Personal
+
+src/testbinaries
+src/tests_random
+src/nuru
+Notes.md
\ No newline at end of file
diff --git a/README.md b/README.md
index e3f6bba..5fae61a 100644
--- a/README.md
+++ b/README.md
@@ -3,6 +3,8 @@
+
+
@@ -19,19 +21,15 @@ instructions for your device below:
- Download the binary:
```
-curl -O -L https://github.com/AvicennaJr/Nuru/releases/download/v0.1.0/nuru_linux_amd64_v0.1.0.tar.gz
+curl -O -L https://github.com/AvicennaJr/Nuru/releases/download/v0.2.0/nuru_linux_amd64_v0.2.0.tar.gz
```
- - Extract the file:
+ - Extract the file to make it globally available:
```
-tar -xzvf nuru_linux_amd64_v0.1.0.tar.gz
+sudo tar -C /usr/local/bin -xzvf nuru_linux_amd64_v0.2.0.tar.gz
```
- - Add it to your $PATH:
-```
-cp nuru $HOME/bin
-```
- Confirm installation with:
```
@@ -44,12 +42,12 @@ nuru -v
- Download the binary with this command:
```
-curl -O -L https://github.com/AvicennaJr/Nuru/releases/download/v0.1.0/nuru_android_arm64_v0.1.0.tar.gz
+curl -O -L https://github.com/AvicennaJr/Nuru/releases/download/v0.2.0/nuru_android_arm64_v0.2.0.tar.gz
```
- Extract the file:
```
-tar -xzvf nuru_android_arm64_v0.1.0.tar.gz
+tar -xzvf nuru_android_arm64_v0.2.0.tar.gz
```
- Add it to path:
@@ -64,24 +62,9 @@ nuru -v
### Windows
- - Make a bin directory if it doesn't exist:
-
-```
-mkdir C:\bin
-```
- - Download the Nuru Program [Here](https://github.com/AvicennaJr/Nuru/releases/download/v0.1.0/nuru_windows_amd64_v0.1.0.exe)
- - Rename the downloaded program from `nuru_windows_amd64_v0.1.0.exe` to `nuru.exe`
- - Move the file `nuru.exe` to the folder `C:\bin`
- - Add the bin folder to Path with this command:
-
-```
-setx PATH "C:\bin;%PATH%"
-```
- - Confirm installation with:
-
-```
-nuru -v
-```
+ - Download the Nuru Installer [Here](https://github.com/AvicennaJr/Nuru/releases/download/v0.2.0/Nuru_Windows_Installer_v0.2.0.exe)
+ - Run the downloaded installer
+ - You can watch a full video guide [Here](https://youtu.be/T-lfaoqIFD4)
### Building From Source
@@ -98,17 +81,20 @@ go build -o nuru main.go
nuru -v
```
-## Syntax
+## Syntax At A Glance
+
+**NOTE**
+> There is more detailed documentation of the language [here](./docs/README.md).
Nuru, although still in its early stage, intends to be a fully functional programming language, and thus it has been baked with many features.
### Defining A Variable
-To initiliaze a variable use the `acha` keyword:
+To initialize a variable, use the `fanya` keyword:
```
-acha x = 2;
-acha y = 3;
+fanya x = 2;
+fanya y = 3;
andika(x*y) // output is 6
```
@@ -129,7 +115,6 @@ Nuru supports both single line and multiple line comments as shown below:
// Single line comment
/*
-
Multiple
Line
Comment
@@ -138,9 +123,7 @@ Comment
### Arithmetic Operations
-For now Nuru supports `+`, `-`, `/` and `*`. More will be added. The `/` operation will truncate (round to a whole number) as Floating points are not supported yet.
-
-Nuru also provides precedence of operations using the BODMAS rule:
+For now Nuru supports `+`, `-`, `/`, `*` and `%`. Nuru also provides precedence of operations using the BODMAS rule:
```
2 + 2 * 3 // output = 8
@@ -156,16 +139,18 @@ Type | Syntax | Comments
--------- | ----------------------------------------- | -----------------------
BOOL | `kweli sikweli` | kweli == true, sikweli == false
INT | `1, 100, 342, -4` | These are signed 64 bit integers
-STRING | `"" "mambo" "habari yako"` | They MUST be in DOUBLE QUOTES `"`
-ARRAY | `[] [1, 2, 3] [1, "moja", kweli]` | Arrays can hold any types
+FLOAT | `2.3, 4.5, 100.8094` | Signed 64 bit floats
+STRING | `"" "mambo" "habari yako"` | They can be in double `"` or single `'` quotes
+ARRAY | `[] [1, 2, 3] [1, "moja", kweli]` | Arrays can hold any types
DICT | `{} {"a": 3, 1: "moja", kweli: 2}` | Keys can be int, string or bool. Values can be anything
+NULL | `tupu` | These are nil objects
### Functions
This is how you define a function in Nuru:
```
-acha jumlisha = fn(x, y) {
+fanya jumlisha = unda(x, y) {
rudisha x + y
}
@@ -175,7 +160,7 @@ andika(jumlisha(3,4))
Nuru also supports recursion:
```
-acha fibo = fn(x) {
+fanya fibo = unda(x) {
kama (x == 0) {
rudisha 0;
} au kama (x == 1) {
@@ -205,11 +190,11 @@ kama (2<1) {
Nuru's while loop syntax is as follows:
```
-acha i = 10
+fanya i = 10
wakati (i > 0) {
andika(i)
- i = i - 1
+ i--
}
```
@@ -217,7 +202,7 @@ wakati (i > 0) {
This is how you initialize and perform other array operations in Nuru:
```
-acha arr = []
+fanya arr = []
// To add elements
@@ -225,9 +210,9 @@ sukuma(arr, 2)
andika(arr) // output = [2]
// Add two Arrays
-acha arr2 = [1,2,3,4]
+fanya arr2 = [1,2,3,4]
-acha arr3 = arr1 + arr2
+fanya arr3 = arr1 + arr2
andika(arr3) // output = [2,1,2,3,4]
@@ -246,7 +231,7 @@ andika(arr[3]) // output = 3
Nuru also supports dictionaries and you can do a lot with them as follows:
```
-acha mtu = {"jina": "Mojo", "kabila": "Mnyakusa"}
+fanya mtu = {"jina": "Mojo", "kabila": "Mnyakusa"}
// get value from key
andika(mtu["jina"]) // output = Mojo
@@ -267,18 +252,35 @@ andika(mtu) // output = {"jina": "Avicenna", "kabila": "Mnyakusa", "anapoishi":
// You can also add two Dictionaries
-acha kazi = {"kazi": "jambazi"}
+fanya kazi = {"kazi": "jambazi"}
mtu = mtu + kazi
andika(mtu) // output = {"jina": "Avicenna", "kabila": "Mnyakusa", "anapoishi": "Dar Es Salaam", "kazi": "jambazi"}
```
+### For Loops
+
+These can iterate over strings, arrays and dictionaries:
+```
+kwa i ktk "habari" {
+ andika(i)
+}
+/* //output
+h
+a
+b
+a
+r
+i
+*/
+```
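+
+Iterating over an array works the same way, for example:
+```
+fanya namba = [1, 2, 3]
+
+kwa n ktk namba {
+    andika(n)
+}
+/* //output
+1
+2
+3
+*/
+```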
+
### Getting Input From User
In Nuru you can get input from users using the `jaza()` keyword as follows:
```
-acha jina = jaza("Unaitwa nani? ") // will prompt for input
+fanya jina = jaza("Unaitwa nani? ") // will prompt for input
andika("Habari yako " + jina)
```
@@ -301,7 +303,7 @@ Kindly Note that everything should be placed in a single line. Here's an example
```
### Running From File
-To run a Nuru script, write the `nuru` command followed by the name of the file with a `.nr` extension:
+To run a Nuru script, write the `nuru` command followed by the name of the file with a `.nr` or `.sw` extension:
```
nuru myFile.nr
@@ -313,7 +315,13 @@ Kindly open an [Issue](https://github.com/AvicennaJr/Nuru/issues) to make sugges
## Contributions
-All contributions are welcomed. Clone the repo, hack it, make sure all tests are passing then submit a pull request.
+### Documentation
+
+Documentation is available in two languages, English and Kiswahili, both under the `docs` folder. All files are written in markdown. Feel free to contribute by making a pull request.
+
+### Code
+
+Clone the repo, hack it, make sure all tests are passing then submit a pull request.
## License
diff --git a/docs/en/README.md b/docs/en/README.md
new file mode 100644
index 0000000..8f5a3a3
--- /dev/null
+++ b/docs/en/README.md
@@ -0,0 +1,78 @@
+# NURU PROGRAMMING LANGUAGE DOCUMENTATION
+
+This documentation is intended for people with some experience in programming. It describes the syntax, types and how to perform various operations using the language.
+
+## Table Of Contents
+
+- [Comments](./comments.md)
+- [Numbers](./numbers.md)
+ * [Precedence](./numbers.md#precedence)
+ * [Unary Increments](./numbers.md#unary-increments)
+ * [Shorthand Assignments](./numbers.md#shorthand-assignment)
+ * [Negative Numbers](./numbers.md#negative-numbers)
+- [Strings](./strings.md)
+ * [Definition](./strings.md#definition)
+ * [Concatenation](./strings.md#concatenation)
+ * [Looping over a String](./strings.md#looping-over-a-string)
+ * [Comparing Strings](./strings.md#comparing-strings)
+ * [Length of a String](./strings.md#length-of-a-string)
+- [Arrays](./arrays.md)
+ * [Definition](./arrays.md#definition)
+ * [Accessing Elements](./arrays.md#accessing-elements)
+ * [Reassigning Elements](./arrays.md#reassigning-elements)
+ * [Looping over an Array](./arrays.md#looping-over-an-array)
+ * [Check if an Element Exists](./arrays.md#check-if-an-element-exists)
+ * [Concatenating Arrays](./arrays.md#concatenating-arrays)
+ * [Length of an Array](./arrays.md#length-of-an-array)
+ * [Adding Elements to an Array](./arrays.md#adding-elements-to-an-array)
+ * [Getting the last item in an Array](./arrays.md#getting-the-last-element-in-an-array)
+- [Dictionaries](./dictionaries.md)
+ * [Definition](./dictionaries.md#definition)
+ * [Accessing Elements](./dictionaries.md#accessing-elements)
+ * [Updating Elements](./dictionaries.md#updating-elements)
+ * [Adding New Elements](./dictionaries.md#adding-new-elements)
+ * [Concatenating Dictionaries](./dictionaries.md#concatenating-dictionaries)
+ * [Checking if a Key Exists](./dictionaries.md#checking-if-key-exists-in-a-dictionary)
+ * [Looping Over a Dictionary](./dictionaries.md#looping-over-a-dictionary)
+- [Booleans](./bool.md)
+ * [Example 1](./bool.md#example-1)
+ * [Example 2](./bool.md#example-2)
+- [Identifiers](./identifiers.md)
+ * [Example 1](./identifiers.md#example-1)
+- [For Loops](./for.md)
+ * [Definition](./for.md#definition)
+ * [Key-Value Pairs](./for.md#key-value-pairs)
+ * [Break and Continue](./for.md#break-vunja-and-continue-endelea)
+- [While Loops](./while.md)
+ * [Definition](./while.md#definition)
+ * [Break and Continue](./while.md#break-vunja-and-continue-endelea)
+- [If/Else](./ifStatements.md)
+ * [Definition](./ifStatements.md#definition)
+ * [Else Block](./ifStatements.md#else-block)
+- [Switch Statements](./switch.md)
+ * [Definition](./switch.md#definition)
+ * [Multiple Values in Case](./switch.md#multiple-values-in-a-case)
+ * [Default Keyword](./switch.md#default-kawaida)
+- [Functions](./function.md)
+ * [Definition](./function.md#definition)
+ * [Parameters](./function.md#parameters)
+ * [Return](./function.md#return-rudisha)
+ * [Recursion](./function.md#recursion)
+- [Builtins](./builtins.md)
+ * [andika()](./builtins.md#andika)
+ * [jaza()](./builtins.md#jaza)
+ * [aina()](./builtins.md#aina)
+ * [idadi()](./builtins.md#idadi)
+ * [sukuma()](./builtins.md#sukuma)
+ * [yamwisho()](./builtins.md#yamwisho)
+- [Null](./null.md)
+- [Operators](./operators.md)
+ * [Assignment](./operators.md#assignment)
+ * [Arithmetic](./operators.md#arithmetic-operators)
+ * [Comparison](./operators.md#comparison-operators)
+ * [Member](./operators.md#member-operator)
+ * [Logic](./operators.md#logic-operators)
+ * [Precedence](./operators.md#precedence-of-operators)
+- [Keywords](./keywords.md)
+ * [Reserved](./keywords.md#reserved)
+ * [Builtins](./keywords.md#builtins)
\ No newline at end of file
diff --git a/docs/en/arrays.md b/docs/en/arrays.md
new file mode 100644
index 0000000..f36ee0d
--- /dev/null
+++ b/docs/en/arrays.md
@@ -0,0 +1,121 @@
+## ARRAYS (ORODHA)
+
+### Definition
+
+Arrays are enclosed in square brackets `[]` and they can hold any type, even function definitions:
+```go
+fanya arr = [1, "mambo", kweli, unda(x, y){rudisha x + y}, 2 * 3 + 20]
+
+andika(arr)
+
+/*
+[1, mambo, kweli, unda(x, y) {rudisha (x + y);}, 26]
+*/
+```
+
+### Accessing Elements
+
+You can access individual elements through indexes starting from zero:
+```go
+fanya herufi = ["a", "b", "c"]
+
+andika(herufi[0]) // a
+```
+
+### Reassigning Elements
+
+You can also reassign values in elements:
+```go
+fanya herufi = ["a", "b", "c"]
+
+herufi[1] = "z"
+
+andika(herufi) // ["a", "z", "c"]
+```
+
+### Looping over an Array
+
+- You can also iterate through an array:
+```go
+fanya herufi = ["a", "b", "c"]
+
+kwa i ktk herufi {
+ andika(i)
+}
+/* a
+ b
+ c */
+```
+
+- And for a key, value pair:
+```go
+kwa i, v ktk herufi {
+ andika(i, "=>", v)
+}
+
+/* 0 => a
+ 1 => b
+ 2 => c */
+```
+
+### Check if an Element exists
+
+You can also check if elements exist in an array:
+```go
+andika("d" ktk herufi) // sikweli
+andika("a" ktk herufi) // kweli
+```
+
+### Concatenating Arrays
+
+- You can also add two arrays as follows:
+```
+fanya h1 = ["a", "b", "c"]
+fanya h2 = [1, 2, 3]
+fanya h3 = h1 + h2
+
+andika(h3) // ["a", "b", "c", 1, 2, 3]
+
+h2 += h3
+
+andika(h2) // [1, 2, 3, "a", "b", "c", 1, 2, 3]
+```
+
+- You can also multiply an array as follows:
+```
+fanya a = [1, 2, 3]
+
+andika(a * 2) // [1, 2, 3, 1, 2, 3]
+```
+
+### Length of an Array
+
+You can get the length of an array with `idadi`:
+```
+fanya a = ["a", "b", "c"]
+
+andika(idadi(a)) // 3
+```
+
+### Adding Elements to an Array
+
+You can add new elements to an array with `sukuma`:
+```go
+fanya a = [1, 2, 3]
+
+// you must reassign for the new value to be saved
+a = sukuma(a, "mambo")
+
+andika(a) // [1, 2, 3, "mambo"]
+```
+
+### Getting the Last Element in an Array
+
+You can get the last element of an array with `yamwisho`:
+```
+fanya a = [1, 2, 3]
+
+andika(yamwisho(a)) // 3
+```
+**Please Note**
+> A lot more array methods will be added in the future
\ No newline at end of file
diff --git a/docs/en/bool.md b/docs/en/bool.md
new file mode 100644
index 0000000..4c0aa09
--- /dev/null
+++ b/docs/en/bool.md
@@ -0,0 +1,32 @@
+## BOOLEANS
+
+Boolean objects are `truthy`, meaning any value evaluates to true. A value evaluates to false only when it is null (i.e. `tupu`) or false (i.e. `sikweli`):
+### Example 1
+```
+fanya x = 0
+
+kama (x) {
+ andika("I am true")
+} sivyo {
+ andika("I am not true")
+}
+
+// it will print "I am true"
+```
+
+### Example 2
+```
+kama (tupu) {
+andika("I am true")
+} sivyo {
+ andika("I am not true")
+}
+
+// will print "I am not true"
+```
+
+Expressions can also be evaluated to true or false:
+```
+andika(1 > 2) // sikweli
+
+andika(1 + 3 < 10) // kweli
\ No newline at end of file
diff --git a/docs/en/builtins.md b/docs/en/builtins.md
new file mode 100644
index 0000000..43925d9
--- /dev/null
+++ b/docs/en/builtins.md
@@ -0,0 +1,69 @@
+## BUILTINS
+
+Nuru has a few builtin functions, and more will be added in the future.
+
+### andika()
+
+This function will print out whatever is placed inside the parentheses `()`. It can take zero or more arguments. Arguments will be printed out with a space in between them:
+```
+andika(1,2,3) // 1 2 3
+```
+`andika()` also supports some basic formatting such as:
+- `\n` for a new line
+- `\t` for a tab space
+- `\\` for a backslash
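+
+For example, a minimal sketch using the newline escape (assuming the escapes behave as listed above):
+```
+andika("moja\nmbili")
+
+/*
+moja
+mbili
+*/
+```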
+
+### jaza()
+
+This is a function to get input from a user. It can have zero or one argument. The only acceptable argument is a string:
+```
+fanya salamu = unda(){
+ fanya jina = jaza("Unaitwa nani? ")
+ andika("mambo vipi", jina)
+}
+
+salamu()
+```
+
+### aina()
+
+`aina()` is a function that helps identify the type of an object. It only accepts one argument:
+```
+aina(2) // NAMBA
+```
+
+### idadi()
+
+`idadi()` is a function that gives the length of an object. It accepts only one argument, which can be a `string`, `list` or `dictionary`:
+```
+idadi("mambo") // 5
+```
+
+### jumla()
+
+`jumla()` is a function that gives the sum of the numbers (integers/floats) in a list. It accepts only one argument, which must be a `list` of numbers:
+```
+jumla([1,2,3,4]) // 10
+```
+
+
+### sukuma()
+
+`sukuma()` is a function that adds a new element to an array. It accepts two arguments: the first must be a list and the second is the element to be appended:
+```
+fanya majina = ["juma", "asha"]
+
+majina = sukuma(majina, "mojo")
+```
+**Notice that the list is reassigned for the change to take effect**
+
+### yamwisho()
+
+This is a function to get the last element in an array. It only accepts one argument which must be an array:
+```
+fanya namba = [1,2,3,4,5]
+
+yamwisho(namba) // 5
+```
+
+**MORE BUILTIN FUNCTIONS WILL BE ADDED WITH TIME**
\ No newline at end of file
diff --git a/docs/en/comments.md b/docs/en/comments.md
new file mode 100644
index 0000000..b10f988
--- /dev/null
+++ b/docs/en/comments.md
@@ -0,0 +1,14 @@
+## COMMENTS
+
+- You can write single line comments with `//`:
+```
+// This line will be ignored
+```
+- Multiline comments start with `/*` and end with `*/`:
+```
+/*
+These lines
+Will
+be
+ignored
+*/
\ No newline at end of file
diff --git a/docs/en/dictionaries.md b/docs/en/dictionaries.md
new file mode 100644
index 0000000..5e195f4
--- /dev/null
+++ b/docs/en/dictionaries.md
@@ -0,0 +1,96 @@
+## DICTIONARIES (KAMUSI)
+
+### DEFINITION
+
+Dictionaries are enclosed by curly braces `{}` and have keys and values. You can define a dictionary as follows:
+```
+fanya k = {"jina": "juma"}
+```
+- The `keys` can be `string, int, float` and `boolean`
+- The `values` can be of any type; `string, int, float, boolean, null` and even a `function`:
+```
+fanya k = {
+ "jina": "juma",
+ "umri": 2,
+ kweli : "true",
+ "mi ni function": unda(x){andika("habari", x)}
+ "sina value": tupu
+}
+
+andika(k["sina value"]) // tupu
+```
+
+### Accessing Elements
+
+You can access individual elements as follows:
+```
+andika(k[kweli]) // true
+
+andika(k["mi ni function"]("juma")) // habari juma
+```
+
+### Updating Elements
+You can update the value of an element as follows:
+```
+k['umri'] = 50
+
+andika(k['umri']) // 50
+```
+
+### Adding New Elements
+If a key-value pair doesn't exist, you can add one as follows:
+```
+k["I am new"] = "new element"
+
+andika(k["I am new"]) // new element
+```
+
+### Concatenating Dictionaries
+
+You can add two dictionaries as follows:
+```
+fanya a = {"a": "andazi"}
+fanya b = {"b": "bunduki"}
+fanya c = a + b
+
+andika(c) // {"a": "andazi", "b": "bunduki"}
+```
+
+### Checking If Key Exists In A Dictionary
+
+Use the `ktk` keyword to check if a key exists:
+```
+"umri" ktk k // kweli
+"ubini" ktk k // sikweli
+```
+
+### Looping Over A Dictionary
+
+- You can loop over a dictionary as follows:
+
+```go
+fanya k = {"a": "afya", "b": "buibui", "c": "chapa"}
+kwa i, v ktk k {
+ andika(i, "=>", v)
+}
+/* a => afya
+ b => buibui
+ c => chapa */
+```
+
+- You can also loop over just values as follows:
+
+```
+kwa v ktk k {
+ andika(v)
+}
+
+/*
+afya
+buibui
+chapa
+*/
+```
+
+**Please Note**
+> A lot more dict methods will be added in the future
\ No newline at end of file
diff --git a/docs/en/for.md b/docs/en/for.md
new file mode 100644
index 0000000..cd56a6c
--- /dev/null
+++ b/docs/en/for.md
@@ -0,0 +1,110 @@
+## FOR (KWA)
+
+### Definition
+
+For is used to iterate over an iterable object. An iterable object is a `string`, an `array` or a `dictionary`. You use the `kwa` keyword followed by an identifier, the `ktk` keyword and then the iterable. The loop body must be enclosed in braces `{}`. Here's an example:
+```
+fanya jina = "lugano"
+
+kwa i ktk jina {
+ andika(i)
+}
+
+/*
+l
+u
+g
+a
+n
+o
+*/
+```
+
+### Key Value Pairs
+
+Nuru allows you to get either the value alone or the key-value pair of an iterable. To get only the value, use a single temporary identifier:
+```
+fanya kamusi = {"a": "andaa", "b": "baba"}
+
+kwa v ktk kamusi {
+ andika(v)
+}
+
+/*
+andaa
+baba
+*/
+```
+To get both the key and the value, use two temporary identifiers:
+```
+kwa k, v ktk kamusi {
+    andika(k + " ni " + v)
+}
+
+/*
+a ni andaa
+b ni baba
+*/
+```
+- Note that key-value pair iteration also works for `strings` and `lists`:
+```
+kwa i, v ktk "mojo" {
+ andika(i, "->", v)
+}
+/*
+0 -> m
+1 -> o
+2 -> j
+3 -> o
+*/
+fanya majina = ["juma", "asha", "haruna"]
+
+kwa i, v ktk majina {
+ andika(i, "-", v)
+}
+
+/*
+0 - juma
+1 - asha
+2 - haruna
+*/
+```
+
+### Break (Vunja) and Continue (Endelea)
+
+- A loop can be terminated using the `vunja` keyword:
+```
+kwa i, v ktk "mojo" {
+ kama (i == 2) {
+ andika("nimevunja")
+ vunja
+ }
+ andika(v)
+}
+/*
+m
+o
+nimevunja
+*/
+```
+
+- A specific iteration can be skipped using the `endelea` keyword:
+```
+kwa i, v ktk "mojo" {
+ kama (i == 2) {
+ andika("nimeruka")
+ endelea
+ }
+ andika(v)
+}
+
+/*
+m
+o
+nimeruka
+o
+*/
+```
+
+**CAUTION**
+> In nested loops, the `vunja` and `endelea` keywords MIGHT misbehave
\ No newline at end of file
diff --git a/docs/en/function.md b/docs/en/function.md
new file mode 100644
index 0000000..0a22134
--- /dev/null
+++ b/docs/en/function.md
@@ -0,0 +1,55 @@
+## FUNCTIONS (UNDA)
+
+### Definition
+
+A function block starts with the `unda` keyword; parameters are surrounded by `()` and the body by `{}`. Functions must also be assigned to a variable, as follows:
+```
+fanya jum = unda(x, y) {
+ rudisha x + y
+}
+
+jum(2, 3) // 5
+```
+
+### Parameters
+
+Functions can have zero or more arguments. Arguments can be of any type, even other functions:
+```
+fanya salamu = unda() {
+ andika("Habari yako")
+}
+
+salamu()
+
+salamu = unda(jina) {
+ andika("Habari yako", jina)
+}
+
+salamu("asha") // Habari yako asha
+```
+
+### Return (rudisha)
+
+You can return items with the `rudisha` keyword. The `rudisha` keyword will terminate the block and return the value:
+```
+fanya mfano = unda(x) {
+ rudisha "nimerudi"
+ andika(x)
+}
+
+mfano(3) // nimerudi
+```
+
+### Recursion
+
+Nuru also supports recursion. Here's an example:
+```
+fanya fib = unda(n) {
+ kama (n < 3) {
+ rudisha 1
+ } sivyo {
+ rudisha fib(n-1) + fib(n-2)
+ }
+}
+
+andika(fib(10)) // 55
\ No newline at end of file
diff --git a/docs/en/identifiers.md b/docs/en/identifiers.md
new file mode 100644
index 0000000..58cf756
--- /dev/null
+++ b/docs/en/identifiers.md
@@ -0,0 +1,15 @@
+## IDENTIFIERS
+
+Identifiers can contain letters, numbers and underscores. However, identifiers cannot start with a number.
+
+### Example 1
+
+```
+fanya b2020 = 2020
+
+andika(b2020)
+
+fanya c2p = "C to P"
+
+andika(c2p) // "C to P"
+```
\ No newline at end of file
diff --git a/docs/en/ifStatements.md b/docs/en/ifStatements.md
new file mode 100644
index 0000000..3d7f12b
--- /dev/null
+++ b/docs/en/ifStatements.md
@@ -0,0 +1,27 @@
+## IF/ELSE (KAMA/SIVYO)
+
+### Definition
+
+You initialize an if block with `kama`; the condition must be inside parentheses `()` and the consequence inside braces `{}`:
+```
+kama (2>1) {
+ andika(kweli) // kweli
+}
+```
+
+### Else Block
+
+- For multiple conditions, you can use `kama` , `au kama` and `sivyo`:
+```
+fanya a = 10
+
+kama (a > 100) {
+ andika("a imezidi 100")
+} au kama (a < 10) {
+ andika("a ndogo kuliko 10")
+} sivyo {
+ andika("Thamani ya a ni", a)
+}
+
+// it will print 'Thamani ya a ni 10'
+```
\ No newline at end of file
diff --git a/docs/en/keywords.md b/docs/en/keywords.md
new file mode 100644
index 0000000..97e60e7
--- /dev/null
+++ b/docs/en/keywords.md
@@ -0,0 +1,57 @@
+## KEYWORDS
+
+### Reserved
+
+The keywords used in Nuru are listed below. Note that these words cannot be used as identifiers:
+
+
+
+| | | | |
+|---------|---------|---------|--------|
+| kweli   | sikweli | unda    | fanya  |
+| kama    | au      | sivyo   | wakati |
+| rudisha | vunja   | endelea | tupu   |
+| ktk     | kwa     | badili  | ikiwa  |
+| kawaida |         |         |        |
+
+### BuiltIns
+
+The following are some of the in-built functions in Nuru. They are reserved and thus cannot be used as identifiers:
+
+
+
+| | | |
+|--------|--------|----------|
+| andika | aina   | jaza     |
+| idadi  | sukuma | yamwisho |
\ No newline at end of file
diff --git a/docs/en/null.md b/docs/en/null.md
new file mode 100644
index 0000000..c21e74a
--- /dev/null
+++ b/docs/en/null.md
@@ -0,0 +1,17 @@
+## NULL (TUPU)
+
+- The null data type represents the absence of a value. It is defined as:
+```
+fanya a = tupu
+```
+
+- As expected, a null value evaluates to false:
+```
+kama (a) {
+ andika("niko tupu")
+} sivyo {
+ andika("nimevaa nguo")
+}
+
+// will print 'nimevaa nguo'
+```
\ No newline at end of file
diff --git a/docs/en/numbers.md b/docs/en/numbers.md
new file mode 100644
index 0000000..a857de6
--- /dev/null
+++ b/docs/en/numbers.md
@@ -0,0 +1,74 @@
+## INTEGERS (NAMBA) AND FLOATS (DESIMALI)
+
+### PRECEDENCE
+
+Integers and floats work the way you'd expect them to. Their precedence in mathematical operations follows the BODMAS rule:
+
+```go
+2 + 3 * 5 // 17
+
+fanya a = 2.5
+fanya b = 0.3
+
+a + b // 2.8
+```
+
+### UNARY INCREMENTS
+
+You can perform unary increments and decrements (`++` and `--`) on both floats and integers. These will add or subtract 1 from the current value. Note that the float or int has to be assigned to a variable for this operation to work. Here's an example:
+
+```go
+fanya i = 2.4
+
+i++ // 3.4
+```
+
+### SHORTHAND ASSIGNMENT
+
+You can also perform shorthand assignments with `+=`, `-=`, `/=`, `*=` and `%=` as follows:
+
+```go
+fanya i = 2
+
+i *= 3 // 6
+i /= 2 // 3
+i += 100 // 103
+i -= 10 // 93
+i %= 90 // 3
+```
+
+### NEGATIVE NUMBERS
+
+Negative numbers also behave as expected:
+
+```go
+fanya i = -10
+
+wakati (i < 0) {
+ andika(i)
+ i++
+}
+
+/*
+-10
+-9
+-8
+-7
+-6
+-5
+-4
+-3
+-2
+-1
+0
+1
+2
+3
+4
+5
+6
+7
+8
+9
+*/
+```
diff --git a/docs/en/operators.md b/docs/en/operators.md
new file mode 100644
index 0000000..142b20e
--- /dev/null
+++ b/docs/en/operators.md
@@ -0,0 +1,73 @@
+## OPERATORS
+
+### ASSIGNMENT
+
+Assuming `i` and `v` are predefined variables, Nuru supports the following assignment operators:
+
+- `i = v`: which is the regular assign operator
+- `i += v`: which is the equivalent of `i = i + v`
+- `i -= v`: which is the equivalent of `i = i - v`
+- `i *= v`: which is the equivalent of `i = i * v`
+- `i /= v`: which is the equivalent of `i = i / v`
+- `i %= v`: which is the equivalent of `i = i % v`
+
+For `strings`, `arrays` and `dictionaries`, the `+=` operator is also permitted. Example:
+```
+list1 += list2 // this is equivalent to list1 = list1 + list2
+```
+
+### ARITHMETIC OPERATORS
+
+The following arithmetic operators are supported:
+
+- `+`: Addition
+- `-`: Subtraction
+- `*`: Multiplication
+- `/`: Division
+- `%`: Modulo (i.e. the remainder of a division)
+- `**`: Exponential power (eg: `2**3 = 8`)
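+
+A quick sketch of some of these operators in use:
+```
+andika(7 % 3)      // 1
+andika(2 ** 3)     // 8
+andika(10 - 2 * 3) // 4
+```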
+
+### COMPARISON OPERATORS
+
+The following comparison operators are supported:
+
+- `==`: Equal to
+- `!=`: Not equal to
+- `>`: Greater than
+- `>=`: Greater than or equal to
+- `<`: Less than
+- `<=`: Less than or equal to
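+
+A short sketch of comparison expressions and the boolean values they evaluate to:
+```
+andika(3 >= 3) // kweli
+andika(5 < 2)  // sikweli
+andika(4 != 4) // sikweli
+```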
+
+### MEMBER OPERATOR
+
+The member operator in Nuru is `ktk`. It will check if an object exists in another object:
+```go
+fanya majina = ['juma', 'asha', 'haruna']
+
+"haruna" ktk majina // kweli
+"halima" ktk majina // sikweli
+```
+
+### LOGIC OPERATORS
+
+The following logic operators are supported:
+
+- `&&`: Logical `AND`. It will evaluate to true if both are true, otherwise it will evaluate to false.
+- `||`: Logical `OR`. It will evaluate to false if both are false, otherwise it will evaluate to true.
+- `!`: Logical `NOT`. It will evaluate to the opposite of a given expression.
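+
+A minimal sketch of the logic operators (assuming they evaluate as described above):
+```
+andika(kweli && sikweli) // sikweli
+andika(kweli || sikweli) // kweli
+andika(!kweli)           // sikweli
+```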
+
+### PRECEDENCE OF OPERATORS
+
+The following is the precedence of operators, starting from the HIGHEST PRIORITY to LOWEST.
+
+- `()`: Items in parentheses have the highest priority
+- `!`: Negation
+- `%`: Modulo
+- `**`: Exponential power
+- `/, *`: Division and Multiplication
+- `+, +=, -, -=`: Addition and Subtraction
+- `>, >=, <, <=`: Comparison operators
+- `==, !=`: Equal or Not Equal to
+- `=`: Assignment Operator
+- `ktk`: Member Operator
+- `&&, ||`: Logical AND and OR
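+
+As a brief illustration of this ordering, parentheses override the default precedence:
+```
+andika(2 + 3 * 2)   // 8
+andika((2 + 3) * 2) // 10
+```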
\ No newline at end of file
diff --git a/docs/en/strings.md b/docs/en/strings.md
new file mode 100644
index 0000000..ea1dbec
--- /dev/null
+++ b/docs/en/strings.md
@@ -0,0 +1,104 @@
+## STRINGS (NENO)
+
+### Definition
+
+Strings can be enclosed in either a single quote `''` or double quotes `""`:
+
+```
+andika("mambo") // mambo
+
+fanya a = 'niaje'
+
+andika("mambo", a) // mambo niaje
+```
+
+### Concatenation
+
+- Strings can also be concatenated as follows:
+
+```
+fanya a = "habari" + " " + "yako"
+
+andika(a) // habari yako
+
+fanya b = "habari"
+
+b += " yako"
+
+// habari yako
+```
+
+- You can also multiply a string `n` number of times:
+
+```
+andika("mambo " * 4)
+
+// mambo mambo mambo mambo
+
+fanya a = "habari"
+
+a *= 4
+
+// habarihabarihabarihabari
+```
+
+### Looping over a String
+
+- You can loop through a string as follows:
+
+```
+fanya jina = "avicenna"
+
+kwa i ktk jina {andika(i)}
+
+/*
+ a
+ v
+ i
+ c
+ e
+ n
+ n
+ a
+*/
+```
+
+- And for key, value pairs:
+```go
+kwa i, v ktk jina {
+ andika(i, "=>", v)
+}
+/*
+0 => a
+1 => v
+2 => i
+3 => c
+4 => e
+5 => n
+6 => n
+7 => a
+*/
+```
+
+### Comparing Strings
+
+- You can also check if two strings are the same:
+```
+fanya a = "nuru"
+
+andika(a == "nuru") // kweli
+
+andika(a == "mambo") // sikweli
+```
+
+### Length of a String
+
+You can also check the length of a string with the `idadi` function:
+```
+fanya a = "mambo"
+
+idadi(a) // 5
+```
+
+**Please Note**
+> A lot more string methods will be added in the future
\ No newline at end of file
diff --git a/docs/en/switch.md b/docs/en/switch.md
new file mode 100644
index 0000000..5f5b034
--- /dev/null
+++ b/docs/en/switch.md
@@ -0,0 +1,52 @@
+## SWITCH (BADILI)
+
+### Definition
+
+You initialize a switch statement with `badili`; the expression goes inside parentheses `()` and all the cases are enclosed inside braces `{}`.
+
+A case statement has the keyword `ikiwa` followed by a value to check. Multiple values can be in a single case, separated by commas `,`. The consequence to execute if a condition is fulfilled must be inside braces `{}`. Here's an example:
+```
+fanya a = 2
+
+badili (a){
+ ikiwa 3 {
+ andika("a ni tatu")
+ }
+ ikiwa 2 {
+ andika ("a ni mbili")
+ }
+}
+```
+
+### Multiple Values in a Case
+
+Multiple possibilities can be assigned to a single case (`ikiwa`) statement:
+```
+badili (a) {
+ ikiwa 1,2,3 {
+ andika("a ni kati ya 1, 2 au 3")
+ }
+ ikiwa 4 {
+ andika("a ni 4")
+ }
+}
+```
+
+### Default (kawaida)
+
+The default statement will be executed when no condition is satisfied. The default statement is represented by `kawaida`:
+```
+fanya z = 20
+
+badili(z) {
+ ikiwa 10 {
+ andika("kumi")
+ }
+ ikiwa 30 {
+ andika("thelathini")
+ }
+ kawaida {
+ andika("ishirini")
+ }
+}
+```
\ No newline at end of file
diff --git a/docs/en/while.md b/docs/en/while.md
new file mode 100644
index 0000000..791e619
--- /dev/null
+++ b/docs/en/while.md
@@ -0,0 +1,65 @@
+## WHILE (WAKATI)
+
+### Definition
+
+A while loop is executed as long as a specified condition is true. You initialize a while loop with the `wakati` keyword followed by the condition in parentheses `()`. The body of the loop should be enclosed in braces `{}`:
+```
+fanya i = 1
+
+wakati (i <= 5) {
+ andika(i)
+ i++
+}
+/*
+1
+2
+3
+4
+5
+*/
+```
+
+### Break (vunja) and Continue (endelea)
+
+- A loop can be terminated using the `vunja` keyword:
+```
+fanya i = 1
+
+wakati (i < 5) {
+ kama (i == 3) {
+ andika("nimevunja")
+ vunja
+ }
+ andika(i)
+ i++
+}
+/*
+1
+2
+nimevunja
+*/
+```
+
+- A specific iteration can be skipped using the `endelea` keyword:
+```
+fanya i = 0
+
+wakati (i < 5) {
+ i++
+ kama (i == 3) {
+ andika("nimeruka")
+ endelea
+ }
+ andika(i)
+}
+/*
+1
+2
+nimeruka
+4
+5
+*/
+```
+
+**CAUTION**
+> In nested loops, the `vunja` and `endelea` keywords MIGHT misbehave
diff --git a/docs/sw/README.md b/docs/sw/README.md
new file mode 100644
index 0000000..698fdad
--- /dev/null
+++ b/docs/sw/README.md
@@ -0,0 +1 @@
+# NURU PROGRAMMING LANGUAGE DOCUMENTATION
\ No newline at end of file
diff --git a/docs/sw/arrays.md b/docs/sw/arrays.md
new file mode 100644
index 0000000..bbc6434
--- /dev/null
+++ b/docs/sw/arrays.md
@@ -0,0 +1 @@
+# Orodha (Arrays)
\ No newline at end of file
diff --git a/docs/sw/bools.md b/docs/sw/bools.md
new file mode 100644
index 0000000..16f7a44
--- /dev/null
+++ b/docs/sw/bools.md
@@ -0,0 +1 @@
+# Kweli/Sikweli (Bools)
diff --git a/docs/sw/builtins.md b/docs/sw/builtins.md
new file mode 100644
index 0000000..f807886
--- /dev/null
+++ b/docs/sw/builtins.md
@@ -0,0 +1 @@
+# Builtins
\ No newline at end of file
diff --git a/docs/sw/comments.md b/docs/sw/comments.md
new file mode 100644
index 0000000..2a30807
--- /dev/null
+++ b/docs/sw/comments.md
@@ -0,0 +1 @@
+# Maelezo (Comments)
\ No newline at end of file
diff --git a/docs/sw/dictionaries.md b/docs/sw/dictionaries.md
new file mode 100644
index 0000000..469ad6d
--- /dev/null
+++ b/docs/sw/dictionaries.md
@@ -0,0 +1 @@
+# Kamusi (Dictionaries)
\ No newline at end of file
diff --git a/docs/sw/for.md b/docs/sw/for.md
new file mode 100644
index 0000000..52b9109
--- /dev/null
+++ b/docs/sw/for.md
@@ -0,0 +1 @@
+# Kwa (For)
\ No newline at end of file
diff --git a/docs/sw/functions.md b/docs/sw/functions.md
new file mode 100644
index 0000000..ed2c3d8
--- /dev/null
+++ b/docs/sw/functions.md
@@ -0,0 +1 @@
+# Unda (Functions)
\ No newline at end of file
diff --git a/docs/sw/identifiers.md b/docs/sw/identifiers.md
new file mode 100644
index 0000000..d89ef81
--- /dev/null
+++ b/docs/sw/identifiers.md
@@ -0,0 +1 @@
+# Tambulishi (Identifiers)
\ No newline at end of file
diff --git a/docs/sw/if.md b/docs/sw/if.md
new file mode 100644
index 0000000..a364c61
--- /dev/null
+++ b/docs/sw/if.md
@@ -0,0 +1 @@
+# Kama/Sivyo (If/Else)
\ No newline at end of file
diff --git a/docs/sw/keywords.md b/docs/sw/keywords.md
new file mode 100644
index 0000000..5160a95
--- /dev/null
+++ b/docs/sw/keywords.md
@@ -0,0 +1 @@
+# Maneno Muhimu (Keywords)
\ No newline at end of file
diff --git a/docs/sw/null.md b/docs/sw/null.md
new file mode 100644
index 0000000..628c65e
--- /dev/null
+++ b/docs/sw/null.md
@@ -0,0 +1 @@
+# Tupu (Null)
\ No newline at end of file
diff --git a/docs/sw/numbers.md b/docs/sw/numbers.md
new file mode 100644
index 0000000..dd4ec8b
--- /dev/null
+++ b/docs/sw/numbers.md
@@ -0,0 +1 @@
+# Namba na Desimali (Ints/Floats)
\ No newline at end of file
diff --git a/docs/sw/operators.md b/docs/sw/operators.md
new file mode 100644
index 0000000..cf38456
--- /dev/null
+++ b/docs/sw/operators.md
@@ -0,0 +1 @@
+# Matendaji (Operators)
\ No newline at end of file
diff --git a/docs/sw/strings.md b/docs/sw/strings.md
new file mode 100644
index 0000000..0185efb
--- /dev/null
+++ b/docs/sw/strings.md
@@ -0,0 +1 @@
+# Neno (Strings)
\ No newline at end of file
diff --git a/docs/sw/switch.md b/docs/sw/switch.md
new file mode 100644
index 0000000..ec1fc19
--- /dev/null
+++ b/docs/sw/switch.md
@@ -0,0 +1 @@
+# Badili (Switch)
\ No newline at end of file
diff --git a/docs/sw/while.md b/docs/sw/while.md
new file mode 100644
index 0000000..e022883
--- /dev/null
+++ b/docs/sw/while.md
@@ -0,0 +1 @@
+# Wakati (While)
\ No newline at end of file
diff --git a/evaluator/builtins.go b/evaluator/builtins.go
deleted file mode 100644
index f07138c..0000000
--- a/evaluator/builtins.go
+++ /dev/null
@@ -1,104 +0,0 @@
-package evaluator
-
-import (
- "bufio"
- "fmt"
- "io"
- "os"
-
- "github.com/AvicennaJr/Nuru/object"
-)
-
-var builtins = map[string]*object.Builtin{
- "idadi": {
- Fn: func(args ...object.Object) object.Object {
- if len(args) != 1 {
- return newError("Hoja hazilingani, tunahitaji=1, tumepewa=%d", len(args))
- }
-
- switch arg := args[0].(type) {
- case *object.Array:
- return &object.Integer{Value: int64(len(arg.Elements))}
- case *object.String:
- return &object.Integer{Value: int64(len(arg.Value))}
- default:
- return newError("Samahani, hii function haitumiki na %s", args[0].Type())
- }
- },
- },
- "yamwisho": {
- Fn: func(args ...object.Object) object.Object {
- if len(args) != 1 {
- return newError("Samahani, tunahitaji Hoja moja tu, wewe umeweka %d", len(args))
- }
- if args[0].Type() != object.ARRAY_OBJ {
- return newError("Samahani, hii function haitumiki na %s", args[0].Type())
- }
-
- arr := args[0].(*object.Array)
- length := len(arr.Elements)
- if length > 0 {
- return arr.Elements[length-1]
- }
-
- return NULL
- },
- },
- "sukuma": {
- Fn: func(args ...object.Object) object.Object {
- if len(args) != 2 {
- return newError("Samahani, tunahitaji Hoja 2, wewe umeweka %d", len(args))
- }
- if args[0].Type() != object.ARRAY_OBJ {
- return newError("Samahani, hii function haitumiki na %s", args[0].Type())
- }
-
- arr := args[0].(*object.Array)
- length := len(arr.Elements)
-
- newElements := make([]object.Object, length+1)
- copy(newElements, arr.Elements)
- newElements[length] = args[1]
-
- return &object.Array{Elements: newElements}
- },
- },
- "jaza": {
- Fn: func(args ...object.Object) object.Object {
-
- if len(args) > 1 {
- return newError("Samahani, hii function inapokea hoja 0 au 1, wewe umeweka %d", len(args))
- }
-
- if len(args) > 0 && args[0].Type() != object.STRING_OBJ {
- return newError(fmt.Sprintf(`Tafadhali tumia alama ya nukuu: "%s"`, args[0].Inspect()))
- }
- if len(args) == 1 {
- prompt := args[0].(*object.String).Value
- fmt.Fprint(os.Stdout, prompt)
- }
-
- buffer := bufio.NewReader(os.Stdin)
-
- line, _, err := buffer.ReadLine()
- if err != nil && err != io.EOF {
- return newError("Nimeshindwa kusoma uliyo yajaza")
- }
-
- return &object.String{Value: string(line)}
- },
- },
- "andika": {
- Fn: func(args ...object.Object) object.Object {
- if len(args) == 0 {
- fmt.Println("")
- } else {
- for _, arg := range args {
-
- fmt.Println(arg.Inspect())
- }
- }
- return nil
- },
- },
-}
diff --git a/evaluator/evaluator.go b/evaluator/evaluator.go
deleted file mode 100644
index 9b2726b..0000000
--- a/evaluator/evaluator.go
+++ /dev/null
@@ -1,544 +0,0 @@
-package evaluator
-
-import (
- "fmt"
- "strings"
-
- "github.com/AvicennaJr/Nuru/ast"
- "github.com/AvicennaJr/Nuru/object"
-)
-
-var (
- NULL = &object.Null{}
- TRUE = &object.Boolean{Value: true}
- FALSE = &object.Boolean{Value: false}
-)
-
-func Eval(node ast.Node, env *object.Environment) object.Object {
- switch node := node.(type) {
- case *ast.Program:
- return evalProgram(node, env)
-
- case *ast.ExpressionStatement:
- return Eval(node.Expression, env)
-
- case *ast.IntegerLiteral:
- return &object.Integer{Value: node.Value}
-
- case *ast.Boolean:
- return nativeBoolToBooleanObject(node.Value)
-
- case *ast.PrefixExpression:
- right := Eval(node.Right, env)
- if isError(right) {
- return right
- }
- return evalPrefixExpression(node.Operator, right)
-
- case *ast.InfixExpression:
- left := Eval(node.Left, env)
- if isError(left) {
- return left
- }
- right := Eval(node.Right, env)
- if isError(right) {
- return right
- }
- return evalInfixExpression(node.Operator, left, right)
-
- case *ast.BlockStatement:
- return evalBlockStatement(node, env)
-
- case *ast.IfExpression:
- return evalIfExpression(node, env)
-
- case *ast.ReturnStatement:
- val := Eval(node.ReturnValue, env)
- if isError(val) {
- return val
- }
- return &object.ReturnValue{Value: val}
-
- case *ast.LetStatement:
- val := Eval(node.Value, env)
- if isError(val) {
- return val
- }
-
- env.Set(node.Name.Value, val)
-
- case *ast.Identifier:
- return evalIdentifier(node, env)
-
- case *ast.FunctionLiteral:
- params := node.Parameters
- body := node.Body
- return &object.Function{Parameters: params, Env: env, Body: body}
-
- case *ast.CallExpression:
- function := Eval(node.Function, env)
- if isError(function) {
- return function
- }
- args := evalExpressions(node.Arguments, env)
- if len(args) == 1 && isError(args[0]) {
- return args[0]
- }
- return applyFunction(function, args)
- case *ast.StringLiteral:
- return &object.String{Value: node.Value}
-
- case *ast.ArrayLiteral:
- elements := evalExpressions(node.Elements, env)
- if len(elements) == 1 && isError(elements[0]) {
- return elements[0]
- }
- return &object.Array{Elements: elements}
- case *ast.IndexExpression:
- left := Eval(node.Left, env)
- if isError(left) {
- return left
- }
- index := Eval(node.Index, env)
- if isError(index) {
- return index
- }
- return evalIndexExpression(left, index)
- case *ast.DictLiteral:
- return evalDictLiteral(node, env)
- case *ast.WhileExpression:
- return evalWhileExpression(node, env)
- case *ast.AssignmentExpression:
- left := Eval(node.Left, env)
- if isError(left) {
- return left
- }
-
- value := Eval(node.Value, env)
- if isError(value) {
- return value
- }
-
- if ident, ok := node.Left.(*ast.Identifier); ok {
- env.Set(ident.Value, value)
- } else if ie, ok := node.Left.(*ast.IndexExpression); ok {
- obj := Eval(ie.Left, env)
- if isError(obj) {
- return obj
- }
-
- if array, ok := obj.(*object.Array); ok {
- index := Eval(ie.Index, env)
- if isError(index) {
- return index
- }
- if idx, ok := index.(*object.Integer); ok {
- if int(idx.Value) > len(array.Elements) {
- return newError("Index imezidi idadi ya elements")
- }
- array.Elements[idx.Value] = value
- } else {
- return newError("Hauwezi kufanya opereshen hii na %#v", index)
- }
- } else if hash, ok := obj.(*object.Dict); ok {
- key := Eval(ie.Index, env)
- if isError(key) {
- return key
- }
- if hashKey, ok := key.(object.Hashable); ok {
- hashed := hashKey.HashKey()
- hash.Pairs[hashed] = object.DictPair{Key: key, Value: value}
- } else {
- return newError("Hauwezi kufanya opereshen hii na %T", key)
- }
- } else {
- return newError("%T haifanyi operation hii", obj)
- }
- } else {
- return newError("Tumia neno kama variable, sio %T", left)
- }
-
- }
-
- return nil
-}
-
-func evalProgram(program *ast.Program, env *object.Environment) object.Object {
- var result object.Object
-
- for _, statment := range program.Statements {
- result = Eval(statment, env)
-
- switch result := result.(type) {
- case *object.ReturnValue:
- return result.Value
- case *object.Error:
- return result
- }
- }
-
- return result
-}
-
-func nativeBoolToBooleanObject(input bool) *object.Boolean {
- if input {
- return TRUE
- }
- return FALSE
-}
-
-func evalPrefixExpression(operator string, right object.Object) object.Object {
- switch operator {
- case "!":
- return evalBangOperatorExpression(right)
- case "-":
- return evalMinusPrefixOperatorExpression(right)
- default:
- return newError("operesheni haieleweki: %s%s", operator, right.Type())
- }
-}
-
-func evalBangOperatorExpression(right object.Object) object.Object {
- switch right {
- case TRUE:
- return FALSE
- case FALSE:
- return TRUE
- case NULL:
- return TRUE
- default:
- return FALSE
- }
-}
-
-func evalMinusPrefixOperatorExpression(right object.Object) object.Object {
- if right.Type() != object.INTEGER_OBJ {
- return newError("Operesheni Haielweki: -%s", right.Type())
- }
-
- value := right.(*object.Integer).Value
- return &object.Integer{Value: -value}
-}
-
-func evalInfixExpression(
- operator string,
- left, right object.Object,
-) object.Object {
- switch {
-
- case operator == "+" && left.Type() == object.DICT_OBJ && right.Type() == object.DICT_OBJ:
- leftVal := left.(*object.Dict).Pairs
- rightVal := right.(*object.Dict).Pairs
- pairs := make(map[object.HashKey]object.DictPair)
- for k, v := range leftVal {
- pairs[k] = v
- }
- for k, v := range rightVal {
- pairs[k] = v
- }
- return &object.Dict{Pairs: pairs}
-
- case operator == "+" && left.Type() == object.ARRAY_OBJ && right.Type() == object.ARRAY_OBJ:
- leftVal := left.(*object.Array).Elements
- rightVal := right.(*object.Array).Elements
- elements := make([]object.Object, len(leftVal)+len(rightVal))
- elements = append(leftVal, rightVal...)
- return &object.Array{Elements: elements}
-
- case operator == "*" && left.Type() == object.ARRAY_OBJ && right.Type() == object.INTEGER_OBJ:
- leftVal := left.(*object.Array).Elements
- rightVal := int(right.(*object.Integer).Value)
- elements := leftVal
- for i := rightVal; i > 1; i-- {
- elements = append(elements, leftVal...)
- }
- return &object.Array{Elements: elements}
-
- case operator == "*" && left.Type() == object.INTEGER_OBJ && right.Type() == object.ARRAY_OBJ:
- leftVal := int(left.(*object.Integer).Value)
- rightVal := right.(*object.Array).Elements
- elements := rightVal
- for i := leftVal; i > 1; i-- {
- elements = append(elements, rightVal...)
- }
- return &object.Array{Elements: elements}
-
- case operator == "*" && left.Type() == object.STRING_OBJ && right.Type() == object.INTEGER_OBJ:
- leftVal := left.(*object.String).Value
- rightVal := right.(*object.Integer).Value
- return &object.String{Value: strings.Repeat(leftVal, int(rightVal))}
-
- case operator == "*" && left.Type() == object.INTEGER_OBJ && right.Type() == object.STRING_OBJ:
- leftVal := left.(*object.Integer).Value
- rightVal := right.(*object.String).Value
- return &object.String{Value: strings.Repeat(rightVal, int(leftVal))}
-
- case left.Type() == object.INTEGER_OBJ && right.Type() == object.INTEGER_OBJ:
- return evalIntegerInfixExpression(operator, left, right)
-
- case operator == "==":
- return nativeBoolToBooleanObject(left == right)
-
- case operator == "!=":
- return nativeBoolToBooleanObject(left != right)
-
- case left.Type() != right.Type():
- return newError("Aina Hazilingani: %s %s %s",
- left.Type(), operator, right.Type())
-
- case left.Type() == object.STRING_OBJ && right.Type() == object.STRING_OBJ:
- return evalStringInfixExpression(operator, left, right)
-
- default:
- return newError("Operesheni Haielweki: %s %s %s",
- left.Type(), operator, right.Type())
- }
-}
-
-func evalIntegerInfixExpression(
- operator string,
- left, right object.Object,
-) object.Object {
- leftVal := left.(*object.Integer).Value
- rightVal := right.(*object.Integer).Value
-
- switch operator {
- case "+":
- return &object.Integer{Value: leftVal + rightVal}
- case "-":
- return &object.Integer{Value: leftVal - rightVal}
- case "*":
- return &object.Integer{Value: leftVal * rightVal}
- case "/":
- return &object.Integer{Value: leftVal / rightVal}
- case "<":
- return nativeBoolToBooleanObject(leftVal < rightVal)
- case ">":
- return nativeBoolToBooleanObject(leftVal > rightVal)
- case "==":
- return nativeBoolToBooleanObject(leftVal == rightVal)
- case "!=":
- return nativeBoolToBooleanObject(leftVal != rightVal)
- default:
- return newError("Operesheni Haielweki: %s %s %s",
- left.Type(), operator, right.Type())
- }
-}
-
-func evalIfExpression(ie *ast.IfExpression, env *object.Environment) object.Object {
- condition := Eval(ie.Condition, env)
-
- if isError(condition) {
- return condition
- }
-
- if isTruthy(condition) {
- return Eval(ie.Consequence, env)
- } else if ie.Alternative != nil {
- return Eval(ie.Alternative, env)
- } else {
- return NULL
- }
-}
-
-func isTruthy(obj object.Object) bool {
- switch obj {
- case NULL:
- return false
- case TRUE:
- return true
- case FALSE:
- return false
- default:
- return true
- }
-}
-
-func evalBlockStatement(block *ast.BlockStatement, env *object.Environment) object.Object {
- var result object.Object
-
- for _, statment := range block.Statements {
- result = Eval(statment, env)
-
- if result != nil {
- rt := result.Type()
- if rt == object.RETURN_VALUE_OBJ || rt == object.ERROR_OBJ {
- return result
- }
- }
- }
-
- return result
-}
-
-func newError(format string, a ...interface{}) *object.Error {
- format = fmt.Sprintf("\x1b[%dm%s\x1b[0m", 31, format)
- return &object.Error{Message: fmt.Sprintf(format, a...)}
-}
-
-func isError(obj object.Object) bool {
- if obj != nil {
- return obj.Type() == object.ERROR_OBJ
- }
-
- return false
-}
-
-func evalIdentifier(node *ast.Identifier, env *object.Environment) object.Object {
- if val, ok := env.Get(node.Value); ok {
- return val
- }
- if builtin, ok := builtins[node.Value]; ok {
- return builtin
- }
-
- return newError("Neno Halifahamiki: " + node.Value)
-}
-
-func evalExpressions(exps []ast.Expression, env *object.Environment) []object.Object {
- var result []object.Object
-
- for _, e := range exps {
- evaluated := Eval(e, env)
- if isError(evaluated) {
- return []object.Object{evaluated}
- }
-
- result = append(result, evaluated)
- }
-
- return result
-}
-
-func applyFunction(fn object.Object, args []object.Object) object.Object {
- switch fn := fn.(type) {
- case *object.Function:
- extendedEnv := extendedFunctionEnv(fn, args)
- evaluated := Eval(fn.Body, extendedEnv)
- return unwrapReturnValue(evaluated)
- case *object.Builtin:
- return fn.Fn(args...)
- default:
- return newError("sio function: %s", fn.Type())
- }
-
-}
-
-func extendedFunctionEnv(fn *object.Function, args []object.Object) *object.Environment {
- env := object.NewEnclosedEnvironment(fn.Env)
-
- for paramIdx, param := range fn.Parameters {
- if paramIdx < len(args) {
- env.Set(param.Value, args[paramIdx])
- }
- }
- return env
-}
-
-func unwrapReturnValue(obj object.Object) object.Object {
- if returnValue, ok := obj.(*object.ReturnValue); ok {
- return returnValue.Value
- }
-
- return obj
-}
-
-func evalStringInfixExpression(operator string, left, right object.Object) object.Object {
- if operator != "+" {
- return newError("Operesheni Haielweki: %s %s %s", left.Type(), operator, right.Type())
- }
-
- leftVal := left.(*object.String).Value
- rightVal := right.(*object.String).Value
-
- return &object.String{Value: leftVal + rightVal}
-}
-
-func evalIndexExpression(left, index object.Object) object.Object {
- switch {
- case left.Type() == object.ARRAY_OBJ && index.Type() == object.INTEGER_OBJ:
- return evalArrayIndexExpression(left, index)
- case left.Type() == object.ARRAY_OBJ && index.Type() != object.INTEGER_OBJ:
- return newError("Tafadhali tumia number, sio: %s", index.Type())
- case left.Type() == object.DICT_OBJ:
- return evalDictIndexExpression(left, index)
- default:
- return newError("Operesheni hii haiwezekani kwa: %s", left.Type())
- }
-}
-
-func evalArrayIndexExpression(array, index object.Object) object.Object {
- arrayObject := array.(*object.Array)
- idx := index.(*object.Integer).Value
- max := int64(len(arrayObject.Elements) - 1)
-
- if idx < 0 || idx > max {
- return NULL
- }
-
- return arrayObject.Elements[idx]
-}
-
-func evalDictLiteral(node *ast.DictLiteral, env *object.Environment) object.Object {
- pairs := make(map[object.HashKey]object.DictPair)
-
- for keyNode, valueNode := range node.Pairs {
- key := Eval(keyNode, env)
- if isError(key) {
- return key
- }
-
- hashKey, ok := key.(object.Hashable)
- if !ok {
- return newError("Hashing imeshindikana: %s", key.Type())
- }
-
- value := Eval(valueNode, env)
- if isError(value) {
- return value
- }
-
- hashed := hashKey.HashKey()
- pairs[hashed] = object.DictPair{Key: key, Value: value}
- }
-
- return &object.Dict{Pairs: pairs}
-}
-
-func evalDictIndexExpression(dict, index object.Object) object.Object {
- dictObject := dict.(*object.Dict)
-
- key, ok := index.(object.Hashable)
- if !ok {
- return newError("Samahani, %s haitumiki kama key", index.Type())
- }
-
- pair, ok := dictObject.Pairs[key.HashKey()]
- if !ok {
- return NULL
- }
-
- return pair.Value
-}
-
-func evalWhileExpression(we *ast.WhileExpression, env *object.Environment) object.Object {
- var result object.Object
-
- for {
- condition := Eval(we.Condition, env)
- if isError(condition) {
- return condition
- }
-
- if isTruthy(condition) {
- result = Eval(we.Consequence, env)
- } else {
- break
- }
- }
-
- if result != nil {
- return result
- }
- return nil
-}
diff --git a/lexer/lexer.go b/lexer/lexer.go
deleted file mode 100644
index 3df1a45..0000000
--- a/lexer/lexer.go
+++ /dev/null
@@ -1,193 +0,0 @@
-package lexer
-
-import (
- "github.com/AvicennaJr/Nuru/token"
-)
-
-type Lexer struct {
- input string // string or runes. Runes should be better.
- position int
- readPosition int
- ch byte // make this a rune too
-}
-
-func New(input string) *Lexer {
- l := &Lexer{input: input}
- l.readChar()
- return l
-}
-
-func (l *Lexer) readChar() {
- if l.readPosition >= len(l.input) {
- l.ch = 0
- } else {
- l.ch = l.input[l.readPosition]
- }
-
- l.position = l.readPosition
- l.readPosition += 1
-}
-
-func (l *Lexer) NextToken() token.Token {
- var tok token.Token
- l.skipWhitespace()
- if l.ch == '/' && l.peekChar() == '/' {
- l.skipSingleLineComment()
- return l.NextToken()
- }
- if l.ch == '/' && l.peekChar() == '*' {
- l.skipMultiLineComment()
- return l.NextToken()
- }
-
- switch l.ch {
- case '=':
- if l.peekChar() == '=' {
- ch := l.ch
- l.readChar()
- tok = token.Token{Type: token.EQ, Literal: string(ch) + string(l.ch)}
- } else {
- tok = newToken(token.ASSIGN, l.ch)
- }
- case ';':
- tok = newToken(token.SEMICOLON, l.ch)
- case '(':
- tok = newToken(token.LPAREN, l.ch)
- case ')':
- tok = newToken(token.RPAREN, l.ch)
- case '{':
- tok = newToken(token.LBRACE, l.ch)
- case '}':
- tok = newToken(token.RBRACE, l.ch)
- case ',':
- tok = newToken(token.COMMA, l.ch)
- case '+':
- tok = newToken(token.PLUS, l.ch)
- case '-':
- tok = newToken(token.MINUS, l.ch)
- case '!':
- if l.peekChar() == '=' {
- ch := l.ch
- l.readChar()
- tok = token.Token{Type: token.NOT_EQ, Literal: string(ch) + string(l.ch)}
- } else {
- tok = newToken(token.BANG, l.ch)
- }
- case '/':
- tok = newToken(token.SLASH, l.ch)
- case '*':
- tok = newToken(token.ASTERISK, l.ch)
- case '<':
- tok = newToken(token.LT, l.ch)
- case '>':
- tok = newToken(token.GT, l.ch)
- case '"':
- tok.Type = token.STRING
- tok.Literal = l.readString()
- case '[':
- tok = newToken(token.LBRACKET, l.ch)
- case ']':
- tok = newToken(token.RBRACKET, l.ch)
- case ':':
- tok = newToken(token.COLON, l.ch)
- case 0:
- tok.Literal = ""
- tok.Type = token.EOF
- default:
- if isLetter(l.ch) {
- tok.Literal = l.readIdentifier()
- tok.Type = token.LookupIdent(tok.Literal)
- return tok
- } else if isDigit(l.ch) {
- tok.Type = token.INT
- tok.Literal = l.readNumber()
- return tok
- } else {
- tok = newToken(token.ILLEGAL, l.ch)
- }
- }
-
- l.readChar()
- return tok
-}
-
-func newToken(tokenType token.TokenType, ch byte) token.Token {
- return token.Token{Type: tokenType, Literal: string(ch)}
-}
-
-func (l *Lexer) readIdentifier() string {
- position := l.position
-
- for isLetter(l.ch) {
- l.readChar()
- }
- return l.input[position:l.position]
-}
-
-func isLetter(ch byte) bool {
- return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch == '?' || ch == '&'
-}
-
-func (l *Lexer) skipWhitespace() {
- for l.ch == ' ' || l.ch == '\t' || l.ch == '\n' || l.ch == '\r' {
- l.readChar()
- }
-}
-
-func (l *Lexer) readNumber() string {
- position := l.position
- for isDigit(l.ch) {
- l.readChar()
- }
- return l.input[position:l.position]
-}
-
-func isDigit(ch byte) bool {
- return '0' <= ch && ch <= '9'
-}
-
-func (l *Lexer) peekChar() byte {
- if l.readPosition >= len(l.input) {
- return 0
- } else {
- return l.input[l.readPosition]
- }
-}
-
-func (l *Lexer) skipSingleLineComment() {
- for l.ch != '\n' && l.ch != 0 {
- l.readChar()
- }
- l.skipWhitespace()
-}
-
-func (l *Lexer) skipMultiLineComment() {
- endFound := false
-
- for !endFound {
- if l.ch == 0 {
- endFound = true
- }
-
- if l.ch == '*' && l.peekChar() == '/' {
- endFound = true
- l.readChar()
- }
-
- l.readChar()
- }
-
- l.skipWhitespace()
-}
-
-func (l *Lexer) readString() string {
- position := l.position + 1
- for {
- l.readChar()
- if l.ch == '"' || l.ch == 0 {
- break
- }
- }
-
- return l.input[position:l.position]
-}
diff --git a/main.go b/main.go
deleted file mode 100644
index 54ef51a..0000000
--- a/main.go
+++ /dev/null
@@ -1,58 +0,0 @@
-package main
-
-import (
- "flag"
- "fmt"
- "io/ioutil"
- "os"
-
- "github.com/AvicennaJr/Nuru/repl"
-)
-
-const (
- LOGO = `
-
-█░░ █░█ █▀▀ █░█ ▄▀█ █▄█ ▄▀█ █▄░█ █░█ █▀█ █░█
-█▄▄ █▄█ █▄█ █▀█ █▀█ ░█░ █▀█ █░▀█ █▄█ █▀▄ █▄█
-
- | Authored by Avicenna |
-`
- VERSION = "v0.1.0"
-)
-
-func main() {
-
- version := flag.Bool("v", false, "Onyesha version namba ya program")
- flag.Parse()
-
- if *version {
- fmt.Println(fmt.Sprintf("\x1b[%dm%s%s\x1b[0m", 32, "Nuru Programming Language || Version: ", VERSION))
- os.Exit(0)
- }
- args := flag.Args()
-
- if len(args) < 1 {
-
- coloredLogo := fmt.Sprintf("\x1b[%dm%s\x1b[0m", 34, LOGO)
- fmt.Println(coloredLogo)
- fmt.Println("𝑯𝒂𝒃𝒂𝒓𝒊, 𝒌𝒂𝒓𝒊𝒃𝒖 𝒖𝒕𝒖𝒎𝒊𝒆 𝒍𝒖𝒈𝒉𝒂 𝒚𝒂 𝑵𝒖𝒓𝒖 ✨")
- fmt.Println("\nTumia exit() au toka() kuondoka")
-
- repl.Start(os.Stdin, os.Stdout)
- } else if len(args) == 1 {
-
- file := args[0]
- contents, err := ioutil.ReadFile(file)
- if err != nil {
- fmt.Println(fmt.Sprintf("\x1b[%dm%s%s\x1b[0m", 31, "Error: Nimeshindwa kusoma file: ", args[0]))
- os.Exit(0)
- }
-
- repl.Read(string(contents))
-
- } else {
- fmt.Println(fmt.Sprintf("\x1b[%dm%s\x1b[0m", 31, "Error: Opereshen imeshindikana boss."))
- fmt.Println(fmt.Sprintf("\x1b[%dm%s\x1b[0m", 32, "\nTumia Command: 'nuru' kutmia program AU\nTumia Command: 'nuru' ikifuatwa na program file.\n\n\tMfano:\tnuru fileYangu.nr\n"))
- os.Exit(0)
- }
-}
diff --git a/object/object.go b/object/object.go
deleted file mode 100644
index fba6726..0000000
--- a/object/object.go
+++ /dev/null
@@ -1,191 +0,0 @@
-package object
-
-import (
- "bytes"
- "fmt"
- "hash/fnv"
- "strings"
-
- "github.com/AvicennaJr/Nuru/ast"
-)
-
-type ObjectType string
-
-const (
- INTEGER_OBJ = "NAMBA"
- BOOLEAN_OBJ = "BOOLEAN"
- NULL_OBJ = "NULL"
- RETURN_VALUE_OBJ = "RUDISHA"
- ERROR_OBJ = "KOSA"
- FUNCTION_OBJ = "FUNCTION"
- STRING_OBJ = "NENO"
- BUILTIN_OBJ = "YA_NDANI"
- ARRAY_OBJ = "ARRAY"
- DICT_OBJ = "KAMUSI"
-)
-
-type Object interface {
- Type() ObjectType
- Inspect() string
-}
-
-type Integer struct {
- Value int64
-}
-
-func (i *Integer) Inspect() string { return fmt.Sprintf("%d", i.Value) }
-func (i *Integer) Type() ObjectType { return INTEGER_OBJ }
-
-type Boolean struct {
- Value bool
-}
-
-func (b *Boolean) Inspect() string {
- if b.Value {
- return "kweli"
- } else {
- return "sikweli"
- }
-}
-func (b *Boolean) Type() ObjectType { return BOOLEAN_OBJ }
-
-type Null struct{}
-
-func (n *Null) Inspect() string { return "null" }
-func (n *Null) Type() ObjectType { return NULL_OBJ }
-
-type ReturnValue struct {
- Value Object
-}
-
-func (rv *ReturnValue) Inspect() string { return rv.Value.Inspect() }
-func (rv *ReturnValue) Type() ObjectType { return RETURN_VALUE_OBJ }
-
-type Error struct {
- Message string
-}
-
-func (e *Error) Inspect() string {
- msg := fmt.Sprintf("\x1b[%dm%s\x1b[0m", 31, "ERROR: ")
- return msg + e.Message
-}
-func (e *Error) Type() ObjectType { return ERROR_OBJ }
-
-type Function struct {
- Parameters []*ast.Identifier
- Body *ast.BlockStatement
- Env *Environment
-}
-
-func (f *Function) Type() ObjectType { return FUNCTION_OBJ }
-func (f *Function) Inspect() string {
- var out bytes.Buffer
-
- params := []string{}
- for _, p := range f.Parameters {
- params = append(params, p.String())
- }
-
- out.WriteString("fn")
- out.WriteString("(")
- out.WriteString(strings.Join(params, ", "))
- out.WriteString(") {\n")
- out.WriteString(f.Body.String())
- out.WriteString("\n}")
-
- return out.String()
-}
-
-type String struct {
- Value string
-}
-
-func (s *String) Inspect() string { return s.Value }
-func (s *String) Type() ObjectType { return STRING_OBJ }
-
-type BuiltinFunction func(args ...Object) Object
-
-type Builtin struct {
- Fn BuiltinFunction
-}
-
-func (b *Builtin) Inspect() string { return "builtin function" }
-func (b *Builtin) Type() ObjectType { return BUILTIN_OBJ }
-
-type Array struct {
- Elements []Object
-}
-
-func (ao *Array) Type() ObjectType { return ARRAY_OBJ }
-func (ao *Array) Inspect() string {
- var out bytes.Buffer
-
- elements := []string{}
- for _, e := range ao.Elements {
- elements = append(elements, e.Inspect())
- }
-
- out.WriteString("[")
- out.WriteString(strings.Join(elements, ", "))
- out.WriteString("]")
-
- return out.String()
-}
-
-type HashKey struct {
- Type ObjectType
- Value uint64
-}
-
-func (b *Boolean) HashKey() HashKey {
- var value uint64
-
- if b.Value {
- value = 1
- } else {
- value = 0
- }
-
- return HashKey{Type: b.Type(), Value: value}
-}
-
-func (i *Integer) HashKey() HashKey {
- return HashKey{Type: i.Type(), Value: uint64(i.Value)}
-}
-
-func (s *String) HashKey() HashKey {
- h := fnv.New64a()
- h.Write([]byte(s.Value))
-
- return HashKey{Type: s.Type(), Value: h.Sum64()}
-}
-
-type DictPair struct {
- Key Object
- Value Object
-}
-
-type Dict struct {
- Pairs map[HashKey]DictPair
-}
-
-func (d *Dict) Type() ObjectType { return DICT_OBJ }
-func (d *Dict) Inspect() string {
- var out bytes.Buffer
-
- pairs := []string{}
-
- for _, pair := range d.Pairs {
- pairs = append(pairs, fmt.Sprintf("%s: %s", pair.Key.Inspect(), pair.Value.Inspect()))
- }
-
- out.WriteString("{")
- out.WriteString(strings.Join(pairs, ", "))
- out.WriteString("}")
-
- return out.String()
-}
-
-type Hashable interface {
- HashKey() HashKey
-}
diff --git a/parser/parser.go b/parser/parser.go
deleted file mode 100644
index 1018ace..0000000
--- a/parser/parser.go
+++ /dev/null
@@ -1,535 +0,0 @@
-package parser
-
-import (
- "fmt"
- "strconv"
-
- "github.com/AvicennaJr/Nuru/ast"
- "github.com/AvicennaJr/Nuru/lexer"
- "github.com/AvicennaJr/Nuru/token"
-)
-
-const (
- // Think of BODMAS
- _ int = iota
- LOWEST
- ASSIGN // =
- EQUALS // ==
- LESSGREATER // > OR <
- SUM // +
- PRODUCT // *
- PREFIX // -X OR !X
- CALL // myFunction(X)
- INDEX // Arrays
-)
-
-var precedences = map[token.TokenType]int{
- token.ASSIGN: ASSIGN, // Lowest priority
- token.EQ: EQUALS,
- token.NOT_EQ: EQUALS,
- token.LT: LESSGREATER,
- token.GT: LESSGREATER,
- token.PLUS: SUM,
- token.MINUS: SUM,
- token.SLASH: PRODUCT,
- token.ASTERISK: PRODUCT,
- token.LPAREN: CALL,
- token.LBRACKET: INDEX, // Highest priority
-}
-
-type (
- prefixParseFn func() ast.Expression
- infixParseFn func(ast.Expression) ast.Expression
-)
-
-type Parser struct {
- l *lexer.Lexer
-
- curToken token.Token
- peekToken token.Token
-
- errors []string
-
- prefixParseFns map[token.TokenType]prefixParseFn
- infixParseFns map[token.TokenType]infixParseFn
-}
-
-func New(l *lexer.Lexer) *Parser {
- p := &Parser{l: l, errors: []string{}}
-
- // Gotta set these niggas
- p.nextToken()
- p.nextToken()
-
- p.prefixParseFns = make(map[token.TokenType]prefixParseFn)
- p.registerPrefix(token.STRING, p.parseStringLiteral)
- p.registerPrefix(token.IDENT, p.parseIdentifier)
- p.registerPrefix(token.INT, p.parseIntegerLiteral)
- p.registerPrefix(token.BANG, p.parsePrefixExpression)
- p.registerPrefix(token.MINUS, p.parsePrefixExpression)
- p.registerPrefix(token.TRUE, p.parseBoolean)
- p.registerPrefix(token.FALSE, p.parseBoolean)
- p.registerPrefix(token.LPAREN, p.parseGroupedExpression)
- p.registerPrefix(token.IF, p.parseIfExpression)
- p.registerPrefix(token.FUNCTION, p.parseFunctionLiteral)
- p.registerPrefix(token.LBRACKET, p.parseArrayLiteral)
- p.registerPrefix(token.LBRACE, p.parseDictLiteral)
- p.registerPrefix(token.WHILE, p.parseWhileExpression)
-
- p.infixParseFns = make(map[token.TokenType]infixParseFn)
- p.registerInfix(token.PLUS, p.parseInfixExpression)
- p.registerInfix(token.MINUS, p.parseInfixExpression)
- p.registerInfix(token.SLASH, p.parseInfixExpression)
- p.registerInfix(token.ASTERISK, p.parseInfixExpression)
- p.registerInfix(token.EQ, p.parseInfixExpression)
- p.registerInfix(token.NOT_EQ, p.parseInfixExpression)
- p.registerInfix(token.LT, p.parseInfixExpression)
- p.registerInfix(token.GT, p.parseInfixExpression)
- p.registerInfix(token.LPAREN, p.parseCallExpression)
- p.registerInfix(token.LBRACKET, p.parseIndexExpression)
- p.registerInfix(token.ASSIGN, p.parseAssignmentExpression)
- return p
-}
-
-func (p *Parser) nextToken() {
- // only missing shuffle to make it a music player XD
- p.curToken = p.peekToken
- p.peekToken = p.l.NextToken()
-}
-
-func (p *Parser) ParseProgram() *ast.Program {
- program := &ast.Program{}
- program.Statements = []ast.Statement{}
-
- for !p.curTokenIs(token.EOF) {
- stmt := p.parseStatement()
- program.Statements = append(program.Statements, stmt)
-
- p.nextToken()
- }
- return program
-}
-
-func (p *Parser) parseStatement() ast.Statement {
- // Remember to add switch statements to the language
- switch p.curToken.Type {
- case token.LET:
- return p.parseLetStatment()
- case token.RETURN:
- return p.parseReturnStatement()
- default:
- return p.parseExpressionStatement()
- }
-}
-
-func (p *Parser) parseLetStatment() *ast.LetStatement {
- stmt := &ast.LetStatement{Token: p.curToken}
-
- if !p.expectPeek(token.IDENT) {
- return nil
- }
-
- stmt.Name = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
-
- if !p.expectPeek(token.ASSIGN) {
- return nil
- }
-
- p.nextToken()
-
- stmt.Value = p.parseExpression(LOWEST)
-
- if p.peekTokenIs(token.SEMICOLON) {
- p.nextToken()
- }
-
- return stmt
-}
-
-func (p *Parser) parseAssignmentExpression(exp ast.Expression) ast.Expression {
- switch node := exp.(type) {
- case *ast.Identifier, *ast.IndexExpression:
- default:
- msg := fmt.Sprintf("Tulitegemea kupata kitambulishi au array, badala yake tumepata: %T %#v", node, exp)
- p.errors = append(p.errors, msg)
- return nil
- }
-
- ae := &ast.AssignmentExpression{Token: p.curToken, Left: exp}
-
- p.nextToken()
-
- ae.Value = p.parseExpression(LOWEST)
-
- return ae
-}
-
-func (p *Parser) curTokenIs(t token.TokenType) bool {
- return p.curToken.Type == t
-}
-
-func (p *Parser) peekTokenIs(t token.TokenType) bool {
- return p.peekToken.Type == t
-}
-
-func (p *Parser) expectPeek(t token.TokenType) bool {
- if p.peekTokenIs(t) {
- p.nextToken()
- return true
- } else {
- p.peekError(t)
- return false
- }
-}
-
-func (p *Parser) Errors() []string {
- return p.errors
-}
-
-func (p *Parser) peekError(t token.TokenType) {
- msg := fmt.Sprintf("Tulitegemea kupata %s, badala yake tumepata %s", t, p.peekToken.Type)
- p.errors = append(p.errors, msg)
-}
-
-func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
- stmt := &ast.ReturnStatement{Token: p.curToken}
- p.nextToken()
-
- stmt.ReturnValue = p.parseExpression(LOWEST)
-
- if p.peekTokenIs(token.SEMICOLON) {
- p.nextToken()
- }
-
- return stmt
-}
-
-func (p *Parser) registerPrefix(tokenType token.TokenType, fn prefixParseFn) {
- p.prefixParseFns[tokenType] = fn
-}
-
-func (p *Parser) registerInfix(tokenType token.TokenType, fn infixParseFn) {
- p.infixParseFns[tokenType] = fn
-}
-
-func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
- stmt := &ast.ExpressionStatement{Token: p.curToken}
-
- stmt.Expression = p.parseExpression(LOWEST)
-
- if p.peekTokenIs(token.SEMICOLON) {
- p.nextToken()
- }
-
- return stmt
-}
-
-func (p *Parser) noPrefixParseFnError(t token.TokenType) {
- msg := fmt.Sprintf("Tumeshindwa kuparse %s", t)
- p.errors = append(p.errors, msg)
-}
-
-func (p *Parser) parseExpression(precedence int) ast.Expression {
- prefix := p.prefixParseFns[p.curToken.Type]
- if prefix == nil {
- p.noPrefixParseFnError(p.curToken.Type)
- return nil
- }
- leftExp := prefix()
-
- for !p.peekTokenIs(token.SEMICOLON) && precedence < p.peekPrecedence() {
- infix := p.infixParseFns[p.peekToken.Type]
- if infix == nil {
- return leftExp
- }
-
- p.nextToken()
- leftExp = infix(leftExp)
- }
- return leftExp
-
-}
-
-func (p *Parser) parseIdentifier() ast.Expression {
- return &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
-}
-
-func (p *Parser) parseIntegerLiteral() ast.Expression {
- lit := &ast.IntegerLiteral{Token: p.curToken}
-
- value, err := strconv.ParseInt(p.curToken.Literal, 0, 64)
- if err != nil {
- msg := fmt.Sprintf("Hatuwezi kuparse %q kama namba", p.curToken.Literal)
- p.errors = append(p.errors, msg)
- return nil
- }
- lit.Value = value
-
- return lit
-}
-
-func (p *Parser) parsePrefixExpression() ast.Expression {
- expression := &ast.PrefixExpression{
- Token: p.curToken,
- Operator: p.curToken.Literal,
- }
-
- p.nextToken()
-
- expression.Right = p.parseExpression(PREFIX)
-
- return expression
-}
-
-func (p *Parser) peekPrecedence() int {
- if p, ok := precedences[p.peekToken.Type]; ok {
- return p
- }
- return LOWEST
-}
-
-func (p *Parser) curPrecedence() int {
- if p, ok := precedences[p.curToken.Type]; ok {
- return p
- }
-
- return LOWEST
-}
-
-func (p *Parser) parseInfixExpression(left ast.Expression) ast.Expression {
- expression := &ast.InfixExpression{
- Token: p.curToken,
- Operator: p.curToken.Literal,
- Left: left,
- }
-
- precedence := p.curPrecedence()
- p.nextToken()
- expression.Right = p.parseExpression(precedence)
- return expression
-}
-
-func (p *Parser) parseBoolean() ast.Expression {
- return &ast.Boolean{Token: p.curToken, Value: p.curTokenIs(token.TRUE)}
-}
-
-func (p *Parser) parseGroupedExpression() ast.Expression {
- p.nextToken()
-
- exp := p.parseExpression(LOWEST)
-
- if !p.expectPeek(token.RPAREN) {
- return nil
- }
-
- return exp
-}
-
-func (p *Parser) parseIfExpression() ast.Expression {
- expression := &ast.IfExpression{Token: p.curToken}
-
- if !p.expectPeek(token.LPAREN) {
- return nil
- }
-
- p.nextToken()
- expression.Condition = p.parseExpression(LOWEST)
-
- if !p.expectPeek(token.RPAREN) {
- return nil
- }
-
- if !p.expectPeek(token.LBRACE) {
- return nil
- }
-
- expression.Consequence = p.parseBlockStatement()
-
- if p.peekTokenIs(token.ELSE) {
- p.nextToken()
- if p.peekTokenIs(token.IF) {
- p.nextToken()
- expression.Alternative = &ast.BlockStatement{
- Statements: []ast.Statement{
- &ast.ExpressionStatement{
- Expression: p.parseIfExpression(),
- },
- },
- }
- return expression
- }
-
- if !p.expectPeek(token.LBRACE) {
- return nil
- }
-
- expression.Alternative = p.parseBlockStatement()
- }
-
- return expression
-}
-
-func (p *Parser) parseBlockStatement() *ast.BlockStatement {
- block := &ast.BlockStatement{Token: p.curToken}
- block.Statements = []ast.Statement{}
-
- p.nextToken()
-
- for !p.curTokenIs(token.RBRACE) && !p.curTokenIs(token.EOF) {
- stmt := p.parseStatement()
- block.Statements = append(block.Statements, stmt)
- p.nextToken()
- }
-
- return block
-}
-
-func (p *Parser) parseFunctionLiteral() ast.Expression {
- lit := &ast.FunctionLiteral{Token: p.curToken}
-
- if !p.expectPeek(token.LPAREN) {
- return nil
- }
-
- lit.Parameters = p.parseFunctionParameters()
-
- if !p.expectPeek(token.LBRACE) {
- return nil
- }
-
- lit.Body = p.parseBlockStatement()
-
- return lit
-}
-
-func (p *Parser) parseFunctionParameters() []*ast.Identifier {
- identifiers := []*ast.Identifier{}
-
- if p.peekTokenIs(token.RPAREN) {
- p.nextToken()
- return identifiers
- }
-
- p.nextToken()
-
- ident := &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
- identifiers = append(identifiers, ident)
-
- for p.peekTokenIs(token.COMMA) {
- p.nextToken()
- p.nextToken()
- ident := &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
- identifiers = append(identifiers, ident)
- }
-
- if !p.expectPeek(token.RPAREN) {
- return nil
- }
-
- return identifiers
-}
-
-func (p *Parser) parseCallExpression(function ast.Expression) ast.Expression {
- exp := &ast.CallExpression{Token: p.curToken, Function: function}
- exp.Arguments = p.parseExpressionList(token.RPAREN)
- return exp
-}
-
-func (p *Parser) parseStringLiteral() ast.Expression {
- return &ast.StringLiteral{Token: p.curToken, Value: p.curToken.Literal}
-}
-
-func (p *Parser) parseArrayLiteral() ast.Expression {
- array := &ast.ArrayLiteral{Token: p.curToken}
-
- array.Elements = p.parseExpressionList(token.RBRACKET)
-
- return array
-}
-
-func (p *Parser) parseExpressionList(end token.TokenType) []ast.Expression {
- list := []ast.Expression{}
-
- if p.peekTokenIs(end) {
- p.nextToken()
- return list
- }
-
- p.nextToken()
- list = append(list, p.parseExpression(LOWEST))
-
- for p.peekTokenIs(token.COMMA) {
- p.nextToken()
- p.nextToken()
- list = append(list, p.parseExpression(LOWEST))
- }
-
- if !p.expectPeek(end) {
- return nil
- }
- return list
-}
-
-func (p *Parser) parseIndexExpression(left ast.Expression) ast.Expression {
- exp := &ast.IndexExpression{Token: p.curToken, Left: left}
-
- p.nextToken()
- exp.Index = p.parseExpression(LOWEST)
- if !p.expectPeek(token.RBRACKET) {
- return nil
- }
-
- return exp
-}
-
-func (p *Parser) parseDictLiteral() ast.Expression {
- dict := &ast.DictLiteral{Token: p.curToken}
- dict.Pairs = make(map[ast.Expression]ast.Expression)
-
- for !p.peekTokenIs(token.RBRACE) {
- p.nextToken()
- key := p.parseExpression(LOWEST)
-
- if !p.expectPeek(token.COLON) {
- return nil
- }
-
- p.nextToken()
- value := p.parseExpression(LOWEST)
-
- dict.Pairs[key] = value
-
- if !p.peekTokenIs(token.RBRACE) && !p.expectPeek(token.COMMA) {
- return nil
- }
- }
-
- if !p.expectPeek(token.RBRACE) {
- return nil
- }
-
- return dict
-}
-
-func (p *Parser) parseWhileExpression() ast.Expression {
- expression := &ast.WhileExpression{Token: p.curToken}
-
- if !p.expectPeek(token.LPAREN) {
- return nil
- }
-
- p.nextToken()
- expression.Condition = p.parseExpression(LOWEST)
-
- if !p.expectPeek(token.RPAREN) {
- return nil
- }
-
- if !p.expectPeek(token.LBRACE) {
- return nil
- }
-
- expression.Consequence = p.parseBlockStatement()
-
- return expression
-}
diff --git a/src/Makefile b/src/Makefile
new file mode 100644
index 0000000..08ffece
--- /dev/null
+++ b/src/Makefile
@@ -0,0 +1,23 @@
+VERSION=0.2.0
+
+build_linux:
+ env GOOS=linux GOARCH=amd64 go build -o nuru
+ tar -zcvf nuru_linux_amd64_v${VERSION}.tar.gz nuru
+ rm nuru
+
+build_windows:
+ env GOOS=windows GOARCH=amd64 go build -o nuru_windows_amd64_v${VERSION}.exe
+
+build_android:
+ env GOOS=android GOARCH=arm64 go build -o nuru
+	tar -zcvf nuru_android_arm64_v${VERSION}.tar.gz nuru
+ rm nuru
+
+test:
+ go test ./parser/
+ go test ./ast/
+ go test ./evaluator/
+ go test ./object/
+
+clean:
+ go clean
\ No newline at end of file
diff --git a/ast/ast.go b/src/ast/ast.go
similarity index 67%
rename from ast/ast.go
rename to src/ast/ast.go
index 241a169..3f6e407 100644
--- a/ast/ast.go
+++ b/src/ast/ast.go
@@ -179,13 +179,13 @@ func (ie *IfExpression) expressionNode() {}
func (ie *IfExpression) TokenLiteral() string { return ie.Token.Literal }
func (ie *IfExpression) String() string {
var out bytes.Buffer
- out.WriteString("if")
+ out.WriteString("kama")
out.WriteString(ie.Condition.String())
out.WriteString(" ")
out.WriteString(ie.Consequence.String())
if ie.Alternative != nil {
- out.WriteString("else")
+ out.WriteString("sivyo")
out.WriteString(ie.Alternative.String())
}
@@ -360,10 +360,169 @@ func (we *WhileExpression) TokenLiteral() string { return we.Token.Literal }
func (we *WhileExpression) String() string {
var out bytes.Buffer
- out.WriteString("while")
+ out.WriteString("wakati")
out.WriteString(we.Condition.String())
out.WriteString(" ")
out.WriteString(we.Consequence.String())
return out.String()
}
+
+type Null struct {
+ Token token.Token
+}
+
+func (n *Null) expressionNode() {}
+func (n *Null) TokenLiteral() string { return n.Token.Literal }
+func (n *Null) String() string { return n.Token.Literal }
+
+type Break struct {
+ Statement
+ Token token.Token // the 'break' token
+}
+
+func (b *Break) expressionNode() {}
+func (b *Break) TokenLiteral() string { return b.Token.Literal }
+func (b *Break) String() string { return b.Token.Literal }
+
+type Continue struct {
+ Statement
+ Token token.Token // the 'continue' token
+}
+
+func (c *Continue) expressionNode() {}
+func (c *Continue) TokenLiteral() string { return c.Token.Literal }
+func (c *Continue) String() string { return c.Token.Literal }
+
+type PostfixExpression struct {
+ Token token.Token
+ Operator string
+}
+
+func (pe *PostfixExpression) expressionNode() {}
+func (pe *PostfixExpression) TokenLiteral() string { return pe.Token.Literal }
+func (pe *PostfixExpression) String() string {
+ var out bytes.Buffer
+ out.WriteString("(")
+ out.WriteString(pe.Token.Literal)
+ out.WriteString(pe.Operator)
+ out.WriteString(")")
+ return out.String()
+}
+
+type FloatLiteral struct {
+ Token token.Token
+ Value float64
+}
+
+func (fl *FloatLiteral) expressionNode() {}
+func (fl *FloatLiteral) TokenLiteral() string { return fl.Token.Literal }
+func (fl *FloatLiteral) String() string { return fl.Token.Literal }
+
+type For struct {
+ Expression
+ Token token.Token
+ Identifier string // "i"
+ StarterName *Identifier // i = 0
+ StarterValue Expression
+ Closer Expression // i++
+ Condition Expression // i < 1
+ Block *BlockStatement
+}
+
+type ForIn struct {
+ Expression
+ Token token.Token
+ Key string
+ Value string
+ Iterable Expression
+ Block *BlockStatement
+}
+
+func (fi *ForIn) expressionNode() {}
+func (fi *ForIn) TokenLiteral() string { return fi.Token.Literal }
+func (fi *ForIn) String() string {
+ var out bytes.Buffer
+
+ out.WriteString("kwa ")
+ if fi.Key != "" {
+ out.WriteString(fi.Key + ", ")
+ }
+ out.WriteString(fi.Value + " ")
+ out.WriteString("ktk ")
+ out.WriteString(fi.Iterable.String() + " {\n")
+ out.WriteString("\t" + fi.Block.String())
+ out.WriteString("\n}")
+
+ return out.String()
+}
+
+type CaseExpression struct {
+ Token token.Token
+ Default bool
+ Expr []Expression
+ Block *BlockStatement
+}
+
+func (ce *CaseExpression) expressionNode() {}
+func (ce *CaseExpression) TokenLiteral() string { return ce.Token.Literal }
+func (ce *CaseExpression) String() string {
+ var out bytes.Buffer
+
+ if ce.Default {
+ out.WriteString("kawaida ")
+ } else {
+ out.WriteString("ikiwa ")
+
+ tmp := []string{}
+ for _, exp := range ce.Expr {
+ tmp = append(tmp, exp.String())
+ }
+ out.WriteString(strings.Join(tmp, ","))
+ }
+ out.WriteString(ce.Block.String())
+ return out.String()
+}
+
+type SwitchExpression struct {
+ Token token.Token
+ Value Expression
+ Choices []*CaseExpression
+}
+
+func (se *SwitchExpression) expressionNode() {}
+func (se *SwitchExpression) TokenLiteral() string { return se.Token.Literal }
+func (se *SwitchExpression) String() string {
+ var out bytes.Buffer
+ out.WriteString("\nbadili (")
+ out.WriteString(se.Value.String())
+ out.WriteString(")\n{\n")
+
+ for _, tmp := range se.Choices {
+ if tmp != nil {
+ out.WriteString(tmp.String())
+ }
+ }
+ out.WriteString("}\n")
+
+ return out.String()
+}
+
+type MethodExpression struct {
+ Expression
+ Token token.Token
+ Object Expression
+ Method Expression
+ Arguments []Expression
+}
+
+func (me *MethodExpression) expressionNode() {}
+func (me *MethodExpression) TokenLiteral() string { return me.Token.Literal }
+func (me *MethodExpression) String() string {
+ var out bytes.Buffer
+ out.WriteString(me.Object.String())
+ out.WriteString(".")
+ out.WriteString(me.Method.String())
+
+ return out.String()
+}
diff --git a/ast/ast_test.go b/src/ast/ast_test.go
similarity index 81%
rename from ast/ast_test.go
rename to src/ast/ast_test.go
index be9d6e3..c5ae5fa 100644
--- a/ast/ast_test.go
+++ b/src/ast/ast_test.go
@@ -10,7 +10,7 @@ func TestString(t *testing.T) {
program := &Program{
Statements: []Statement{
&LetStatement{
- Token: token.Token{Type: token.LET, Literal: "acha"},
+ Token: token.Token{Type: token.LET, Literal: "fanya"},
Name: &Identifier{
Token: token.Token{Type: token.IDENT, Literal: "myVar"},
Value: "myVar",
@@ -23,7 +23,7 @@ func TestString(t *testing.T) {
},
}
- if program.String() != "acha myVar = anotherVar;" {
+ if program.String() != "fanya myVar = anotherVar;" {
t.Errorf("program.String() wrong. got=%q", program.String())
}
}
diff --git a/src/evaluator/bang.go b/src/evaluator/bang.go
new file mode 100644
index 0000000..de9b340
--- /dev/null
+++ b/src/evaluator/bang.go
@@ -0,0 +1,16 @@
+package evaluator
+
+import "github.com/AvicennaJr/Nuru/object"
+
+func evalBangOperatorExpression(right object.Object) object.Object {
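+	// Only FALSE and NULL negate to TRUE; every other value is treated as truthy and negates to FALSE.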
+ switch right {
+ case TRUE:
+ return FALSE
+ case FALSE:
+ return TRUE
+ case NULL:
+ return TRUE
+ default:
+ return FALSE
+ }
+}
\ No newline at end of file
diff --git a/src/evaluator/block.go b/src/evaluator/block.go
new file mode 100644
index 0000000..a7c9447
--- /dev/null
+++ b/src/evaluator/block.go
@@ -0,0 +1,23 @@
+package evaluator
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
+func evalBlockStatement(block *ast.BlockStatement, env *object.Environment) object.Object {
+ var result object.Object
+
+	for _, statement := range block.Statements {
+		result = Eval(statement, env)
+
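+		// Return, error, break and continue objects stop evaluation of the block and are passed up
+		// unchanged so the enclosing function or loop can handle them.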
+ if result != nil {
+ rt := result.Type()
+ if rt == object.RETURN_VALUE_OBJ || rt == object.ERROR_OBJ || rt == object.CONTINUE_OBJ || rt == object.BREAK_OBJ {
+ return result
+ }
+ }
+ }
+
+ return result
+}
diff --git a/src/evaluator/builtins.go b/src/evaluator/builtins.go
new file mode 100644
index 0000000..fbf9781
--- /dev/null
+++ b/src/evaluator/builtins.go
@@ -0,0 +1,199 @@
+package evaluator
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "math"
+ "os"
+ "strconv"
+ "strings"
+
+ "github.com/AvicennaJr/Nuru/object"
+)
+
+var builtins = map[string]*object.Builtin{
+ "idadi": {
+ Fn: func(args ...object.Object) object.Object {
+ if len(args) != 1 {
+ return newError("Hoja hazilingani, tunahitaji=1, tumepewa=%d", len(args))
+ }
+
+ switch arg := args[0].(type) {
+ case *object.Array:
+ return &object.Integer{Value: int64(len(arg.Elements))}
+ case *object.String:
+ return &object.Integer{Value: int64(len(arg.Value))}
+ default:
+ return newError("Samahani, hii function haitumiki na %s", args[0].Type())
+ }
+ },
+ },
+ "jumla": {
+ Fn: func(args ...object.Object) object.Object {
+ if len(args) != 1 {
+ return newError("Hoja hazilingani, tunahitaji=1, tumepewa=%d", len(args))
+ }
+
+ switch arg := args[0].(type) {
+ case *object.Array:
+
+ var sums float64
+ for _, num := range arg.Elements {
+
+ if num.Type() != object.INTEGER_OBJ && num.Type() != object.FLOAT_OBJ {
+ return newError("Samahani namba tu zinahitajika")
+ } else {
+ if num.Type() == object.INTEGER_OBJ {
+ no, _ := strconv.Atoi(num.Inspect())
+ floatnum := float64(no)
+ sums += floatnum
+ } else if num.Type() == object.FLOAT_OBJ {
+ no, _ := strconv.ParseFloat(num.Inspect(), 64)
+ sums += no
+ }
+
+ }
+ }
+
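+				// A whole-number total is returned as NAMBA (integer); otherwise as a float.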
+ if math.Mod(sums, 1) == 0 {
+ return &object.Integer{Value: int64(sums)}
+ }
+
+ return &object.Float{Value: float64(sums)}
+
+ default:
+ return newError("Samahani, hii function haitumiki na %s", args[0].Type())
+ }
+ },
+ },
+ "yamwisho": {
+ Fn: func(args ...object.Object) object.Object {
+ if len(args) != 1 {
+ return newError("Samahani, tunahitaji Hoja moja tu, wewe umeweka %d", len(args))
+ }
+ if args[0].Type() != object.ARRAY_OBJ {
+ return newError("Samahani, hii function haitumiki na %s", args[0].Type())
+ }
+
+ arr := args[0].(*object.Array)
+ length := len(arr.Elements)
+ if length > 0 {
+ return arr.Elements[length-1]
+ }
+
+ return NULL
+ },
+ },
+ "sukuma": {
+ Fn: func(args ...object.Object) object.Object {
+ if len(args) != 2 {
+ return newError("Samahani, tunahitaji Hoja 2, wewe umeweka %d", len(args))
+ }
+ if args[0].Type() != object.ARRAY_OBJ {
+ return newError("Samahani, hii function haitumiki na %s", args[0].Type())
+ }
+
+ arr := args[0].(*object.Array)
+ length := len(arr.Elements)
+
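+			// Copy into a new array so the caller's original array is not mutated.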
+ newElements := make([]object.Object, length+1)
+ copy(newElements, arr.Elements)
+ newElements[length] = args[1]
+
+ return &object.Array{Elements: newElements}
+ },
+ },
+ "jaza": {
+ Fn: func(args ...object.Object) object.Object {
+
+ if len(args) > 1 {
+ return newError("Samahani, hii function inapokea hoja 0 au 1, wewe umeweka %d", len(args))
+ }
+
+ if len(args) > 0 && args[0].Type() != object.STRING_OBJ {
+ return newError(fmt.Sprintf(`Tafadhali tumia alama ya nukuu: "%s"`, args[0].Inspect()))
+ }
+ if len(args) == 1 {
+ prompt := args[0].(*object.String).Value
+ fmt.Fprint(os.Stdout, prompt)
+ }
+
+ buffer := bufio.NewReader(os.Stdin)
+
+ line, _, err := buffer.ReadLine()
+ if err != nil && err != io.EOF {
+ return newError("Nimeshindwa kusoma uliyo yajaza")
+ }
+
+ return &object.String{Value: string(line)}
+ },
+ },
+ "andika": {
+ Fn: func(args ...object.Object) object.Object {
+ if len(args) == 0 {
+ fmt.Println("")
+ } else {
+ var arr []string
+ for _, arg := range args {
+ if arg == nil {
+ return newError("Hauwezi kufanya operesheni hii")
+ }
+ arr = append(arr, arg.Inspect())
+ }
+ str := strings.Join(arr, " ")
+ print(str + "\n")
+ }
+ return nil
+ },
+ },
+ "aina": {
+ Fn: func(args ...object.Object) object.Object {
+ if len(args) != 1 {
+ return newError("Samahani, tunahitaji Hoja 1, wewe umeweka %d", len(args))
+ }
+
+ return &object.String{Value: string(args[0].Type())}
+ },
+ },
+ "fungua": {
+ Fn: func(args ...object.Object) object.Object {
+
+			if len(args) == 0 || len(args) > 2 {
+				return newError("Samahani, tunahitaji Hoja 1 au 2, wewe umeweka %d", len(args))
+			}
+			if args[0].Type() != object.STRING_OBJ {
+				return newError("Samahani, hii function haitumiki na %s", args[0].Type())
+			}
+			filename := args[0].(*object.String).Value
+ mode := os.O_RDONLY
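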
+ if len(args) == 2 {
+ fileMode := args[1].(*object.String).Value
+ switch fileMode {
+ case "r":
+ mode = os.O_RDONLY
+ // still buggy, will work on this soon
+ // case "w":
+ // mode = os.O_WRONLY
+ // err := os.Remove(filename)
+ // if err != nil {
+ // return &object.Null{}
+ // }
+ // case "a":
+ // mode = os.O_APPEND
+ default:
+ return newError("Tumeshindwa kufungua file na mode %s", fileMode)
+ }
+ }
+ file, err := os.OpenFile(filename, os.O_CREATE|mode, 0644)
+ if err != nil {
+ return &object.Null{}
+ }
+ var reader *bufio.Reader
+ var writer *bufio.Writer
+ if mode == os.O_RDONLY {
+ reader = bufio.NewReader(file)
+ } else {
+ writer = bufio.NewWriter(file)
+ }
+ return &object.File{Filename: filename, Reader: reader, Writer: writer, Handle: file}
+ },
+ },
+}
diff --git a/src/evaluator/dict.go b/src/evaluator/dict.go
new file mode 100644
index 0000000..05bbdcb
--- /dev/null
+++ b/src/evaluator/dict.go
@@ -0,0 +1,32 @@
+package evaluator
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
+func evalDictLiteral(node *ast.DictLiteral, env *object.Environment) object.Object {
+ pairs := make(map[object.HashKey]object.DictPair)
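+	// Every key must be hashable (implement object.Hashable); anything else is reported as an error below.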
+
+ for keyNode, valueNode := range node.Pairs {
+ key := Eval(keyNode, env)
+ if isError(key) {
+ return key
+ }
+
+ hashKey, ok := key.(object.Hashable)
+ if !ok {
+ return newError("Mstari %d: Hashing imeshindikana: %s", node.Token.Line, key.Type())
+ }
+
+ value := Eval(valueNode, env)
+ if isError(value) {
+ return value
+ }
+
+ hashed := hashKey.HashKey()
+ pairs[hashed] = object.DictPair{Key: key, Value: value}
+ }
+
+ return &object.Dict{Pairs: pairs}
+}
\ No newline at end of file
diff --git a/src/evaluator/evaluator.go b/src/evaluator/evaluator.go
new file mode 100644
index 0000000..b7bb4a5
--- /dev/null
+++ b/src/evaluator/evaluator.go
@@ -0,0 +1,384 @@
+package evaluator
+
+import (
+ "fmt"
+
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
+var (
+ NULL = &object.Null{}
+ TRUE = &object.Boolean{Value: true}
+ FALSE = &object.Boolean{Value: false}
+ BREAK = &object.Break{}
+ CONTINUE = &object.Continue{}
+)
+
+func Eval(node ast.Node, env *object.Environment) object.Object {
+ switch node := node.(type) {
+ case *ast.Program:
+ return evalProgram(node, env)
+
+ case *ast.ExpressionStatement:
+ return Eval(node.Expression, env)
+
+ case *ast.IntegerLiteral:
+ return &object.Integer{Value: node.Value}
+
+ case *ast.FloatLiteral:
+ return &object.Float{Value: node.Value}
+
+ case *ast.Boolean:
+ return nativeBoolToBooleanObject(node.Value)
+
+ case *ast.PrefixExpression:
+ right := Eval(node.Right, env)
+ if isError(right) {
+ return right
+ }
+ return evalPrefixExpression(node.Operator, right, node.Token.Line)
+
+ case *ast.InfixExpression:
+ left := Eval(node.Left, env)
+ if isError(left) {
+ return left
+ }
+ right := Eval(node.Right, env)
+ if isError(right) {
+ return right
+ }
+ return evalInfixExpression(node.Operator, left, right, node.Token.Line)
+ case *ast.PostfixExpression:
+ return evalPostfixExpression(env, node.Operator, node)
+
+ case *ast.BlockStatement:
+ return evalBlockStatement(node, env)
+
+ case *ast.IfExpression:
+ return evalIfExpression(node, env)
+
+ case *ast.ReturnStatement:
+ val := Eval(node.ReturnValue, env)
+ if isError(val) {
+ return val
+ }
+ return &object.ReturnValue{Value: val}
+
+ case *ast.LetStatement:
+ val := Eval(node.Value, env)
+ if isError(val) {
+ return val
+ }
+
+ env.Set(node.Name.Value, val)
+
+ case *ast.Identifier:
+ return evalIdentifier(node, env)
+
+ case *ast.FunctionLiteral:
+ params := node.Parameters
+ body := node.Body
+ return &object.Function{Parameters: params, Env: env, Body: body}
+
+ case *ast.MethodExpression:
+ return evalMethodExpression(node, env)
+
+ case *ast.CallExpression:
+ function := Eval(node.Function, env)
+ if isError(function) {
+ return function
+ }
+ args := evalExpressions(node.Arguments, env)
+ if len(args) == 1 && isError(args[0]) {
+ return args[0]
+ }
+ return applyFunction(function, args, node.Token.Line)
+ case *ast.StringLiteral:
+ return &object.String{Value: node.Value}
+
+ case *ast.ArrayLiteral:
+ elements := evalExpressions(node.Elements, env)
+ if len(elements) == 1 && isError(elements[0]) {
+ return elements[0]
+ }
+ return &object.Array{Elements: elements}
+ case *ast.IndexExpression:
+ left := Eval(node.Left, env)
+ if isError(left) {
+ return left
+ }
+ index := Eval(node.Index, env)
+ if isError(index) {
+ return index
+ }
+ return evalIndexExpression(left, index, node.Token.Line)
+ case *ast.DictLiteral:
+ return evalDictLiteral(node, env)
+ case *ast.WhileExpression:
+ return evalWhileExpression(node, env)
+ case *ast.Break:
+ return evalBreak(node)
+ case *ast.Continue:
+ return evalContinue(node)
+ case *ast.SwitchExpression:
+ return evalSwitchStatement(node, env)
+ case *ast.Null:
+ return NULL
+ // case *ast.For:
+ // return evalForExpression(node, env)
+ case *ast.ForIn:
+ return evalForInExpression(node, env, node.Token.Line)
+ case *ast.AssignmentExpression:
+ left := Eval(node.Left, env)
+ if isError(left) {
+ return left
+ }
+
+ value := Eval(node.Value, env)
+ if isError(value) {
+ return value
+ }
+
+		// Compound assignment operators (+=, -= etc) are desugared here:
+		// split the token literal into its operator and "=", keep the operator part,
+		// evaluate it as a normal infix expression, then assign the result below.
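+		// e.g. for "x += 2" the token literal is "+=", so op becomes "+" and value evaluates to x + 2.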
+ op := node.Token.Literal
+ if len(op) >= 2 {
+ op = op[:len(op)-1]
+ value = evalInfixExpression(op, left, value, node.Token.Line)
+ if isError(value) {
+ return value
+ }
+ }
+
+ if ident, ok := node.Left.(*ast.Identifier); ok {
+ env.Set(ident.Value, value)
+ } else if ie, ok := node.Left.(*ast.IndexExpression); ok {
+ obj := Eval(ie.Left, env)
+ if isError(obj) {
+ return obj
+ }
+
+ if array, ok := obj.(*object.Array); ok {
+ index := Eval(ie.Index, env)
+ if isError(index) {
+ return index
+ }
+ if idx, ok := index.(*object.Integer); ok {
+ if int(idx.Value) > len(array.Elements) {
+					if idx.Value < 0 || int(idx.Value) >= len(array.Elements) {
+ }
+ array.Elements[idx.Value] = value
+ } else {
+ return newError("Hauwezi kufanya opereshen hii na %#v", index)
+ }
+ } else if hash, ok := obj.(*object.Dict); ok {
+ key := Eval(ie.Index, env)
+ if isError(key) {
+ return key
+ }
+ if hashKey, ok := key.(object.Hashable); ok {
+ hashed := hashKey.HashKey()
+ hash.Pairs[hashed] = object.DictPair{Key: key, Value: value}
+ } else {
+ return newError("Hauwezi kufanya opereshen hii na %T", key)
+ }
+ } else {
+ return newError("%T haifanyi operation hii", obj)
+ }
+ } else {
+ return newError("Tumia neno kama variable, sio %T", left)
+ }
+
+ }
+
+ return nil
+}
+
+func evalProgram(program *ast.Program, env *object.Environment) object.Object {
+ var result object.Object
+
+	for _, statement := range program.Statements {
+		result = Eval(statement, env)
+
+ switch result := result.(type) {
+ case *object.ReturnValue:
+ return result.Value
+ case *object.Error:
+ return result
+ }
+ }
+
+ return result
+}
+
+func nativeBoolToBooleanObject(input bool) *object.Boolean {
+ if input {
+ return TRUE
+ }
+ return FALSE
+}
+
+func isTruthy(obj object.Object) bool {
+ switch obj {
+ case NULL:
+ return false
+ case TRUE:
+ return true
+ case FALSE:
+ return false
+ default:
+ return true
+ }
+}
+
+func newError(format string, a ...interface{}) *object.Error {
+ format = fmt.Sprintf("\x1b[%dm%s\x1b[0m", 31, format)
+ return &object.Error{Message: fmt.Sprintf(format, a...)}
+}
+
+func isError(obj object.Object) bool {
+ if obj != nil {
+ return obj.Type() == object.ERROR_OBJ
+ }
+
+ return false
+}
+
+func evalExpressions(exps []ast.Expression, env *object.Environment) []object.Object {
+ var result []object.Object
+
+ for _, e := range exps {
+ evaluated := Eval(e, env)
+ if isError(evaluated) {
+ return []object.Object{evaluated}
+ }
+
+ result = append(result, evaluated)
+ }
+
+ return result
+}
+
+func applyFunction(fn object.Object, args []object.Object, line int) object.Object {
+ switch fn := fn.(type) {
+ case *object.Function:
+ extendedEnv := extendedFunctionEnv(fn, args)
+ evaluated := Eval(fn.Body, extendedEnv)
+ return unwrapReturnValue(evaluated)
+ case *object.Builtin:
+ if result := fn.Fn(args...); result != nil {
+ return result
+ }
+ return NULL
+ default:
+ return newError("Mstari %d: Hii sio function: %s", line, fn.Type())
+ }
+
+}
+
+func extendedFunctionEnv(fn *object.Function, args []object.Object) *object.Environment {
+ env := object.NewEnclosedEnvironment(fn.Env)
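+	// Each call runs in a fresh environment that encloses the function's defining environment,
+	// which is what makes closures work.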
+
+ for paramIdx, param := range fn.Parameters {
+ if paramIdx < len(args) {
+ env.Set(param.Value, args[paramIdx])
+ }
+ }
+ return env
+}
+
+func unwrapReturnValue(obj object.Object) object.Object {
+ if returnValue, ok := obj.(*object.ReturnValue); ok {
+ return returnValue.Value
+ }
+
+ return obj
+}
+
+func evalBreak(node *ast.Break) object.Object {
+ return BREAK
+}
+
+func evalContinue(node *ast.Continue) object.Object {
+ return CONTINUE
+}
+
+// func evalForExpression(fe *ast.For, env *object.Environment) object.Object {
+// obj, ok := env.Get(fe.Identifier)
+// defer func() { // stay safe and not reassign an existing variable
+// if ok {
+// env.Set(fe.Identifier, obj)
+// }
+// }()
+// val := Eval(fe.StarterValue, env)
+// if isError(val) {
+// return val
+// }
+
+// env.Set(fe.StarterName.Value, val)
+
+// // err := Eval(fe.Starter, env)
+// // if isError(err) {
+// // return err
+// // }
+// for {
+// evaluated := Eval(fe.Condition, env)
+// if isError(evaluated) {
+// return evaluated
+// }
+// if !isTruthy(evaluated) {
+// break
+// }
+// res := Eval(fe.Block, env)
+// if isError(res) {
+// return res
+// }
+// if res.Type() == object.BREAK_OBJ {
+// break
+// }
+// if res.Type() == object.CONTINUE_OBJ {
+// err := Eval(fe.Closer, env)
+// if isError(err) {
+// return err
+// }
+// continue
+// }
+// if res.Type() == object.RETURN_VALUE_OBJ {
+// return res
+// }
+// err := Eval(fe.Closer, env)
+// if isError(err) {
+// return err
+// }
+// }
+// return NULL
+// }
+
+func loopIterable(next func() (object.Object, object.Object), env *object.Environment, fi *ast.ForIn) object.Object {
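+	// Repeatedly pull (key, value) pairs from next(), bind them to the loop variables, and run the block;
+	// a break object stops the loop, continue skips to the next pair, and a return value propagates out.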
+ k, v := next()
+ for k != nil && v != nil {
+ env.Set(fi.Key, k)
+ env.Set(fi.Value, v)
+ res := Eval(fi.Block, env)
+ if isError(res) {
+ return res
+ }
+ if res != nil {
+ if res.Type() == object.BREAK_OBJ {
+ break
+ }
+ if res.Type() == object.CONTINUE_OBJ {
+ k, v = next()
+ continue
+ }
+ if res.Type() == object.RETURN_VALUE_OBJ {
+ return res
+ }
+ }
+ k, v = next()
+ }
+ return NULL
+}
diff --git a/evaluator/evaluator_test.go b/src/evaluator/evaluator_test.go
similarity index 77%
rename from evaluator/evaluator_test.go
rename to src/evaluator/evaluator_test.go
index e2d9771..4bd2201 100644
--- a/evaluator/evaluator_test.go
+++ b/src/evaluator/evaluator_test.go
@@ -102,6 +102,22 @@ func testIntegerObject(t *testing.T, obj object.Object, expected int64) bool {
return true
}
+func testFloatObject(t *testing.T, obj object.Object, expected float64) bool {
+ result, ok := obj.(*object.Float)
+
+ if !ok {
+ t.Errorf("Object is not Float, got=%T(%+v)", obj, obj)
+ return false
+ }
+
+ if result.Value != expected {
+ t.Errorf("object has wrong value. got=%f, want=%f", result.Value, expected)
+ return false
+ }
+
+ return true
+}
+
func testBooleanObject(t *testing.T, obj object.Object, expected bool) bool {
result, ok := obj.(*object.Boolean)
if !ok {
@@ -174,27 +190,27 @@ func TestErrorHandling(t *testing.T) {
}{
{
"5 + kweli",
- "Aina Hazilingani: NAMBA + BOOLEAN",
+ "Mstari 0: Aina Hazilingani: NAMBA + BOOLEAN",
},
{
"5 + kweli; 5;",
- "Aina Hazilingani: NAMBA + BOOLEAN",
+ "Mstari 0: Aina Hazilingani: NAMBA + BOOLEAN",
},
{
"-kweli",
- "Operesheni Haielweki: -BOOLEAN",
+ "Mstari 0: Operesheni Haielweki: -BOOLEAN",
},
{
"kweli + sikweli",
- "Operesheni Haielweki: BOOLEAN + BOOLEAN",
+ "Mstari 0: Operesheni Haielweki: BOOLEAN + BOOLEAN",
},
{
"5; kweli + sikweli; 5",
- "Operesheni Haielweki: BOOLEAN + BOOLEAN",
+ "Mstari 0: Operesheni Haielweki: BOOLEAN + BOOLEAN",
},
{
"kama (10 > 1) { kweli + sikweli;}",
- "Operesheni Haielweki: BOOLEAN + BOOLEAN",
+ "Mstari 0: Operesheni Haielweki: BOOLEAN + BOOLEAN",
},
{
`
@@ -206,19 +222,19 @@ kama (10 > 1) {
rudisha 1;
}
`,
- "Operesheni Haielweki: BOOLEAN + BOOLEAN",
+ "Mstari 3: Operesheni Haielweki: BOOLEAN + BOOLEAN",
},
{
"bangi",
- "Neno Halifahamiki: bangi",
+ "Mstari 0: Neno Halifahamiki: bangi",
},
{
`"Habari" - "Habari"`,
- "Operesheni Haielweki: NENO - NENO",
+ "Mstari 0: Operesheni Haielweki: NENO - NENO",
},
{
- `{"jina": "Avi"}[fn(x) {x}];`,
- "Samahani, FUNCTION haitumiki kama key",
+ `{"jina": "Avi"}[unda(x) {x}];`,
+ "Mstari 0: Samahani, UNDO (FUNCTION) haitumiki kama key",
},
}
@@ -232,7 +248,7 @@ kama (10 > 1) {
}
if errObj.Message != fmt.Sprintf("\x1b[%dm%s\x1b[0m", 31, tt.expectedMessage) {
- t.Errorf("wrong error message, expected=%q, got=%q", tt.expectedMessage, errObj.Message)
+ t.Errorf("wrong error message, expected=%q, got=%q", fmt.Sprintf("\x1b[%dm%s\x1b[0m", 31, tt.expectedMessage), errObj.Message)
}
}
}
@@ -242,10 +258,10 @@ func TestLetStatement(t *testing.T) {
input string
expected int64
}{
- {"acha a = 5; a;", 5},
- {"acha a = 5 * 5; a;", 25},
- {"acha a = 5; acha b = a; b;", 5},
- {"acha a = 5; acha b = a; acha c = a + b + 5; c;", 15},
+ {"fanya a = 5; a;", 5},
+ {"fanya a = 5 * 5; a;", 25},
+ {"fanya a = 5; fanya b = a; b;", 5},
+ {"fanya a = 5; fanya b = a; fanya c = a + b + 5; c;", 15},
}
for _, tt := range tests {
@@ -254,26 +270,26 @@ func TestLetStatement(t *testing.T) {
}
func TestFunctionObject(t *testing.T) {
- input := "fn(x) { x + 2 ;};"
+ input := "unda(x) { x + 2 ;};"
evaluated := testEval(input)
- fn, ok := evaluated.(*object.Function)
+ unda, ok := evaluated.(*object.Function)
if !ok {
t.Fatalf("object is not a Function, got=%T(%+v)", evaluated, evaluated)
}
- if len(fn.Parameters) != 1 {
- t.Fatalf("function haas wrong paramters,Parameters=%+v", fn.Parameters)
+ if len(unda.Parameters) != 1 {
+		t.Fatalf("function has wrong parameters, Parameters=%+v", unda.Parameters)
}
- if fn.Parameters[0].String() != "x" {
- t.Fatalf("parameter is not x, got=%q", fn.Parameters[0])
+ if unda.Parameters[0].String() != "x" {
+ t.Fatalf("parameter is not x, got=%q", unda.Parameters[0])
}
expectedBody := "(x + 2)"
- if fn.Body.String() != expectedBody {
- t.Fatalf("body is not %q, got=%q", expectedBody, fn.Body.String())
+ if unda.Body.String() != expectedBody {
+ t.Fatalf("body is not %q, got=%q", expectedBody, unda.Body.String())
}
}
@@ -282,12 +298,12 @@ func TestFunctionApplication(t *testing.T) {
input string
expected int64
}{
- {"acha mfano = fn(x) {x;}; mfano(5);", 5},
- {"acha mfano = fn(x) {rudisha x;}; mfano(5);", 5},
- {"acha double = fn(x) { x * 2;}; double(5);", 10},
- {"acha add = fn(x, y) {x + y;}; add(5,5);", 10},
- {"acha add = fn(x, y) {x + y;}; add(5 + 5, add(5, 5));", 20},
- {"fn(x) {x;}(5)", 5},
+ {"fanya mfano = unda(x) {x;}; mfano(5);", 5},
+ {"fanya mfano = unda(x) {rudisha x;}; mfano(5);", 5},
+ {"fanya double = unda(x) { x * 2;}; double(5);", 10},
+ {"fanya add = unda(x, y) {x + y;}; add(5,5);", 10},
+ {"fanya add = unda(x, y) {x + y;}; add(5 + 5, add(5, 5));", 20},
+ {"unda(x) {x;}(5)", 5},
}
for _, tt := range tests {
@@ -297,11 +313,11 @@ func TestFunctionApplication(t *testing.T) {
func TestClosures(t *testing.T) {
input := `
-acha newAdder = fn(x) {
- fn(y) { x + y};
+fanya newAdder = unda(x) {
+ unda(y) { x + y};
};
-acha addTwo = newAdder(2);
+fanya addTwo = newAdder(2);
addTwo(2);
`
testIntegerObject(t, testEval(input), 4)
@@ -346,14 +362,24 @@ func TestBuiltinFunctions(t *testing.T) {
{`idadi("hello world")`, 11},
{`idadi(1)`, "Samahani, hii function haitumiki na NAMBA"},
{`idadi("one", "two")`, "Hoja hazilingani, tunahitaji=1, tumepewa=2"},
+ {`jumla()`, "Hoja hazilingani, tunahitaji=1, tumepewa=0"},
+ {`jumla("")`, "Samahani, hii function haitumiki na NENO"},
+ {`jumla(1)`, "Samahani, hii function haitumiki na NAMBA"},
+ {`jumla([1,2,3])`, 6},
+ {`jumla([1,2,3.4])`, 6.4},
+ {`jumla([1.1,2.5,3.4])`, 7},
+ {`jumla([1.1,2.5,"q"])`, "Samahani namba tu zinahitajika"},
}
for _, tt := range tests {
evaluated := testEval(tt.input)
-
+
switch expected := tt.expected.(type) {
case int:
testIntegerObject(t, evaluated, int64(expected))
+ case float64:
+ testFloatObject(t, evaluated, float64(expected))
+
case string:
errObj, ok := evaluated.(*object.Error)
if !ok {
@@ -403,11 +429,11 @@ func TestArrayIndexExpressions(t *testing.T) {
3,
},
{
- "acha i = 0; [1][i];",
+ "fanya i = 0; [1][i];",
1,
},
{
- "acha myArr = [1, 2, 3]; myArr[2];",
+ "fanya myArr = [1, 2, 3]; myArr[2];",
3,
},
{
@@ -432,7 +458,7 @@ func TestArrayIndexExpressions(t *testing.T) {
}
func TestDictLiterals(t *testing.T) {
- input := `acha two = "two";
+ input := `fanya two = "two";
{
"one": 10 - 9,
two: 1 +1,
@@ -485,7 +511,7 @@ func TestDictIndexExpression(t *testing.T) {
nil,
},
{
- `acha key = "foo"; {"foo": 5}[key]`,
+ `fanya key = "foo"; {"foo": 5}[key]`,
5,
},
{
diff --git a/src/evaluator/forin.go b/src/evaluator/forin.go
new file mode 100644
index 0000000..28db0f5
--- /dev/null
+++ b/src/evaluator/forin.go
@@ -0,0 +1,29 @@
+package evaluator
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
+func evalForInExpression(fie *ast.ForIn, env *object.Environment, line int) object.Object {
+ iterable := Eval(fie.Iterable, env)
+	existingKeyIdentifier, okk := env.Get(fie.Key) // remember any identifier the loop key would shadow
+ existingValueIdentifier, okv := env.Get(fie.Value)
+ defer func() { // restore them later on
+ if okk {
+ env.Set(fie.Key, existingKeyIdentifier)
+ }
+ if okv {
+ env.Set(fie.Value, existingValueIdentifier)
+ }
+ }()
+ switch i := iterable.(type) {
+ case object.Iterable:
+ defer func() {
+ i.Reset()
+ }()
+ return loopIterable(i.Next, env, fie)
+ default:
+ return newError("Mstari %d: Huwezi kufanya operesheni hii na %s", line, i.Type())
+ }
+}
\ No newline at end of file
diff --git a/src/evaluator/identifier.go b/src/evaluator/identifier.go
new file mode 100644
index 0000000..7745a23
--- /dev/null
+++ b/src/evaluator/identifier.go
@@ -0,0 +1,17 @@
+package evaluator
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
+func evalIdentifier(node *ast.Identifier, env *object.Environment) object.Object {
+ if val, ok := env.Get(node.Value); ok {
+ return val
+ }
+ if builtin, ok := builtins[node.Value]; ok {
+ return builtin
+ }
+
+ return newError("Mstari %d: Neno Halifahamiki: %s", node.Token.Line, node.Value)
+}
\ No newline at end of file
diff --git a/src/evaluator/if.go b/src/evaluator/if.go
new file mode 100644
index 0000000..890f24f
--- /dev/null
+++ b/src/evaluator/if.go
@@ -0,0 +1,22 @@
+package evaluator
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
+func evalIfExpression(ie *ast.IfExpression, env *object.Environment) object.Object {
+ condition := Eval(ie.Condition, env)
+
+ if isError(condition) {
+ return condition
+ }
+
+ if isTruthy(condition) {
+ return Eval(ie.Consequence, env)
+ } else if ie.Alternative != nil {
+ return Eval(ie.Alternative, env)
+ } else {
+ return NULL
+ }
+}
\ No newline at end of file
diff --git a/src/evaluator/in.go b/src/evaluator/in.go
new file mode 100644
index 0000000..446b35c
--- /dev/null
+++ b/src/evaluator/in.go
@@ -0,0 +1,81 @@
+package evaluator
+
+import (
+ "strings"
+
+ "github.com/AvicennaJr/Nuru/object"
+)
+
+func evalInExpression(left, right object.Object, line int) object.Object {
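+	// Implements the "ktk" (membership) operator by dispatching on the type of the right-hand operand.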
+ switch right.(type) {
+ case *object.String:
+ return evalInStringExpression(left, right)
+ case *object.Array:
+ return evalInArrayExpression(left, right)
+ case *object.Dict:
+ return evalInDictExpression(left, right, line)
+ default:
+ return FALSE
+ }
+}
+
+func evalInStringExpression(left, right object.Object) object.Object {
+ if left.Type() != object.STRING_OBJ {
+ return FALSE
+ }
+ leftVal := left.(*object.String)
+ rightVal := right.(*object.String)
+ found := strings.Contains(rightVal.Value, leftVal.Value)
+ return nativeBoolToBooleanObject(found)
+}
+
+func evalInDictExpression(left, right object.Object, line int) object.Object {
+ leftVal, ok := left.(object.Hashable)
+ if !ok {
+ return newError("Mstari %d: Huwezi kutumia kama 'key': %s", line, left.Type())
+ }
+ key := leftVal.HashKey()
+ rightVal := right.(*object.Dict).Pairs
+ _, ok = rightVal[key]
+ return nativeBoolToBooleanObject(ok)
+}
+
+func evalInArrayExpression(left, right object.Object) object.Object {
+ rightVal := right.(*object.Array)
+ switch leftVal := left.(type) {
+ case *object.Null:
+ for _, v := range rightVal.Elements {
+ if v.Type() == object.NULL_OBJ {
+ return TRUE
+ }
+ }
+ case *object.String:
+ for _, v := range rightVal.Elements {
+ if v.Type() == object.STRING_OBJ {
+ elem := v.(*object.String)
+ if elem.Value == leftVal.Value {
+ return TRUE
+ }
+ }
+ }
+ case *object.Integer:
+ for _, v := range rightVal.Elements {
+ if v.Type() == object.INTEGER_OBJ {
+ elem := v.(*object.Integer)
+ if elem.Value == leftVal.Value {
+ return TRUE
+ }
+ }
+ }
+ case *object.Float:
+ for _, v := range rightVal.Elements {
+ if v.Type() == object.FLOAT_OBJ {
+ elem := v.(*object.Float)
+ if elem.Value == leftVal.Value {
+ return TRUE
+ }
+ }
+ }
+ }
+ return FALSE
+}
diff --git a/src/evaluator/index.go b/src/evaluator/index.go
new file mode 100644
index 0000000..07945f0
--- /dev/null
+++ b/src/evaluator/index.go
@@ -0,0 +1,44 @@
+package evaluator
+
+import "github.com/AvicennaJr/Nuru/object"
+
+func evalIndexExpression(left, index object.Object, line int) object.Object {
+ switch {
+ case left.Type() == object.ARRAY_OBJ && index.Type() == object.INTEGER_OBJ:
+ return evalArrayIndexExpression(left, index)
+ case left.Type() == object.ARRAY_OBJ && index.Type() != object.INTEGER_OBJ:
+ return newError("Mstari %d: Tafadhali tumia number, sio: %s", line, index.Type())
+ case left.Type() == object.DICT_OBJ:
+ return evalDictIndexExpression(left, index, line)
+ default:
+ return newError("Mstari %d: Operesheni hii haiwezekani kwa: %s", line, left.Type())
+ }
+}
+
+func evalArrayIndexExpression(array, index object.Object) object.Object {
+ arrayObject := array.(*object.Array)
+ idx := index.(*object.Integer).Value
+ max := int64(len(arrayObject.Elements) - 1)
+
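+	// An out-of-range index evaluates to null rather than raising an error.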
+ if idx < 0 || idx > max {
+ return NULL
+ }
+
+ return arrayObject.Elements[idx]
+}
+
+func evalDictIndexExpression(dict, index object.Object, line int) object.Object {
+ dictObject := dict.(*object.Dict)
+
+ key, ok := index.(object.Hashable)
+ if !ok {
+ return newError("Mstari %d: Samahani, %s haitumiki kama key", line, index.Type())
+ }
+
+ pair, ok := dictObject.Pairs[key.HashKey()]
+ if !ok {
+ return NULL
+ }
+
+ return pair.Value
+}
\ No newline at end of file
diff --git a/src/evaluator/infix.go b/src/evaluator/infix.go
new file mode 100644
index 0000000..c20706a
--- /dev/null
+++ b/src/evaluator/infix.go
@@ -0,0 +1,246 @@
+package evaluator
+
+import (
+ "math"
+ "strings"
+
+ "github.com/AvicennaJr/Nuru/object"
+)
+
+func evalInfixExpression(operator string, left, right object.Object, line int) object.Object {
+ if left == nil {
+ return newError("Mstari %d: Umekosea hapa", line)
+ }
+ switch {
+
+ case operator == "ktk":
+ return evalInExpression(left, right, line)
+
+ case left.Type() == object.STRING_OBJ && right.Type() == object.STRING_OBJ:
+ return evalStringInfixExpression(operator, left, right, line)
+
+ case operator == "+" && left.Type() == object.DICT_OBJ && right.Type() == object.DICT_OBJ:
+ leftVal := left.(*object.Dict).Pairs
+ rightVal := right.(*object.Dict).Pairs
+ pairs := make(map[object.HashKey]object.DictPair)
+ for k, v := range leftVal {
+ pairs[k] = v
+ }
+ for k, v := range rightVal {
+ pairs[k] = v
+ }
+ return &object.Dict{Pairs: pairs}
+
+ case operator == "+" && left.Type() == object.ARRAY_OBJ && right.Type() == object.ARRAY_OBJ:
+ leftVal := left.(*object.Array).Elements
+ rightVal := right.(*object.Array).Elements
+ elements := append(leftVal, rightVal...)
+ return &object.Array{Elements: elements}
+
+ case operator == "*" && left.Type() == object.ARRAY_OBJ && right.Type() == object.INTEGER_OBJ:
+ leftVal := left.(*object.Array).Elements
+ rightVal := int(right.(*object.Integer).Value)
+ elements := leftVal
+ for i := rightVal; i > 1; i-- {
+ elements = append(elements, leftVal...)
+ }
+ return &object.Array{Elements: elements}
+
+ case operator == "*" && left.Type() == object.INTEGER_OBJ && right.Type() == object.ARRAY_OBJ:
+ leftVal := int(left.(*object.Integer).Value)
+ rightVal := right.(*object.Array).Elements
+ elements := rightVal
+ for i := leftVal; i > 1; i-- {
+ elements = append(elements, rightVal...)
+ }
+ return &object.Array{Elements: elements}
+
+ case operator == "*" && left.Type() == object.STRING_OBJ && right.Type() == object.INTEGER_OBJ:
+ leftVal := left.(*object.String).Value
+ rightVal := right.(*object.Integer).Value
+ return &object.String{Value: strings.Repeat(leftVal, int(rightVal))}
+
+ case operator == "*" && left.Type() == object.INTEGER_OBJ && right.Type() == object.STRING_OBJ:
+ leftVal := left.(*object.Integer).Value
+ rightVal := right.(*object.String).Value
+ return &object.String{Value: strings.Repeat(rightVal, int(leftVal))}
+
+ case left.Type() == object.INTEGER_OBJ && right.Type() == object.INTEGER_OBJ:
+ return evalIntegerInfixExpression(operator, left, right, line)
+
+ case left.Type() == object.FLOAT_OBJ && right.Type() == object.FLOAT_OBJ:
+ return evalFloatInfixExpression(operator, left, right, line)
+
+ case left.Type() == object.INTEGER_OBJ && right.Type() == object.FLOAT_OBJ:
+ return evalFloatIntegerInfixExpression(operator, left, right, line)
+
+ case left.Type() == object.FLOAT_OBJ && right.Type() == object.INTEGER_OBJ:
+ return evalFloatIntegerInfixExpression(operator, left, right, line)
+
+ case operator == "==":
+ return nativeBoolToBooleanObject(left == right)
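+			// Only read mode ("r") is supported for now; write ("w") and append ("a") are commented out below until fixed.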
+
+ case operator == "!=":
+ return nativeBoolToBooleanObject(left != right)
+ case left.Type() == object.BOOLEAN_OBJ && right.Type() == object.BOOLEAN_OBJ:
+ return evalBooleanInfixExpression(operator, left, right, line)
+
+ case left.Type() != right.Type():
+ return newError("Mstari %d: Aina Hazilingani: %s %s %s",
+ line, left.Type(), operator, right.Type())
+
+ default:
+ return newError("Mstari %d: Operesheni Haielweki: %s %s %s",
+ line, left.Type(), operator, right.Type())
+ }
+}
+
+func evalFloatIntegerInfixExpression(operator string, left, right object.Object, line int) object.Object {
+ var leftVal, rightVal float64
+ if left.Type() == object.FLOAT_OBJ {
+ leftVal = left.(*object.Float).Value
+ rightVal = float64(right.(*object.Integer).Value)
+ } else {
+ leftVal = float64(left.(*object.Integer).Value)
+ rightVal = right.(*object.Float).Value
+ }
+
+ var val float64
+ switch operator {
+ case "+":
+ val = leftVal + rightVal
+ case "-":
+ val = leftVal - rightVal
+ case "*":
+ val = leftVal * rightVal
+ case "**":
+ val = math.Pow(float64(leftVal), float64(rightVal))
+ case "/":
+ val = leftVal / rightVal
+ case "<":
+ return nativeBoolToBooleanObject(leftVal < rightVal)
+ case "<=":
+ return nativeBoolToBooleanObject(leftVal <= rightVal)
+ case ">":
+ return nativeBoolToBooleanObject(leftVal > rightVal)
+ case ">=":
+ return nativeBoolToBooleanObject(leftVal >= rightVal)
+ case "==":
+ return nativeBoolToBooleanObject(leftVal == rightVal)
+ case "!=":
+ return nativeBoolToBooleanObject(leftVal != rightVal)
+ default:
+ return newError("Mstari %d: Operesheni Haielweki: %s %s %s",
+ line, left.Type(), operator, right.Type())
+ }
+
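+	// Collapse back to an integer object when the mixed int/float result is a whole number.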
+ if math.Mod(val, 1) == 0 {
+ return &object.Integer{Value: int64(val)}
+ } else {
+ return &object.Float{Value: val}
+ }
+}
+
+func evalStringInfixExpression(operator string, left, right object.Object, line int) object.Object {
+
+ leftVal := left.(*object.String).Value
+ rightVal := right.(*object.String).Value
+
+ switch operator {
+ case "+":
+ return &object.String{Value: leftVal + rightVal}
+ case "==":
+ return nativeBoolToBooleanObject(leftVal == rightVal)
+ case "!=":
+ return nativeBoolToBooleanObject(leftVal != rightVal)
+ default:
+ return newError("Mstari %d: Operesheni Haielweki: %s %s %s", line, left.Type(), operator, right.Type())
+ }
+}
+
+func evalBooleanInfixExpression(operator string, left, right object.Object, line int) object.Object {
+ leftVal := left.(*object.Boolean).Value
+ rightVal := right.(*object.Boolean).Value
+
+ switch operator {
+ case "&&":
+ return nativeBoolToBooleanObject(leftVal && rightVal)
+ case "||":
+ return nativeBoolToBooleanObject(leftVal || rightVal)
+ default:
+ return newError("Mstari %d: Operesheni Haielweki: %s %s %s", line, left.Type(), operator, right.Type())
+ }
+}
+
+func evalFloatInfixExpression(operator string, left, right object.Object, line int) object.Object {
+ leftVal := left.(*object.Float).Value
+ rightVal := right.(*object.Float).Value
+
+ switch operator {
+ case "+":
+ return &object.Float{Value: leftVal + rightVal}
+ case "-":
+ return &object.Float{Value: leftVal - rightVal}
+ case "*":
+ return &object.Float{Value: leftVal * rightVal}
+ case "**":
+ return &object.Float{Value: math.Pow(float64(leftVal), float64(rightVal))}
+ case "/":
+ return &object.Float{Value: leftVal / rightVal}
+ case "<":
+ return nativeBoolToBooleanObject(leftVal < rightVal)
+ case "<=":
+ return nativeBoolToBooleanObject(leftVal <= rightVal)
+ case ">":
+ return nativeBoolToBooleanObject(leftVal > rightVal)
+ case ">=":
+ return nativeBoolToBooleanObject(leftVal >= rightVal)
+ case "==":
+ return nativeBoolToBooleanObject(leftVal == rightVal)
+ case "!=":
+ return nativeBoolToBooleanObject(leftVal != rightVal)
+ default:
+ return newError("Mstari %d: Operesheni Haielweki: %s %s %s",
+ line, left.Type(), operator, right.Type())
+ }
+}
+
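+// evalIntegerInfixExpression covers NAMBA arithmetic and comparisons; `/`
+// produces a Float when the division is not exact, and `**` is computed with
+// math.Pow and truncated back to int64.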
+func evalIntegerInfixExpression(operator string, left, right object.Object, line int) object.Object {
+ leftVal := left.(*object.Integer).Value
+ rightVal := right.(*object.Integer).Value
+
+ switch operator {
+ case "+":
+ return &object.Integer{Value: leftVal + rightVal}
+ case "-":
+ return &object.Integer{Value: leftVal - rightVal}
+ case "*":
+ return &object.Integer{Value: leftVal * rightVal}
+ case "**":
+ return &object.Integer{Value: int64(math.Pow(float64(leftVal), float64(rightVal)))}
+ case "/":
+ x := float64(leftVal) / float64(rightVal)
+ if math.Mod(x, 1) == 0 {
+ return &object.Integer{Value: int64(x)}
+ } else {
+ return &object.Float{Value: x}
+ }
+ case "%":
+ return &object.Integer{Value: leftVal % rightVal}
+ case "<":
+ return nativeBoolToBooleanObject(leftVal < rightVal)
+ case "<=":
+ return nativeBoolToBooleanObject(leftVal <= rightVal)
+ case ">":
+ return nativeBoolToBooleanObject(leftVal > rightVal)
+ case ">=":
+ return nativeBoolToBooleanObject(leftVal >= rightVal)
+ case "==":
+ return nativeBoolToBooleanObject(leftVal == rightVal)
+ case "!=":
+ return nativeBoolToBooleanObject(leftVal != rightVal)
+ default:
+ return newError("Mstari %d: Operesheni Haielweki: %s %s %s",
+ line, left.Type(), operator, right.Type())
+ }
+}
diff --git a/src/evaluator/method.go b/src/evaluator/method.go
new file mode 100644
index 0000000..884ca85
--- /dev/null
+++ b/src/evaluator/method.go
@@ -0,0 +1,28 @@
+package evaluator
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
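+// evalMethodExpression evaluates the receiver object and its argument list,
+// then dispatches the named method; only String and File objects expose
+// methods (see applyMethod below).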
+func evalMethodExpression(node *ast.MethodExpression, env *object.Environment) object.Object {
+ obj := Eval(node.Object, env)
+ if isError(obj) {
+ return obj
+ }
+ args := evalExpressions(node.Arguments, env)
+ if len(args) == 1 && isError(args[0]) {
+ return args[0]
+ }
+ return applyMethod(obj, node.Method, args)
+}
+
+func applyMethod(obj object.Object, method ast.Expression, args []object.Object) object.Object {
+ switch obj := obj.(type) {
+ case *object.String:
+ return obj.Method(method.(*ast.Identifier).Value, args)
+ case *object.File:
+ return obj.Method(method.(*ast.Identifier).Value, args)
+ }
+ return newError("Samahani, %s haina function '%s()'", obj.Inspect(), method.(*ast.Identifier).Value)
+}
diff --git a/src/evaluator/postfix.go b/src/evaluator/postfix.go
new file mode 100644
index 0000000..ea90a88
--- /dev/null
+++ b/src/evaluator/postfix.go
@@ -0,0 +1,40 @@
+package evaluator
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
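+// evalPostfixExpression implements `++` and `--` on identifiers bound to an
+// Integer or Float, e.g. `fanya i = 2; i++`, by rebinding the name in the
+// current environment.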
+func evalPostfixExpression(env *object.Environment, operator string, node *ast.PostfixExpression) object.Object {
+ val, ok := env.Get(node.Token.Literal)
+ if !ok {
+ return newError("Tumia KITAMBULISHI CHA NAMBA AU DESIMALI, sio %s", node.Token.Type)
+ }
+ switch operator {
+ case "++":
+ switch arg := val.(type) {
+ case *object.Integer:
+ v := arg.Value + 1
+ return env.Set(node.Token.Literal, &object.Integer{Value: v})
+ case *object.Float:
+ v := arg.Value + 1
+ return env.Set(node.Token.Literal, &object.Float{Value: v})
+ default:
+ return newError("Mstari %d: %s sio kitambulishi cha namba. Tumia '++' na kitambulishi cha namba au desimali.\nMfano:\tfanya i = 2; i++", node.Token.Line, node.Token.Literal)
+
+ }
+ case "--":
+ switch arg := val.(type) {
+ case *object.Integer:
+ v := arg.Value - 1
+ return env.Set(node.Token.Literal, &object.Integer{Value: v})
+ case *object.Float:
+ v := arg.Value - 1
+ return env.Set(node.Token.Literal, &object.Float{Value: v})
+ default:
+			return newError("Mstari %d: %s sio kitambulishi cha namba. Tumia '--' na kitambulishi cha namba au desimali.\nMfano:\tfanya i = 2; i--", node.Token.Line, node.Token.Literal)
+ }
+ default:
+ return newError("Haifahamiki: %s", operator)
+ }
+}
\ No newline at end of file
diff --git a/src/evaluator/prefix.go b/src/evaluator/prefix.go
new file mode 100644
index 0000000..cec053b
--- /dev/null
+++ b/src/evaluator/prefix.go
@@ -0,0 +1,43 @@
+package evaluator
+
+import "github.com/AvicennaJr/Nuru/object"
+
+func evalMinusPrefixOperatorExpression(right object.Object, line int) object.Object {
+ switch obj := right.(type) {
+
+ case *object.Integer:
+ return &object.Integer{Value: -obj.Value}
+
+ case *object.Float:
+ return &object.Float{Value: -obj.Value}
+
+ default:
+ return newError("Mstari %d: Operesheni Haielweki: -%s", line, right.Type())
+ }
+}
+func evalPlusPrefixOperatorExpression(right object.Object, line int) object.Object {
+ switch obj := right.(type) {
+
+ case *object.Integer:
+ return &object.Integer{Value: obj.Value}
+
+ case *object.Float:
+ return &object.Float{Value: obj.Value}
+
+ default:
+		return newError("Mstari %d: Operesheni Haielweki: +%s", line, right.Type())
+ }
+}
+
+func evalPrefixExpression(operator string, right object.Object, line int) object.Object {
+ switch operator {
+ case "!":
+ return evalBangOperatorExpression(right)
+ case "-":
+ return evalMinusPrefixOperatorExpression(right, line)
+ case "+":
+ return evalPlusPrefixOperatorExpression(right, line)
+ default:
+ return newError("Mstari %d: Operesheni haieleweki: %s%s", line, operator, right.Type())
+ }
+}
diff --git a/src/evaluator/switch.go b/src/evaluator/switch.go
new file mode 100644
index 0000000..53fe6f4
--- /dev/null
+++ b/src/evaluator/switch.go
@@ -0,0 +1,30 @@
+package evaluator
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
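+// evalSwitchStatement makes two passes over the choices: the first pass runs
+// the block of the first non-default case whose value matches, and the second
+// pass falls back to the default case (if any) when nothing matched.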
+func evalSwitchStatement(se *ast.SwitchExpression, env *object.Environment) object.Object {
+ obj := Eval(se.Value, env)
+ for _, opt := range se.Choices {
+
+ if opt.Default {
+ continue
+ }
+ for _, val := range opt.Expr {
+ out := Eval(val, env)
+ if obj.Type() == out.Type() && obj.Inspect() == out.Inspect() {
+ blockOut := evalBlockStatement(opt.Block, env)
+ return blockOut
+ }
+ }
+ }
+ for _, opt := range se.Choices {
+ if opt.Default {
+ out := evalBlockStatement(opt.Block, env)
+ return out
+ }
+ }
+ return nil
+}
\ No newline at end of file
diff --git a/src/evaluator/while.go b/src/evaluator/while.go
new file mode 100644
index 0000000..bf40681
--- /dev/null
+++ b/src/evaluator/while.go
@@ -0,0 +1,24 @@
+package evaluator
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/object"
+)
+
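+// evalWhileExpression implements the `wakati` loop by recursion: it evaluates
+// the body once while the condition is truthy, stops on a break object or an
+// error, and otherwise re-enters itself for the next iteration.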
+func evalWhileExpression(we *ast.WhileExpression, env *object.Environment) object.Object {
+ condition := Eval(we.Condition, env)
+ if isError(condition) {
+ return condition
+ }
+ if isTruthy(condition) {
+ evaluated := Eval(we.Consequence, env)
+ if isError(evaluated) {
+ return evaluated
+ }
+ if evaluated != nil && evaluated.Type() == object.BREAK_OBJ {
+ return evaluated
+ }
+		return evalWhileExpression(we, env)
+ }
+ return NULL
+}
\ No newline at end of file
diff --git a/examples/example.nr b/src/examples/example.nr
similarity index 78%
rename from examples/example.nr
rename to src/examples/example.nr
index f8f2c79..7ebacef 100644
--- a/examples/example.nr
+++ b/src/examples/example.nr
@@ -1,8 +1,8 @@
andika("Testing basic types...");
andika(2 + 2);
andika(4 * 4);
-acha a = 10;
-acha b = 20;
+fanya a = 10;
+fanya b = 20;
andika(a + b);
@@ -15,23 +15,23 @@ andika("Mambo vipi");
andika("Testing Functions... ");
-acha jumlisha = fn(x, y) {x + y};
+fanya jumlisha = unda(x, y) {x + y};
andika(jumlisha(20,30));
andika(jumlisha(100,1000));
-acha zidisha = fn(x, y) {x * y};
+fanya zidisha = unda(x, y) {x * y};
andika(zidisha(100,1000));
andika(zidisha(200, 20));
// lists can hold any value
andika("Testing lists...");
-acha list = [1, "a", kweli, sikweli];
+fanya list = [1, "a", kweli, sikweli];
// a few builtins
-acha list = sukuma(list, jumlisha(4,5));
+fanya list = sukuma(list, jumlisha(4,5));
andika(list);
andika(list[2]);
@@ -57,7 +57,7 @@ kama (idadi("Habari") == 6) {
// fibonacci example
andika("Testing fibonacci...");
-acha fibo = fn(x) {
+fanya fibo = unda(x) {
kama (x == 0) {
rudisha 0;
} au kama (x == 1) {
@@ -72,8 +72,8 @@ andika(fibo(10));
// testing input
andika("Testing input from user...");
-acha salamu = fn() {
- acha jina = jaza("Unaitwa nani rafiki? ");
+fanya salamu = unda() {
+ fanya jina = jaza("Unaitwa nani rafiki? ");
rudisha "Mambo vipi " + jina;
}
@@ -87,7 +87,7 @@ Multiline comment
andika("Testing dictionaries...")
-acha watu = [{"jina": "Mojo", "kabila": "Mnyakusa"}, {"jina": "Avi", "kabila": "Mwarabu wa dubai"}]
+fanya watu = [{"jina": "Mojo", "kabila": "Mnyakusa"}, {"jina": "Avi", "kabila": "Mwarabu wa dubai"}]
andika(watu, watu[0], watu[0]["jina"], watu[0]["kabila"])
@@ -99,7 +99,7 @@ andika({"a":1} + {"b": 2})
andika("Testing while loop...");
-acha i = 10;
+fanya i = 10;
wakati (i > 0) {
andika(i);
diff --git a/src/examples/sorting_algorithm.nr b/src/examples/sorting_algorithm.nr
new file mode 100644
index 0000000..10d6131
--- /dev/null
+++ b/src/examples/sorting_algorithm.nr
@@ -0,0 +1,63 @@
+/*
+############ Sorting Algorithm ##############
+
+ By @VictorKariuki
+
+ https://github.com/VictorKariuki
+
+#############################################
+*/
+
+fanya slice = unda(arr,start, end) {
+ fanya result = []
+ wakati (start < end) {
+ result = result + [arr[start]]
+ start = start + 1
+ }
+ rudisha result
+}
+
+fanya merge = unda(left, right) {
+ fanya result = []
+ fanya lLen = idadi(left)
+ fanya rLen = idadi(right)
+ fanya l = 0
+ fanya r = 0
+ wakati (l < lLen && r < rLen) {
+ kama (left[l] < right[r]) {
+ result = result + [left[l]]
+ l = l + 1
+ } sivyo {
+ result = result + [right[r]]
+ r = r + 1
+ }
+ }
+    // append whatever remains in either half
+    wakati (l < lLen) {
+        result = result + [left[l]]
+        l = l + 1
+    }
+    wakati (r < rLen) {
+        result = result + [right[r]]
+        r = r + 1
+    }
+    rudisha result
+}
+
+
+fanya mergeSort = unda(arr){
+ fanya len = idadi(arr)
+ andika("arr is ", arr," of length ", len)
+ kama (len < 2) {
+ rudisha arr
+ }
+ andika("len is greater than or == to 2", len > 1)
+
+ fanya mid = (len / 2)
+ andika("arr has a mid point of ", mid)
+
+ fanya left = slice(arr, 0, mid)
+ fanya right = slice(arr, mid, len)
+ andika("left slice is ", left)
+ andika("right slice is ", right)
+ fanya sortedLeft = mergeSort(left)
+ fanya sortedRight = mergeSort(right)
+ andika("sortedLeft is ", sortedLeft)
+ andika("sortedRight is ", sortedRight)
+ rudisha merge(sortedLeft, sortedRight)
+}
+
+fanya arr = [6, 5, 3, 1, 8, 7, 2, 4]
+fanya sortedArray = mergeSort(arr)
+andika(sortedArray)
\ No newline at end of file
diff --git a/src/examples/sudoku_solver.nr b/src/examples/sudoku_solver.nr
new file mode 100644
index 0000000..de5d3a6
--- /dev/null
+++ b/src/examples/sudoku_solver.nr
@@ -0,0 +1,101 @@
+/*########### Backtracking Algorithm ##############
+
+ By @VictorKariuki
+
+ https://github.com/VictorKariuki
+
+NURU program to solve Sudoku using Backtracking Algorithm
+
+The sudoku puzzle is represented as a 2D array. The empty
+cells are represented by 0. The algorithm works by trying
+out all possible numbers for an empty cell. If the number
+is valid, it is placed in the cell. If the number is invalid,
+the algorithm backtracks to the previous cell and tries
+another number. The algorithm terminates when all cells
+are filled. The algorithm is implemented in the solveSudoku
+function. The isSafe function checks whether a number can be
+placed in a given cell (no clash in its row, column or 3x3 box),
+and the printing function prints the puzzle grid. The script
+initializes the puzzle and then calls solveSudoku.
+
+#################################################*/
+
+
+fanya printing = unda(sudoku) {
+ fanya row = 0
+ wakati (row < 9){
+ andika(sudoku[row])
+ row++
+ }
+}
+
+fanya sudoku = [[3, 0, 6, 5, 0, 8, 4, 0, 0],[5, 2, 0, 0, 0, 0, 0, 0, 0],[0, 8, 7, 0, 0, 0, 0, 3, 1],[0, 0, 3, 0, 1, 0, 0, 8, 0],[9, 0, 0, 8, 6, 3, 0, 0, 5],[0, 5, 0, 0, 9, 0, 6, 0, 0],[1, 3, 0, 0, 0, 0, 2, 5, 0],[0, 0, 0, 0, 0, 0, 0, 7, 4],[0, 0, 5, 2, 0, 6, 3, 0, 0]]
+
+
+
+fanya isSafe = unda(grid, row, col, num) {
+ kwa x ktk [0,1,2,3,4,5,6,7,8] {
+ kama (grid[row][x] == num) {
+ rudisha sikweli
+ }
+ }
+
+ kwa x ktk [0,1,2,3,4,5,6,7,8] {
+ kama (grid[x][col] == num) {
+ rudisha sikweli
+ }
+ }
+
+ fanya startRow = row - row % 3
+ fanya startCol = col - col % 3
+
+ kwa i ktk [0, 1, 2] {
+ kwa j ktk [0, 1, 2] {
+ kama (grid[i + startRow][j + startCol] == num) {
+ rudisha sikweli
+ }
+ }
+ }
+
+ rudisha kweli
+}
+
+fanya solveSudoku = unda(grid, row, col) {
+ kama (row == 8 && col == 9) {
+ rudisha kweli
+ }
+
+ kama (col == 9) {
+ row += 1
+ col = 0
+ }
+
+ kama (grid[row][col] > 0) {
+ rudisha solveSudoku(grid, row, col + 1)
+ }
+
+ kwa num ktk [1,2,3,4,5,6,7,8,9] {
+ kama (isSafe(grid, row, col, num)) {
+ grid[row][col] = num
+ kama (solveSudoku(grid, row, col + 1)) {
+ rudisha kweli
+ }
+ }
+
+ grid[row][col] = 0
+ }
+
+ rudisha sikweli
+}
+andika()
+andika("----- PUZZLE TO SOLVE -----")
+printing(sudoku)
+kama (solveSudoku(sudoku, 0, 0)){
+ andika()
+ andika("--------- SOLUTION --------")
+ printing(sudoku)
+ andika()
+} sivyo {
+ andika("imeshindikana")
+}
\ No newline at end of file
diff --git a/go.mod b/src/go.mod
similarity index 100%
rename from go.mod
rename to src/go.mod
diff --git a/src/lexer/lexer.go b/src/lexer/lexer.go
new file mode 100644
index 0000000..02b07db
--- /dev/null
+++ b/src/lexer/lexer.go
@@ -0,0 +1,329 @@
+package lexer
+
+import (
+ "github.com/AvicennaJr/Nuru/token"
+)
+
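+// Lexer walks the input byte by byte: position is the index of ch, the
+// character under examination, readPosition is the next index to read, and
+// line tracks the current line number for error messages.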
+type Lexer struct {
+ input string
+ position int
+ readPosition int
+ ch byte
+ line int
+}
+
+func New(input string) *Lexer {
+ l := &Lexer{input: input}
+ l.readChar()
+ return l
+}
+
+func (l *Lexer) readChar() {
+ if l.readPosition >= len(l.input) {
+ l.ch = 0
+ } else {
+ l.ch = l.input[l.readPosition]
+ }
+
+ l.position = l.readPosition
+ l.readPosition += 1
+}
+
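+// NextToken skips whitespace and comments, then emits the next token,
+// combining two-character operators (==, !=, <=, >=, &&, ||, **, ++, --,
+// and the compound assignments) into a single token.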
+func (l *Lexer) NextToken() token.Token {
+ var tok token.Token
+ l.skipWhitespace()
+ if l.ch == '/' && l.peekChar() == '/' {
+ l.skipSingleLineComment()
+ return l.NextToken()
+ }
+ if l.ch == '/' && l.peekChar() == '*' {
+ l.skipMultiLineComment()
+ return l.NextToken()
+ }
+
+ switch l.ch {
+ case '=':
+ if l.peekChar() == '=' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.EQ, Literal: string(ch) + string(l.ch), Line: l.line}
+ } else {
+ tok = newToken(token.ASSIGN, l.line, l.ch)
+ }
+ case ';':
+ tok = newToken(token.SEMICOLON, l.line, l.ch)
+ case '(':
+ tok = newToken(token.LPAREN, l.line, l.ch)
+ case ')':
+ tok = newToken(token.RPAREN, l.line, l.ch)
+ case '{':
+ tok = newToken(token.LBRACE, l.line, l.ch)
+ case '}':
+ tok = newToken(token.RBRACE, l.line, l.ch)
+ case ',':
+ tok = newToken(token.COMMA, l.line, l.ch)
+ case '+':
+ if l.peekChar() == '=' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.PLUS_ASSIGN, Line: l.line, Literal: string(ch) + string(l.ch)}
+ } else if l.peekChar() == '+' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.PLUS_PLUS, Literal: string(ch) + string(l.ch), Line: l.line}
+ } else {
+ tok = newToken(token.PLUS, l.line, l.ch)
+ }
+ case '-':
+ if l.peekChar() == '=' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.MINUS_ASSIGN, Line: l.line, Literal: string(ch) + string(l.ch)}
+ } else if l.peekChar() == '-' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.MINUS_MINUS, Literal: string(ch) + string(l.ch), Line: l.line}
+ } else {
+ tok = newToken(token.MINUS, l.line, l.ch)
+ }
+ case '!':
+ if l.peekChar() == '=' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.NOT_EQ, Literal: string(ch) + string(l.ch), Line: l.line}
+ } else {
+ tok = newToken(token.BANG, l.line, l.ch)
+ }
+ case '/':
+ if l.peekChar() == '=' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.SLASH_ASSIGN, Line: l.line, Literal: string(ch) + string(l.ch)}
+ } else {
+ tok = newToken(token.SLASH, l.line, l.ch)
+ }
+ case '*':
+ if l.peekChar() == '=' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.ASTERISK_ASSIGN, Line: l.line, Literal: string(ch) + string(l.ch)}
+ } else if l.peekChar() == '*' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.POW, Literal: string(ch) + string(l.ch), Line: l.line}
+ } else {
+ tok = newToken(token.ASTERISK, l.line, l.ch)
+ }
+ case '<':
+ if l.peekChar() == '=' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.LTE, Literal: string(ch) + string(l.ch), Line: l.line}
+ } else {
+ tok = newToken(token.LT, l.line, l.ch)
+ }
+ case '>':
+ if l.peekChar() == '=' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.GTE, Literal: string(ch) + string(l.ch), Line: l.line}
+ } else {
+ tok = newToken(token.GT, l.line, l.ch)
+ }
+ case '"':
+ tok.Type = token.STRING
+ tok.Literal = l.readString()
+ tok.Line = l.line
+ case '\'':
+ tok = token.Token{Type: token.STRING, Literal: l.readSingleQuoteString(), Line: l.line}
+ case '[':
+ tok = newToken(token.LBRACKET, l.line, l.ch)
+ case ']':
+ tok = newToken(token.RBRACKET, l.line, l.ch)
+ case ':':
+ tok = newToken(token.COLON, l.line, l.ch)
+ case '.':
+ tok = newToken(token.DOT, l.line, l.ch)
+ case '&':
+ if l.peekChar() == '&' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.AND, Literal: string(ch) + string(l.ch), Line: l.line}
+ }
+ case '|':
+ if l.peekChar() == '|' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.OR, Literal: string(ch) + string(l.ch), Line: l.line}
+ }
+ case '%':
+ if l.peekChar() == '=' {
+ ch := l.ch
+ l.readChar()
+ tok = token.Token{Type: token.MODULUS_ASSIGN, Line: l.line, Literal: string(ch) + string(l.ch)}
+ } else {
+ tok = newToken(token.MODULUS, l.line, l.ch)
+ }
+ case 0:
+ tok.Literal = ""
+ tok.Type = token.EOF
+ tok.Line = l.line
+ default:
+ if isLetter(l.ch) {
+ tok.Literal = l.readIdentifier()
+ tok.Type = token.LookupIdent(tok.Literal)
+ tok.Line = l.line
+ return tok
+ } else if isDigit(l.ch) {
+ tok = l.readDecimal()
+ return tok
+ } else {
+ tok = newToken(token.ILLEGAL, l.line, l.ch)
+ }
+ }
+
+ l.readChar()
+ return tok
+}
+
+func newToken(tokenType token.TokenType, line int, ch byte) token.Token {
+ return token.Token{Type: tokenType, Literal: string(ch), Line: line}
+}
+
+func (l *Lexer) readIdentifier() string {
+ position := l.position
+
+ for isLetter(l.ch) || isDigit(l.ch) {
+ l.readChar()
+ }
+ return l.input[position:l.position]
+}
+
+func isLetter(ch byte) bool {
+ return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_'
+}
+
+func (l *Lexer) skipWhitespace() {
+ for l.ch == ' ' || l.ch == '\t' || l.ch == '\n' || l.ch == '\r' {
+ if l.ch == '\n' {
+ l.line++
+ }
+ l.readChar()
+ }
+}
+
+func isDigit(ch byte) bool {
+ return '0' <= ch && ch <= '9'
+}
+
+func (l *Lexer) readNumber() string {
+ position := l.position
+ for isDigit(l.ch) {
+ l.readChar()
+ }
+ return l.input[position:l.position]
+}
+
+func (l *Lexer) readDecimal() token.Token {
+ integer := l.readNumber()
+ if l.ch == '.' && isDigit(l.peekChar()) {
+ l.readChar()
+ fraction := l.readNumber()
+ return token.Token{Type: token.FLOAT, Literal: integer + "." + fraction, Line: l.line}
+ }
+ return token.Token{Type: token.INT, Literal: integer, Line: l.line}
+}
+
+func (l *Lexer) peekChar() byte {
+ if l.readPosition >= len(l.input) {
+ return 0
+ } else {
+ return l.input[l.readPosition]
+ }
+}
+
+func (l *Lexer) skipSingleLineComment() {
+ for l.ch != '\n' && l.ch != 0 {
+ l.readChar()
+ }
+ l.skipWhitespace()
+}
+
+func (l *Lexer) skipMultiLineComment() {
+ endFound := false
+
+ for !endFound {
+ if l.ch == 0 {
+ endFound = true
+ }
+
+ if l.ch == '*' && l.peekChar() == '/' {
+ endFound = true
+ l.readChar()
+ }
+
+ l.readChar()
+ l.skipWhitespace()
+ }
+
+}
+
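+// readString consumes a double-quoted string, translating the escape
+// sequences \n, \r, \t, \" and \\ into their literal characters.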
+func (l *Lexer) readString() string {
+ var str string
+ for {
+ l.readChar()
+ if l.ch == '"' || l.ch == 0 {
+ break
+ } else if l.ch == '\\' {
+ switch l.peekChar() {
+ case 'n':
+ l.readChar()
+ l.ch = '\n'
+ case 'r':
+ l.readChar()
+ l.ch = '\r'
+ case 't':
+ l.readChar()
+ l.ch = '\t'
+ case '"':
+ l.readChar()
+ l.ch = '"'
+ case '\\':
+ l.readChar()
+ l.ch = '\\'
+ }
+ }
+ str += string(l.ch)
+ }
+ return str
+}
+
+func (l *Lexer) readSingleQuoteString() string {
+ var str string
+ for {
+ l.readChar()
+ if l.ch == '\'' || l.ch == 0 {
+ break
+ } else if l.ch == '\\' {
+ switch l.peekChar() {
+ case 'n':
+ l.readChar()
+ l.ch = '\n'
+ case 'r':
+ l.readChar()
+ l.ch = '\r'
+ case 't':
+ l.readChar()
+ l.ch = '\t'
+ case '"':
+ l.readChar()
+ l.ch = '"'
+ case '\\':
+ l.readChar()
+ l.ch = '\\'
+ }
+ }
+ str += string(l.ch)
+ }
+ return str
+}
diff --git a/lexer/lexer_test.go b/src/lexer/lexer_test.go
similarity index 91%
rename from lexer/lexer_test.go
rename to src/lexer/lexer_test.go
index ccf358e..bc11d38 100644
--- a/lexer/lexer_test.go
+++ b/src/lexer/lexer_test.go
@@ -9,14 +9,14 @@ import (
func TestNextToken(t *testing.T) {
input := `
// Testing kama lex luther iko sawa
- acha tano = 5;
- acha kumi = 10;
+ fanya tano = 5;
+ fanya kumi = 10;
- acha jumla = fn(x, y){
+ fanya jumla = unda(x, y){
x + y;
};
- acha jibu = jumla(tano, kumi);
+ fanya jibu = jumla(tano, kumi);
!-/5;
5 < 10 > 5;
@@ -40,26 +40,27 @@ func TestNextToken(t *testing.T) {
"bangi"
"ba ngi"
[1, 2];
- {"mambo": "vipi"}`
+ {"mambo": "vipi"}
+ . // test dot`
tests := []struct {
expectedType token.TokenType
expectedLiteral string
}{
- {token.LET, "acha"},
+ {token.LET, "fanya"},
{token.IDENT, "tano"},
{token.ASSIGN, "="},
{token.INT, "5"},
{token.SEMICOLON, ";"},
- {token.LET, "acha"},
+ {token.LET, "fanya"},
{token.IDENT, "kumi"},
{token.ASSIGN, "="},
{token.INT, "10"},
{token.SEMICOLON, ";"},
- {token.LET, "acha"},
+ {token.LET, "fanya"},
{token.IDENT, "jumla"},
{token.ASSIGN, "="},
- {token.FUNCTION, "fn"},
+ {token.FUNCTION, "unda"},
{token.LPAREN, "("},
{token.IDENT, "x"},
{token.COMMA, ","},
@@ -72,7 +73,7 @@ func TestNextToken(t *testing.T) {
{token.SEMICOLON, ";"},
{token.RBRACE, "}"},
{token.SEMICOLON, ";"},
- {token.LET, "acha"},
+ {token.LET, "fanya"},
{token.IDENT, "jibu"},
{token.ASSIGN, "="},
{token.IDENT, "jumla"},
@@ -132,6 +133,7 @@ func TestNextToken(t *testing.T) {
{token.COLON, ":"},
{token.STRING, "vipi"},
{token.RBRACE, "}"},
+ {token.DOT, "."},
{token.EOF, ""},
}
diff --git a/src/main.go b/src/main.go
new file mode 100644
index 0000000..4a6941a
--- /dev/null
+++ b/src/main.go
@@ -0,0 +1,67 @@
+package main
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+ "strings"
+
+ "github.com/AvicennaJr/Nuru/repl"
+)
+
+const (
+ LOGO = `
+
+█░░ █░█ █▀▀ █░█ ▄▀█ █▄█ ▄▀█ █▄░█ █░█ █▀█ █░█
+█▄▄ █▄█ █▄█ █▀█ █▀█ ░█░ █▀█ █░▀█ █▄█ █▀▄ █▄█
+
+ | Authored by Avicenna | v0.2.0 |
+`
+)
+
+func main() {
+
+ args := os.Args
+ coloredLogo := fmt.Sprintf("\x1b[%dm%s\x1b[0m", 36, LOGO)
+
+ if len(args) < 2 {
+
+ fmt.Println(coloredLogo)
+ fmt.Println("𝑯𝒂𝒃𝒂𝒓𝒊, 𝒌𝒂𝒓𝒊𝒃𝒖 𝒖𝒕𝒖𝒎𝒊𝒆 𝒍𝒖𝒈𝒉𝒂 𝒚𝒂 𝑵𝒖𝒓𝒖 ✨")
+ fmt.Println("\nTumia exit() au toka() kuondoka")
+
+ repl.Start(os.Stdin, os.Stdout)
+ }
+
+ if len(args) == 2 {
+
+ switch args[1] {
+ case "msaada", "-msaada", "--msaada", "help", "-help", "--help", "-h":
+ fmt.Printf("\x1b[%dm%s\x1b[0m\n", 32, "\nTumia 'nuru' kuanza program\n\nAU\n\nTumia 'nuru' ikifuatiwa na jina la file.\n\n\tMfano:\tnuru fileYangu.nr")
+ os.Exit(0)
+ case "version", "-version", "--version", "-v", "v":
+ fmt.Println(coloredLogo)
+ os.Exit(0)
+ }
+
+ file := args[1]
+
+		if strings.HasSuffix(file, ".nr") || strings.HasSuffix(file, ".sw") {
+ contents, err := ioutil.ReadFile(file)
+ if err != nil {
+				fmt.Printf("\x1b[%dm%s%s\x1b[0m\n", 31, "Error: Nimeshindwa kusoma file: ", file)
+ os.Exit(0)
+ }
+
+ repl.Read(string(contents))
+ } else {
+			fmt.Printf("\x1b[%dm%s%s\x1b[0m", 31, file, " sio file sahihi. Tumia file la '.nr' au '.sw'\n")
+ os.Exit(0)
+ }
+
+ } else {
+ fmt.Printf("\x1b[%dm%s\x1b[0m\n", 31, "Error: Operesheni imeshindikana boss.")
+ fmt.Printf("\x1b[%dm%s\x1b[0m\n", 32, "\nTumia 'nuru' kuprogram\n\nAU\n\nTumia 'nuru' ikifuatiwa na jina la file.\n\n\tMfano:\tnuru fileYangu.nr")
+ os.Exit(0)
+ }
+}
diff --git a/src/object/array.go b/src/object/array.go
new file mode 100644
index 0000000..027f63f
--- /dev/null
+++ b/src/object/array.go
@@ -0,0 +1,40 @@
+package object
+
+import (
+ "bytes"
+ "strings"
+)
+
+type Array struct {
+ Elements []Object
+ offset int
+}
+
+func (ao *Array) Type() ObjectType { return ARRAY_OBJ }
+func (ao *Array) Inspect() string {
+ var out bytes.Buffer
+
+ elements := []string{}
+ for _, e := range ao.Elements {
+ elements = append(elements, e.Inspect())
+ }
+
+ out.WriteString("[")
+ out.WriteString(strings.Join(elements, ", "))
+ out.WriteString("]")
+
+ return out.String()
+}
+
+func (ao *Array) Next() (Object, Object) {
+ idx := ao.offset
+ if len(ao.Elements) > idx {
+ ao.offset = idx + 1
+ return &Integer{Value: int64(idx)}, ao.Elements[idx]
+ }
+ return nil, nil
+}
+
+func (ao *Array) Reset() {
+ ao.offset = 0
+}
diff --git a/src/object/bool.go b/src/object/bool.go
new file mode 100644
index 0000000..8702d52
--- /dev/null
+++ b/src/object/bool.go
@@ -0,0 +1,26 @@
+package object
+
+type Boolean struct {
+ Value bool
+}
+
+func (b *Boolean) Inspect() string {
+ if b.Value {
+ return "kweli"
+ } else {
+ return "sikweli"
+ }
+}
+func (b *Boolean) Type() ObjectType { return BOOLEAN_OBJ }
+
+func (b *Boolean) HashKey() HashKey {
+ var value uint64
+
+ if b.Value {
+ value = 1
+ } else {
+ value = 0
+ }
+
+ return HashKey{Type: b.Type(), Value: value}
+}
diff --git a/src/object/break.go b/src/object/break.go
new file mode 100644
index 0000000..047b17c
--- /dev/null
+++ b/src/object/break.go
@@ -0,0 +1,6 @@
+package object
+
+type Break struct{}
+
+func (b *Break) Type() ObjectType { return BREAK_OBJ }
+func (b *Break) Inspect() string { return "break" }
diff --git a/src/object/builtin.go b/src/object/builtin.go
new file mode 100644
index 0000000..b47c051
--- /dev/null
+++ b/src/object/builtin.go
@@ -0,0 +1,10 @@
+package object
+
+type BuiltinFunction func(args ...Object) Object
+
+type Builtin struct {
+ Fn BuiltinFunction
+}
+
+func (b *Builtin) Inspect() string { return "builtin function" }
+func (b *Builtin) Type() ObjectType { return BUILTIN_OBJ }
diff --git a/src/object/continue.go b/src/object/continue.go
new file mode 100644
index 0000000..15c7355
--- /dev/null
+++ b/src/object/continue.go
@@ -0,0 +1,6 @@
+package object
+
+type Continue struct{}
+
+func (c *Continue) Type() ObjectType { return CONTINUE_OBJ }
+func (c *Continue) Inspect() string { return "continue" }
diff --git a/src/object/dict.go b/src/object/dict.go
new file mode 100644
index 0000000..86ca482
--- /dev/null
+++ b/src/object/dict.go
@@ -0,0 +1,60 @@
+package object
+
+import (
+ "bytes"
+ "fmt"
+ "sort"
+ "strings"
+)
+
+type DictPair struct {
+ Key Object
+ Value Object
+}
+
+type Dict struct {
+ Pairs map[HashKey]DictPair
+ offset int
+}
+
+func (d *Dict) Type() ObjectType { return DICT_OBJ }
+func (d *Dict) Inspect() string {
+ var out bytes.Buffer
+
+ pairs := []string{}
+
+ for _, pair := range d.Pairs {
+ pairs = append(pairs, fmt.Sprintf("%s: %s", pair.Key.Inspect(), pair.Value.Inspect()))
+ }
+
+ out.WriteString("{")
+ out.WriteString(strings.Join(pairs, ", "))
+ out.WriteString("}")
+
+ return out.String()
+}
+
+func (d *Dict) Next() (Object, Object) {
+ idx := 0
+ dict := make(map[string]DictPair)
+ var keys []string
+ for _, v := range d.Pairs {
+ dict[v.Key.Inspect()] = v
+ keys = append(keys, v.Key.Inspect())
+ }
+
+ sort.Strings(keys)
+
+ for _, k := range keys {
+ if d.offset == idx {
+ d.offset += 1
+ return dict[k].Key, dict[k].Value
+ }
+ idx += 1
+ }
+ return nil, nil
+}
+
+func (d *Dict) Reset() {
+ d.offset = 0
+}
diff --git a/object/environment.go b/src/object/environment.go
similarity index 100%
rename from object/environment.go
rename to src/object/environment.go
diff --git a/src/object/error.go b/src/object/error.go
new file mode 100644
index 0000000..2bc65af
--- /dev/null
+++ b/src/object/error.go
@@ -0,0 +1,13 @@
+package object
+
+import "fmt"
+
+type Error struct {
+ Message string
+}
+
+func (e *Error) Inspect() string {
+ msg := fmt.Sprintf("\x1b[%dm%s\x1b[0m", 31, "Kosa: ")
+ return msg + e.Message
+}
+func (e *Error) Type() ObjectType { return ERROR_OBJ }
diff --git a/src/object/file.go b/src/object/file.go
new file mode 100644
index 0000000..7b3c744
--- /dev/null
+++ b/src/object/file.go
@@ -0,0 +1,45 @@
+package object
+
+import (
+ "bufio"
+ "io"
+ "os"
+)
+
+type File struct {
+ Filename string
+ Reader *bufio.Reader // To read the file
+ Writer *bufio.Writer // To write on the file
+ Handle *os.File // To handle the actual file (open, close etc)
+}
+
+func (f *File) Type() ObjectType { return FILE_OBJ }
+func (f *File) Inspect() string { return f.Filename }
+func (f *File) Method(method string, args []Object) Object {
+ switch method {
+ case "soma":
+ return f.read(args)
+ case "funga":
+ return f.close(args)
+ }
+ return nil
+}
+
+func (f *File) read(args []Object) Object {
+ if len(args) != 0 {
+ return newError("Samahani, tunahitaji Hoja 0, wewe umeweka %d", len(args))
+ }
+ if f.Reader == nil {
+ return nil
+ }
+ txt, _ := io.ReadAll(f.Reader)
+ return &String{Value: string(txt)}
+}
+
+func (f *File) close(args []Object) Object {
+ if len(args) != 0 {
+ return newError("Samahani, tunahitaji Hoja 0, wewe umeweka %d", len(args))
+ }
+ _ = f.Handle.Close()
+ return &Null{}
+}
diff --git a/src/object/float.go b/src/object/float.go
new file mode 100644
index 0000000..a4b2293
--- /dev/null
+++ b/src/object/float.go
@@ -0,0 +1,19 @@
+package object
+
+import (
+ "hash/fnv"
+ "strconv"
+)
+
+type Float struct {
+ Value float64
+}
+
+func (f *Float) Inspect() string { return strconv.FormatFloat(f.Value, 'f', -1, 64) }
+func (f *Float) Type() ObjectType { return FLOAT_OBJ }
+
+func (f *Float) HashKey() HashKey {
+ h := fnv.New64a()
+ h.Write([]byte(f.Inspect()))
+ return HashKey{Type: f.Type(), Value: h.Sum64()}
+}
diff --git a/src/object/function.go b/src/object/function.go
new file mode 100644
index 0000000..aecaf8a
--- /dev/null
+++ b/src/object/function.go
@@ -0,0 +1,33 @@
+package object
+
+import (
+ "bytes"
+ "strings"
+
+ "github.com/AvicennaJr/Nuru/ast"
+)
+
+type Function struct {
+ Parameters []*ast.Identifier
+ Body *ast.BlockStatement
+ Env *Environment
+}
+
+func (f *Function) Type() ObjectType { return FUNCTION_OBJ }
+func (f *Function) Inspect() string {
+ var out bytes.Buffer
+
+ params := []string{}
+ for _, p := range f.Parameters {
+ params = append(params, p.String())
+ }
+
+ out.WriteString("unda")
+ out.WriteString("(")
+ out.WriteString(strings.Join(params, ", "))
+ out.WriteString(") {\n")
+ out.WriteString(f.Body.String())
+ out.WriteString("\n}")
+
+ return out.String()
+}
diff --git a/src/object/integer.go b/src/object/integer.go
new file mode 100644
index 0000000..87cb945
--- /dev/null
+++ b/src/object/integer.go
@@ -0,0 +1,14 @@
+package object
+
+import "fmt"
+
+type Integer struct {
+ Value int64
+}
+
+func (i *Integer) Inspect() string { return fmt.Sprintf("%d", i.Value) }
+func (i *Integer) Type() ObjectType { return INTEGER_OBJ }
+
+func (i *Integer) HashKey() HashKey {
+ return HashKey{Type: i.Type(), Value: uint64(i.Value)}
+}
diff --git a/src/object/null.go b/src/object/null.go
new file mode 100644
index 0000000..1610994
--- /dev/null
+++ b/src/object/null.go
@@ -0,0 +1,6 @@
+package object
+
+type Null struct{}
+
+func (n *Null) Inspect() string { return "null" }
+func (n *Null) Type() ObjectType { return NULL_OBJ }
diff --git a/src/object/object.go b/src/object/object.go
new file mode 100644
index 0000000..e1b8389
--- /dev/null
+++ b/src/object/object.go
@@ -0,0 +1,49 @@
+package object
+
+import (
+ "fmt"
+)
+
+type ObjectType string
+
+const (
+ INTEGER_OBJ = "NAMBA"
+ FLOAT_OBJ = "DESIMALI"
+ BOOLEAN_OBJ = "BOOLEAN"
+ NULL_OBJ = "TUPU"
+ RETURN_VALUE_OBJ = "RUDISHA"
+ ERROR_OBJ = "KOSA"
+ FUNCTION_OBJ = "UNDO (FUNCTION)"
+ STRING_OBJ = "NENO"
+ BUILTIN_OBJ = "YA_NDANI"
+ ARRAY_OBJ = "ORODHA"
+ DICT_OBJ = "KAMUSI"
+ CONTINUE_OBJ = "ENDELEA"
+ BREAK_OBJ = "VUNJA"
+ FILE_OBJ = "FAILI"
+)
+
+type Object interface {
+ Type() ObjectType
+ Inspect() string
+}
+
+type HashKey struct {
+ Type ObjectType
+ Value uint64
+}
+
+type Hashable interface {
+ HashKey() HashKey
+}
+
+// Iterable interface for dicts, strings and arrays
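+// Consumers call Next() repeatedly until it returns (nil, nil), then call
+// Reset() before iterating again; arrays and strings yield (index, element)
+// pairs, dicts yield (key, value) pairs.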
+type Iterable interface {
+ Next() (Object, Object)
+ Reset()
+}
+
+func newError(format string, a ...interface{}) *Error {
+ format = fmt.Sprintf("\x1b[%dm%s\x1b[0m", 31, format)
+ return &Error{Message: fmt.Sprintf(format, a...)}
+}
diff --git a/object/object_test.go b/src/object/object_test.go
similarity index 100%
rename from object/object_test.go
rename to src/object/object_test.go
diff --git a/src/object/return.go b/src/object/return.go
new file mode 100644
index 0000000..42c7225
--- /dev/null
+++ b/src/object/return.go
@@ -0,0 +1,8 @@
+package object
+
+type ReturnValue struct {
+ Value Object
+}
+
+func (rv *ReturnValue) Inspect() string { return rv.Value.Inspect() }
+func (rv *ReturnValue) Type() ObjectType { return RETURN_VALUE_OBJ }
diff --git a/src/object/strings.go b/src/object/strings.go
new file mode 100644
index 0000000..213fa8c
--- /dev/null
+++ b/src/object/strings.go
@@ -0,0 +1,83 @@
+package object
+
+import (
+ "hash/fnv"
+ "strings"
+)
+
+type String struct {
+ Value string
+ offset int
+}
+
+func (s *String) Inspect() string { return s.Value }
+func (s *String) Type() ObjectType { return STRING_OBJ }
+func (s *String) HashKey() HashKey {
+ h := fnv.New64a()
+ h.Write([]byte(s.Value))
+
+ return HashKey{Type: s.Type(), Value: h.Sum64()}
+}
+func (s *String) Next() (Object, Object) {
+ offset := s.offset
+ if len(s.Value) > offset {
+ s.offset = offset + 1
+ return &Integer{Value: int64(offset)}, &String{Value: string(s.Value[offset])}
+ }
+ return nil, nil
+}
+func (s *String) Reset() {
+ s.offset = 0
+}
+func (s *String) Method(method string, args []Object) Object {
+ switch method {
+ case "idadi":
+ return s.len(args)
+ case "herufikubwa":
+ return s.upper(args)
+ case "herufindogo":
+ return s.lower(args)
+ case "gawa":
+ return s.split(args)
+ default:
+ return newError("Samahani, function hii haitumiki na Strings (Neno)")
+ }
+}
+
+func (s *String) len(args []Object) Object {
+ if len(args) != 0 {
+ return newError("Samahani, tunahitaji Hoja 0, wewe umeweka %d", len(args))
+ }
+ return &Integer{Value: int64(len(s.Value))}
+}
+
+func (s *String) upper(args []Object) Object {
+ if len(args) != 0 {
+ return newError("Samahani, tunahitaji Hoja 0, wewe umeweka %d", len(args))
+ }
+ return &String{Value: strings.ToUpper(s.Value)}
+}
+
+func (s *String) lower(args []Object) Object {
+ if len(args) != 0 {
+ return newError("Samahani, tunahitaji Hoja 0, wewe umeweka %d", len(args))
+ }
+ return &String{Value: strings.ToLower(s.Value)}
+}
+
+func (s *String) split(args []Object) Object {
+ if len(args) > 1 {
+ return newError("Samahani, tunahitaji Hoja 1 au 0, wewe umeweka %d", len(args))
+ }
+ sep := " "
+ if len(args) == 1 {
+ sep = args[0].(*String).Value
+ }
+ parts := strings.Split(s.Value, sep)
+ length := len(parts)
+ elements := make([]Object, length)
+ for k, v := range parts {
+ elements[k] = &String{Value: v}
+ }
+ return &Array{Elements: elements}
+}
diff --git a/src/parser/arrays.go b/src/parser/arrays.go
new file mode 100644
index 0000000..119a903
--- /dev/null
+++ b/src/parser/arrays.go
@@ -0,0 +1,37 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+func (p *Parser) parseArrayLiteral() ast.Expression {
+ array := &ast.ArrayLiteral{Token: p.curToken}
+
+ array.Elements = p.parseExpressionList(token.RBRACKET)
+
+ return array
+}
+
+func (p *Parser) parseExpressionList(end token.TokenType) []ast.Expression {
+ list := []ast.Expression{}
+
+ if p.peekTokenIs(end) {
+ p.nextToken()
+ return list
+ }
+
+ p.nextToken()
+ list = append(list, p.parseExpression(LOWEST))
+
+ for p.peekTokenIs(token.COMMA) {
+ p.nextToken()
+ p.nextToken()
+ list = append(list, p.parseExpression(LOWEST))
+ }
+
+ if !p.expectPeek(end) {
+ return nil
+ }
+ return list
+}
diff --git a/src/parser/assignment.go b/src/parser/assignment.go
new file mode 100644
index 0000000..72a8a20
--- /dev/null
+++ b/src/parser/assignment.go
@@ -0,0 +1,30 @@
+package parser
+
+import (
+ "fmt"
+
+ "github.com/AvicennaJr/Nuru/ast"
+)
+
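+// parseAssignmentExpression handles `=` and the compound assignments
+// (+=, -=, *=, /=, %=); the left-hand side must be an identifier or an
+// index expression such as x[0].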
+func (p *Parser) parseAssignmentExpression(exp ast.Expression) ast.Expression {
+ switch node := exp.(type) {
+ case *ast.Identifier, *ast.IndexExpression:
+ default:
+ if node != nil {
+			msg := fmt.Sprintf("Mstari %d: Tulitegemea kupata kitambulishi au array, badala yake tumepata: %s", p.curToken.Line, node.TokenLiteral())
+ p.errors = append(p.errors, msg)
+ } else {
+ msg := fmt.Sprintf("Mstari %d: Umekosea mkuu", p.curToken.Line)
+ p.errors = append(p.errors, msg)
+ }
+ return nil
+ }
+
+ ae := &ast.AssignmentExpression{Token: p.curToken, Left: exp}
+
+ p.nextToken()
+
+ ae.Value = p.parseExpression(LOWEST)
+
+ return ae
+}
diff --git a/src/parser/boolean.go b/src/parser/boolean.go
new file mode 100644
index 0000000..9ce95e6
--- /dev/null
+++ b/src/parser/boolean.go
@@ -0,0 +1,10 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+func (p *Parser) parseBoolean() ast.Expression {
+ return &ast.Boolean{Token: p.curToken, Value: p.curTokenIs(token.TRUE)}
+}
diff --git a/src/parser/break.go b/src/parser/break.go
new file mode 100644
index 0000000..6c4c8ef
--- /dev/null
+++ b/src/parser/break.go
@@ -0,0 +1,14 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+func (p *Parser) parseBreak() *ast.Break {
+ stmt := &ast.Break{Token: p.curToken}
+ for p.curTokenIs(token.SEMICOLON) {
+ p.nextToken()
+ }
+ return stmt
+}
diff --git a/src/parser/continue.go b/src/parser/continue.go
new file mode 100644
index 0000000..a66b85f
--- /dev/null
+++ b/src/parser/continue.go
@@ -0,0 +1,14 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+func (p *Parser) parseContinue() *ast.Continue {
+ stmt := &ast.Continue{Token: p.curToken}
+ for p.curTokenIs(token.SEMICOLON) {
+ p.nextToken()
+ }
+ return stmt
+}
diff --git a/src/parser/dict.go b/src/parser/dict.go
new file mode 100644
index 0000000..4b3b205
--- /dev/null
+++ b/src/parser/dict.go
@@ -0,0 +1,35 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+func (p *Parser) parseDictLiteral() ast.Expression {
+ dict := &ast.DictLiteral{Token: p.curToken}
+ dict.Pairs = make(map[ast.Expression]ast.Expression)
+
+ for !p.peekTokenIs(token.RBRACE) {
+ p.nextToken()
+ key := p.parseExpression(LOWEST)
+
+ if !p.expectPeek(token.COLON) {
+ return nil
+ }
+
+ p.nextToken()
+ value := p.parseExpression(LOWEST)
+
+ dict.Pairs[key] = value
+
+ if !p.peekTokenIs(token.RBRACE) && !p.expectPeek(token.COMMA) {
+ return nil
+ }
+ }
+
+ if !p.expectPeek(token.RBRACE) {
+ return nil
+ }
+
+ return dict
+}
diff --git a/src/parser/dot.go b/src/parser/dot.go
new file mode 100644
index 0000000..18086b8
--- /dev/null
+++ b/src/parser/dot.go
@@ -0,0 +1,18 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+func (p *Parser) parseMethod(obj ast.Expression) ast.Expression {
+ precedence := p.curPrecedence()
+ exp := &ast.MethodExpression{Token: p.curToken, Object: obj}
+ p.nextToken()
+ exp.Method = p.parseExpression(precedence)
+ if !p.expectPeek(token.LPAREN) {
+ return nil
+ }
+ exp.Arguments = p.parseExpressionList(token.RPAREN)
+ return exp
+}
diff --git a/src/parser/float.go b/src/parser/float.go
new file mode 100644
index 0000000..77f6075
--- /dev/null
+++ b/src/parser/float.go
@@ -0,0 +1,20 @@
+package parser
+
+import (
+ "fmt"
+ "strconv"
+
+ "github.com/AvicennaJr/Nuru/ast"
+)
+
+func (p *Parser) parseFloatLiteral() ast.Expression {
+ fl := &ast.FloatLiteral{Token: p.curToken}
+ value, err := strconv.ParseFloat(p.curToken.Literal, 64)
+ if err != nil {
+ msg := fmt.Sprintf("Mstari %d: Hatuwezi kuparse %q kama desimali", p.curToken.Line, p.curToken.Literal)
+ p.errors = append(p.errors, msg)
+ return nil
+ }
+ fl.Value = value
+ return fl
+}
diff --git a/src/parser/for.go b/src/parser/for.go
new file mode 100644
index 0000000..7e36038
--- /dev/null
+++ b/src/parser/for.go
@@ -0,0 +1,92 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+func (p *Parser) parseForExpression() ast.Expression {
+ expression := &ast.For{Token: p.curToken}
+ p.nextToken()
+ if !p.curTokenIs(token.IDENT) {
+ return nil
+ }
+ if !p.peekTokenIs(token.ASSIGN) {
+ return p.parseForInExpression(expression)
+ }
+
+ // In future will allow: kwa i = 0; i<10; i++ {andika(i)}
+ // expression.Identifier = p.curToken.Literal
+ // expression.StarterName = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+ // if expression.StarterName == nil {
+ // return nil
+ // }
+ // if !p.expectPeek(token.ASSIGN) {
+ // return nil
+ // }
+
+ // p.nextToken()
+
+ // expression.StarterValue = p.parseExpression(LOWEST)
+ // // expression.Starter = p.parseExpression(LOWEST)
+ // if expression.StarterValue == nil {
+ // return nil
+ // }
+ // p.nextToken()
+ // for p.curTokenIs(token.SEMICOLON) {
+ // p.nextToken()
+ // }
+ // expression.Condition = p.parseExpression(LOWEST)
+ // if expression.Condition == nil {
+ // return nil
+ // }
+ // p.nextToken()
+ // for p.curTokenIs(token.SEMICOLON) {
+ // p.nextToken()
+ // }
+ // expression.Closer = p.parseExpression(LOWEST)
+ // if expression.Closer == nil {
+ // return nil
+ // }
+ // p.nextToken()
+ // for p.curTokenIs(token.SEMICOLON) {
+ // p.nextToken()
+ // }
+ // if !p.curTokenIs(token.LBRACE) {
+ // return nil
+ // }
+ // expression.Block = p.parseBlockStatement()
+ // return expression
+ return nil
+}
+
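+// parseForInExpression parses `kwa v ktk iterable { ... }` and the two
+// identifier form `kwa k, v ktk iterable { ... }`, where k receives the
+// index/key and v the value.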
+func (p *Parser) parseForInExpression(initialExpression *ast.For) ast.Expression {
+ expression := &ast.ForIn{Token: initialExpression.Token}
+ if !p.curTokenIs(token.IDENT) {
+ return nil
+ }
+ val := p.curToken.Literal
+ var key string
+ p.nextToken()
+ if p.curTokenIs(token.COMMA) {
+ p.nextToken()
+ if !p.curTokenIs(token.IDENT) {
+ return nil
+ }
+ key = val
+ val = p.curToken.Literal
+ p.nextToken()
+ }
+ expression.Key = key
+ expression.Value = val
+ if !p.curTokenIs(token.IN) {
+ return nil
+ }
+ p.nextToken()
+ expression.Iterable = p.parseExpression(LOWEST)
+ if !p.expectPeek(token.LBRACE) {
+ return nil
+ }
+ expression.Block = p.parseBlockStatement()
+ return expression
+}
diff --git a/src/parser/function.go b/src/parser/function.go
new file mode 100644
index 0000000..a1d7dd7
--- /dev/null
+++ b/src/parser/function.go
@@ -0,0 +1,57 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+func (p *Parser) parseFunctionLiteral() ast.Expression {
+ lit := &ast.FunctionLiteral{Token: p.curToken}
+
+ if !p.expectPeek(token.LPAREN) {
+ return nil
+ }
+
+ lit.Parameters = p.parseFunctionParameters()
+
+ if !p.expectPeek(token.LBRACE) {
+ return nil
+ }
+
+ lit.Body = p.parseBlockStatement()
+
+ return lit
+}
+
+func (p *Parser) parseFunctionParameters() []*ast.Identifier {
+ identifiers := []*ast.Identifier{}
+
+ if p.peekTokenIs(token.RPAREN) {
+ p.nextToken()
+ return identifiers
+ }
+
+ p.nextToken()
+
+ ident := &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+ identifiers = append(identifiers, ident)
+
+ for p.peekTokenIs(token.COMMA) {
+ p.nextToken()
+ p.nextToken()
+ ident := &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+ identifiers = append(identifiers, ident)
+ }
+
+ if !p.expectPeek(token.RPAREN) {
+ return nil
+ }
+
+ return identifiers
+}
+
+func (p *Parser) parseCallExpression(function ast.Expression) ast.Expression {
+ exp := &ast.CallExpression{Token: p.curToken, Function: function}
+ exp.Arguments = p.parseExpressionList(token.RPAREN)
+ return exp
+}
diff --git a/src/parser/identifier.go b/src/parser/identifier.go
new file mode 100644
index 0000000..d9d5b7e
--- /dev/null
+++ b/src/parser/identifier.go
@@ -0,0 +1,9 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+)
+
+func (p *Parser) parseIdentifier() ast.Expression {
+ return &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+}
diff --git a/src/parser/if.go b/src/parser/if.go
new file mode 100644
index 0000000..b3eccc6
--- /dev/null
+++ b/src/parser/if.go
@@ -0,0 +1,50 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
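+// parseIfExpression parses `kama (cond) { ... }` with an optional
+// `sivyo { ... }` alternative; an `au kama` chain is handled by wrapping the
+// nested kama expression inside the Alternative block.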
+func (p *Parser) parseIfExpression() ast.Expression {
+ expression := &ast.IfExpression{Token: p.curToken}
+
+ if !p.expectPeek(token.LPAREN) {
+ return nil
+ }
+
+ p.nextToken()
+ expression.Condition = p.parseExpression(LOWEST)
+
+ if !p.expectPeek(token.RPAREN) {
+ return nil
+ }
+
+ if !p.expectPeek(token.LBRACE) {
+ return nil
+ }
+
+ expression.Consequence = p.parseBlockStatement()
+
+ if p.peekTokenIs(token.ELSE) {
+ p.nextToken()
+ if p.peekTokenIs(token.IF) {
+ p.nextToken()
+ expression.Alternative = &ast.BlockStatement{
+ Statements: []ast.Statement{
+ &ast.ExpressionStatement{
+ Expression: p.parseIfExpression(),
+ },
+ },
+ }
+ return expression
+ }
+
+ if !p.expectPeek(token.LBRACE) {
+ return nil
+ }
+
+ expression.Alternative = p.parseBlockStatement()
+ }
+
+ return expression
+}
diff --git a/src/parser/index.go b/src/parser/index.go
new file mode 100644
index 0000000..9104bc6
--- /dev/null
+++ b/src/parser/index.go
@@ -0,0 +1,18 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+func (p *Parser) parseIndexExpression(left ast.Expression) ast.Expression {
+ exp := &ast.IndexExpression{Token: p.curToken, Left: left}
+
+ p.nextToken()
+ exp.Index = p.parseExpression(LOWEST)
+ if !p.expectPeek(token.RBRACKET) {
+ return nil
+ }
+
+ return exp
+}
diff --git a/src/parser/integer.go b/src/parser/integer.go
new file mode 100644
index 0000000..c35bcd2
--- /dev/null
+++ b/src/parser/integer.go
@@ -0,0 +1,22 @@
+package parser
+
+import (
+ "fmt"
+ "strconv"
+
+ "github.com/AvicennaJr/Nuru/ast"
+)
+
+func (p *Parser) parseIntegerLiteral() ast.Expression {
+ lit := &ast.IntegerLiteral{Token: p.curToken}
+
+ value, err := strconv.ParseInt(p.curToken.Literal, 0, 64)
+ if err != nil {
+ msg := fmt.Sprintf("Mstari %d: Hatuwezi kuparse %q kama namba", p.curToken.Line, p.curToken.Literal)
+ p.errors = append(p.errors, msg)
+ return nil
+ }
+ lit.Value = value
+
+ return lit
+}
diff --git a/src/parser/null.go b/src/parser/null.go
new file mode 100644
index 0000000..2f4c869
--- /dev/null
+++ b/src/parser/null.go
@@ -0,0 +1,9 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+)
+
+func (p *Parser) parseNull() ast.Expression {
+ return &ast.Null{Token: p.curToken}
+}
diff --git a/src/parser/parser.go b/src/parser/parser.go
new file mode 100644
index 0000000..515397d
--- /dev/null
+++ b/src/parser/parser.go
@@ -0,0 +1,314 @@
+package parser
+
+import (
+ "fmt"
+
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/lexer"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
+const (
+ // Think of BODMAS
+ _ int = iota
+ LOWEST
+ COND // OR or AND
+ ASSIGN // =
+ EQUALS // ==
+ LESSGREATER // > OR <
+ SUM // +
+ PRODUCT // *
+ POWER // ** we got the power XD
+ MODULUS // %
+ PREFIX // -X OR !X
+ CALL // myFunction(X)
+ INDEX // Arrays
+ DOT // For methods
+)
+
+var precedences = map[token.TokenType]int{
+ token.AND: COND,
+ token.OR: COND,
+ token.IN: COND,
+ token.ASSIGN: ASSIGN,
+ token.EQ: EQUALS,
+ token.NOT_EQ: EQUALS,
+ token.LT: LESSGREATER,
+ token.LTE: LESSGREATER,
+ token.GT: LESSGREATER,
+ token.GTE: LESSGREATER,
+ token.PLUS: SUM,
+ token.PLUS_ASSIGN: SUM,
+ token.MINUS: SUM,
+ token.MINUS_ASSIGN: SUM,
+ token.SLASH: PRODUCT,
+ token.SLASH_ASSIGN: PRODUCT,
+ token.ASTERISK: PRODUCT,
+ token.ASTERISK_ASSIGN: PRODUCT,
+ token.POW: POWER,
+ token.MODULUS: MODULUS,
+ token.MODULUS_ASSIGN: MODULUS,
+ // token.BANG: PREFIX,
+ token.LPAREN: CALL,
+ token.LBRACKET: INDEX,
+ token.DOT: DOT, // Highest priority
+}
+
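+// The parser is a Pratt (top-down operator precedence) parser: a token type
+// may register a prefix, infix and/or postfix parse function, and
+// parseExpression folds them together using the precedence table above.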
+type (
+ prefixParseFn func() ast.Expression
+ infixParseFn func(ast.Expression) ast.Expression
+ postfixParseFn func() ast.Expression
+)
+
+type Parser struct {
+ l *lexer.Lexer
+
+ curToken token.Token
+ peekToken token.Token
+ prevToken token.Token
+
+ errors []string
+
+ prefixParseFns map[token.TokenType]prefixParseFn
+ infixParseFns map[token.TokenType]infixParseFn
+ postfixParseFns map[token.TokenType]postfixParseFn
+}
+
+func (p *Parser) registerPrefix(tokenType token.TokenType, fn prefixParseFn) {
+ p.prefixParseFns[tokenType] = fn
+}
+
+func (p *Parser) registerInfix(tokenType token.TokenType, fn infixParseFn) {
+ p.infixParseFns[tokenType] = fn
+}
+
+func (p *Parser) registerPostfix(tokenType token.TokenType, fn postfixParseFn) {
+ p.postfixParseFns[tokenType] = fn
+}
+
+func New(l *lexer.Lexer) *Parser {
+ p := &Parser{l: l, errors: []string{}}
+
+	// Read two tokens so that curToken and peekToken are both set
+ p.nextToken()
+ p.nextToken()
+
+ p.prefixParseFns = make(map[token.TokenType]prefixParseFn)
+ p.registerPrefix(token.STRING, p.parseStringLiteral)
+ p.registerPrefix(token.IDENT, p.parseIdentifier)
+ p.registerPrefix(token.INT, p.parseIntegerLiteral)
+ p.registerPrefix(token.FLOAT, p.parseFloatLiteral)
+ p.registerPrefix(token.BANG, p.parsePrefixExpression)
+ p.registerPrefix(token.MINUS, p.parsePrefixExpression)
+ p.registerPrefix(token.PLUS, p.parsePrefixExpression)
+ p.registerPrefix(token.TRUE, p.parseBoolean)
+ p.registerPrefix(token.FALSE, p.parseBoolean)
+ p.registerPrefix(token.LPAREN, p.parseGroupedExpression)
+ p.registerPrefix(token.IF, p.parseIfExpression)
+ p.registerPrefix(token.FUNCTION, p.parseFunctionLiteral)
+ p.registerPrefix(token.LBRACKET, p.parseArrayLiteral)
+ p.registerPrefix(token.LBRACE, p.parseDictLiteral)
+ p.registerPrefix(token.WHILE, p.parseWhileExpression)
+ p.registerPrefix(token.NULL, p.parseNull)
+ p.registerPrefix(token.FOR, p.parseForExpression)
+ p.registerPrefix(token.SWITCH, p.parseSwitchStatement)
+
+ p.infixParseFns = make(map[token.TokenType]infixParseFn)
+ p.registerInfix(token.AND, p.parseInfixExpression)
+ p.registerInfix(token.OR, p.parseInfixExpression)
+ p.registerInfix(token.PLUS, p.parseInfixExpression)
+ p.registerInfix(token.PLUS_ASSIGN, p.parseAssignmentExpression)
+ p.registerInfix(token.MINUS, p.parseInfixExpression)
+ p.registerInfix(token.MINUS_ASSIGN, p.parseAssignmentExpression)
+ p.registerInfix(token.SLASH, p.parseInfixExpression)
+ p.registerInfix(token.SLASH_ASSIGN, p.parseAssignmentExpression)
+ p.registerInfix(token.ASTERISK, p.parseInfixExpression)
+ p.registerInfix(token.ASTERISK_ASSIGN, p.parseAssignmentExpression)
+ p.registerInfix(token.POW, p.parseInfixExpression)
+ p.registerInfix(token.MODULUS, p.parseInfixExpression)
+ p.registerInfix(token.MODULUS_ASSIGN, p.parseAssignmentExpression)
+ p.registerInfix(token.EQ, p.parseInfixExpression)
+ p.registerInfix(token.NOT_EQ, p.parseInfixExpression)
+ p.registerInfix(token.LT, p.parseInfixExpression)
+ p.registerInfix(token.LTE, p.parseInfixExpression)
+ p.registerInfix(token.GT, p.parseInfixExpression)
+ p.registerInfix(token.GTE, p.parseInfixExpression)
+ p.registerInfix(token.LPAREN, p.parseCallExpression)
+ p.registerInfix(token.LBRACKET, p.parseIndexExpression)
+ p.registerInfix(token.ASSIGN, p.parseAssignmentExpression)
+ p.registerInfix(token.IN, p.parseInfixExpression)
+ p.registerInfix(token.DOT, p.parseMethod)
+
+ p.postfixParseFns = make(map[token.TokenType]postfixParseFn)
+ p.registerPostfix(token.PLUS_PLUS, p.parsePostfixExpression)
+ p.registerPostfix(token.MINUS_MINUS, p.parsePostfixExpression)
+
+ return p
+}
+
+func (p *Parser) ParseProgram() *ast.Program {
+ program := &ast.Program{}
+ program.Statements = []ast.Statement{}
+
+ for !p.curTokenIs(token.EOF) {
+ stmt := p.parseStatement()
+ program.Statements = append(program.Statements, stmt)
+
+ p.nextToken()
+ }
+ return program
+}
+
+// manage token literals:
+
+func (p *Parser) nextToken() {
+ p.prevToken = p.curToken
+ p.curToken = p.peekToken
+ p.peekToken = p.l.NextToken()
+}
+
+func (p *Parser) curTokenIs(t token.TokenType) bool {
+ return p.curToken.Type == t
+}
+
+func (p *Parser) peekTokenIs(t token.TokenType) bool {
+ return p.peekToken.Type == t
+}
+
+func (p *Parser) expectPeek(t token.TokenType) bool {
+ if p.peekTokenIs(t) {
+ p.nextToken()
+ return true
+ } else {
+ p.peekError(t)
+ return false
+ }
+}
+
+func (p *Parser) peekPrecedence() int {
+ if p, ok := precedences[p.peekToken.Type]; ok {
+ return p
+ }
+ return LOWEST
+}
+
+func (p *Parser) curPrecedence() int {
+ if p, ok := precedences[p.curToken.Type]; ok {
+ return p
+ }
+
+ return LOWEST
+}
+
+// error messages
+
+func (p *Parser) Errors() []string {
+ return p.errors
+}
+
+func (p *Parser) peekError(t token.TokenType) {
+ msg := fmt.Sprintf("Mstari %d: Tulitegemea kupata %s, badala yake tumepata %s", p.curToken.Line, t, p.peekToken.Type)
+ p.errors = append(p.errors, msg)
+}
+
+// parse expressions
+
+func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
+ stmt := &ast.ExpressionStatement{Token: p.curToken}
+
+ stmt.Expression = p.parseExpression(LOWEST)
+
+ if p.peekTokenIs(token.SEMICOLON) {
+ p.nextToken()
+ }
+
+ return stmt
+}
+
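+// parseExpression first parses a postfix or prefix expression for the current
+// token, then keeps folding infix operators into the left-hand side for as
+// long as the next operator binds more tightly than `precedence`.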
+func (p *Parser) parseExpression(precedence int) ast.Expression {
+ postfix := p.postfixParseFns[p.curToken.Type]
+ if postfix != nil {
+ return (postfix())
+ }
+ prefix := p.prefixParseFns[p.curToken.Type]
+ if prefix == nil {
+ p.noPrefixParseFnError(p.curToken.Type)
+ return nil
+ }
+ leftExp := prefix()
+
+ for !p.peekTokenIs(token.SEMICOLON) && precedence < p.peekPrecedence() {
+ infix := p.infixParseFns[p.peekToken.Type]
+ if infix == nil {
+ p.noInfixParseFnError(p.peekToken.Type)
+ return nil
+ }
+
+ p.nextToken()
+ leftExp = infix(leftExp)
+ }
+ return leftExp
+
+}
+
+// prefix expressions
+
+func (p *Parser) parsePrefixExpression() ast.Expression {
+ expression := &ast.PrefixExpression{
+ Token: p.curToken,
+ Operator: p.curToken.Literal,
+ }
+
+ p.nextToken()
+
+ expression.Right = p.parseExpression(PREFIX)
+
+ return expression
+}
+
+func (p *Parser) noPrefixParseFnError(t token.TokenType) {
+ msg := fmt.Sprintf("Mstari %d: Tumeshindwa kuparse %s", p.curToken.Line, t)
+ p.errors = append(p.errors, msg)
+}
+
+// infix expressions
+
+func (p *Parser) parseInfixExpression(left ast.Expression) ast.Expression {
+ expression := &ast.InfixExpression{
+ Token: p.curToken,
+ Operator: p.curToken.Literal,
+ Left: left,
+ }
+
+ precedence := p.curPrecedence()
+ p.nextToken()
+ expression.Right = p.parseExpression(precedence)
+ return expression
+}
+
+func (p *Parser) noInfixParseFnError(t token.TokenType) {
+ msg := fmt.Sprintf("Mstari %d: Tumeshindwa kuparse %s", p.curToken.Line, t)
+ p.errors = append(p.errors, msg)
+}
+
+func (p *Parser) parseGroupedExpression() ast.Expression {
+ p.nextToken()
+
+ exp := p.parseExpression(LOWEST)
+
+ if !p.expectPeek(token.RPAREN) {
+ return nil
+ }
+
+ return exp
+}
+
+// postfix expressions
+
+func (p *Parser) parsePostfixExpression() ast.Expression {
+ expression := &ast.PostfixExpression{
+ Token: p.prevToken,
+ Operator: p.curToken.Literal,
+ }
+ return expression
+}
diff --git a/parser/parser_test.go b/src/parser/parser_test.go
similarity index 88%
rename from parser/parser_test.go
rename to src/parser/parser_test.go
index 19bd04e..631f765 100644
--- a/parser/parser_test.go
+++ b/src/parser/parser_test.go
@@ -14,9 +14,9 @@ func TestLetStatements(t *testing.T) {
expectedIdentifier string
expectedValue interface{}
}{
- {"acha x = 5;", "x", 5},
- {"acha y = x;", "y", "x"},
- {"acha bangi = y;", "bangi", "y"},
+ {"fanya x = 5;", "x", 5},
+ {"fanya y = x;", "y", "x"},
+ {"fanya bangi = y;", "bangi", "y"},
}
for _, tt := range tests {
@@ -43,8 +43,8 @@ func TestLetStatements(t *testing.T) {
}
func testLetStatement(t *testing.T, s ast.Statement, name string) bool {
- if s.TokenLiteral() != "acha" {
- t.Errorf("s.TokenLiteral not 'acha', got = %q", s.TokenLiteral())
+ if s.TokenLiteral() != "fanya" {
+ t.Errorf("s.TokenLiteral not 'fanya', got = %q", s.TokenLiteral())
return false
}
@@ -633,7 +633,7 @@ func TestIfElseExpression(t *testing.T) {
}
func TestFunctionLiteralParsing(t *testing.T) {
- input := `fn(x, y) {x + y}`
+ input := `unda(x, y) {x + y}`
l := lexer.New(input)
p := New(l)
@@ -678,9 +678,9 @@ func TestFunctionParameterParsing(t *testing.T) {
input string
expectedParams []string
}{
- {input: "fn() {};", expectedParams: []string{}},
- {input: "fn(x) {};", expectedParams: []string{"x"}},
- {input: "fn(x, y, z) {};", expectedParams: []string{"x", "y", "z"}},
+ {input: "unda() {};", expectedParams: []string{}},
+ {input: "unda(x) {};", expectedParams: []string{"x"}},
+ {input: "unda(x, y, z) {};", expectedParams: []string{"x", "y", "z"}},
}
for _, tt := range tests {
@@ -968,3 +968,96 @@ func TestParsingEmptyDict(t *testing.T) {
t.Errorf("Dict pairs has wrong length, got=%d", len(dict.Pairs))
}
}
+
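+// TestWhileLoop checks that a wakati loop parses into a WhileExpression with the expected condition and body.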
+func TestWhileLoop(t *testing.T) {
+ input := `wakati ( x > y ) { fanya x = 2 }`
+
+ l := lexer.New(input)
+ p := New(l)
+ program := p.ParseProgram()
+ checkParserErrors(t, p)
+
+ if len(program.Statements) != 1 {
+ t.Fatalf("program.Body does not contain %d statements. got=%d", 1, len(program.Statements))
+ }
+
+ stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+ if !ok {
+		t.Fatalf("program.Statements[0] is not ast.ExpressionStatement, got=%T", program.Statements[0])
+ }
+ exp, ok := stmt.Expression.(*ast.WhileExpression)
+
+ if !ok {
+ t.Fatalf("stmt.Expression is not ast.WhileExpression. got=%T", stmt.Expression)
+ }
+
+ if !testInfixExpression(t, exp.Condition, "x", ">", "y") {
+ return
+ }
+
+ if len(exp.Consequence.Statements) != 1 {
+ t.Errorf("Consequence is not 1 statements. got=%d\n", len(exp.Consequence.Statements))
+ }
+
+ consequence, ok := exp.Consequence.Statements[0].(*ast.LetStatement)
+
+ if !ok {
+		t.Fatalf("exp.Consequence.Statements[0] is not ast.LetStatement. got=%T", exp.Consequence.Statements[0])
+ }
+
+ if !testLetStatement(t, consequence, "x") {
+ t.Fatalf("exp.Consequence is not LetStatement")
+ }
+}
+
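+// TestShorthandAssignment verifies that compound assignments (+=, -=, *=, /=) and ++/-- parse without errors.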
+func TestShorthandAssignment(t *testing.T) {
+ input := []string{
+ "fanya x = 10; x *= 20;",
+ "fanya x = 5; x += 4;",
+ "fanya x = 7; x /= 2;",
+ "fanya x = 8; x -= 1;",
+ "fanya x = 5; x++;",
+ "fanya x = 3; x--;",
+ "fanya x = 40; fanya y = 13; x += y;"}
+
+ for _, txt := range input {
+ l := lexer.New(txt)
+ p := New(l)
+ _ = p.ParseProgram()
+ checkParserErrors(t, p)
+ }
+}
+
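+// TestForExpression checks that a kwa ... ktk loop parses into a ForIn node with the expected key and value.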
+func TestForExpression(t *testing.T) {
+ input := `kwa i, v ktk j {andika(i)}`
+
+ l := lexer.New(input)
+ p := New(l)
+ program := p.ParseProgram()
+ checkParserErrors(t, p)
+
+ if len(program.Statements) != 1 {
+ t.Fatalf("program.Body does not contain %d statements. got=%d", 1, len(program.Statements))
+ }
+
+ stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+ if !ok {
+		t.Fatalf("program.Statements[0] is not ast.ExpressionStatement, got=%T", program.Statements[0])
+ }
+
+ exp, ok := stmt.Expression.(*ast.ForIn)
+
+ if !ok {
+ t.Fatalf("stmt.Expression is not ast.ForIn. got=%T", stmt.Expression)
+ }
+
+ if exp.Key != "i" {
+ t.Fatalf("Wrong Key Index, expected 'i' got %s", exp.Key)
+ }
+
+ if exp.Value != "v" {
+ t.Fatalf("Wrong Value Index, expected 'v' got %s", exp.Value)
+ }
+}
diff --git a/src/parser/statements.go b/src/parser/statements.go
new file mode 100644
index 0000000..c129c4f
--- /dev/null
+++ b/src/parser/statements.go
@@ -0,0 +1,81 @@
+package parser
+
+import (
+ "fmt"
+
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
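+// parseStatement dispatches on the current token: fanya, rudisha, vunja and endelea have dedicated parsers; anything else is parsed as an expression statement.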
+func (p *Parser) parseStatement() ast.Statement {
+ // Remember to add switch statements to the language
+ switch p.curToken.Type {
+ case token.LET:
+		return p.parseLetStatement()
+ case token.RETURN:
+ return p.parseReturnStatement()
+ case token.BREAK:
+ return p.parseBreak()
+ case token.CONTINUE:
+ return p.parseContinue()
+ default:
+ return p.parseExpressionStatement()
+ }
+}
+
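+// parseLetStatement parses a declaration of the form `fanya <ident> = <expression>;`.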
+func (p *Parser) parseLetStatement() *ast.LetStatement {
+ stmt := &ast.LetStatement{Token: p.curToken}
+
+ if !p.expectPeek(token.IDENT) {
+ return nil
+ }
+
+ stmt.Name = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+
+ if !p.expectPeek(token.ASSIGN) {
+ return nil
+ }
+
+ p.nextToken()
+
+ stmt.Value = p.parseExpression(LOWEST)
+
+ if p.peekTokenIs(token.SEMICOLON) {
+ p.nextToken()
+ }
+
+ return stmt
+}
+
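+// parseReturnStatement parses `rudisha <expression>;`.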
+func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
+ stmt := &ast.ReturnStatement{Token: p.curToken}
+ p.nextToken()
+
+ stmt.ReturnValue = p.parseExpression(LOWEST)
+
+ if p.peekTokenIs(token.SEMICOLON) {
+ p.nextToken()
+ }
+
+ return stmt
+}
+
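+// parseBlockStatement collects statements until the closing '}', reporting an error if the input ends first.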
+func (p *Parser) parseBlockStatement() *ast.BlockStatement {
+ block := &ast.BlockStatement{Token: p.curToken}
+ block.Statements = []ast.Statement{}
+
+ p.nextToken()
+
+ for !p.curTokenIs(token.RBRACE) {
+ if p.curTokenIs(token.EOF) {
+ msg := fmt.Sprintf("Mstari %d: Hukufunga Mabano '}'", p.curToken.Line)
+ p.errors = append(p.errors, msg)
+ return nil
+ }
+ stmt := p.parseStatement()
+ block.Statements = append(block.Statements, stmt)
+ p.nextToken()
+ }
+
+ return block
+}
diff --git a/src/parser/string.go b/src/parser/string.go
new file mode 100644
index 0000000..cfd90f2
--- /dev/null
+++ b/src/parser/string.go
@@ -0,0 +1,9 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+)
+
+func (p *Parser) parseStringLiteral() ast.Expression {
+ return &ast.StringLiteral{Token: p.curToken, Value: p.curToken.Literal}
+}
diff --git a/src/parser/switch.go b/src/parser/switch.go
new file mode 100644
index 0000000..88ce460
--- /dev/null
+++ b/src/parser/switch.go
@@ -0,0 +1,89 @@
+package parser
+
+import (
+ "fmt"
+
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
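+// parseSwitchStatement parses `badili (<value>) { ikiwa <expr> { ... } kawaida { ... } }`, allowing several comma-separated expressions per case.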
+func (p *Parser) parseSwitchStatement() ast.Expression {
+ expression := &ast.SwitchExpression{Token: p.curToken}
+
+ if !p.expectPeek(token.LPAREN) {
+ return nil
+ }
+
+ p.nextToken()
+ expression.Value = p.parseExpression(LOWEST)
+
+ if expression.Value == nil {
+ return nil
+ }
+
+ if !p.expectPeek(token.RPAREN) {
+ return nil
+ }
+
+ if !p.expectPeek(token.LBRACE) {
+ return nil
+ }
+ p.nextToken()
+
+ for !p.curTokenIs(token.RBRACE) {
+
+ if p.curTokenIs(token.EOF) {
+ msg := fmt.Sprintf("Mstari %d: Haukufunga ENDAPO (SWITCH)", p.curToken.Line)
+ p.errors = append(p.errors, msg)
+ return nil
+ }
+ tmp := &ast.CaseExpression{Token: p.curToken}
+
+ if p.curTokenIs(token.DEFAULT) {
+
+ tmp.Default = true
+
+ } else if p.curTokenIs(token.CASE) {
+
+ p.nextToken()
+
+ if p.curTokenIs(token.DEFAULT) {
+ tmp.Default = true
+ } else {
+ tmp.Expr = append(tmp.Expr, p.parseExpression(LOWEST))
+ for p.peekTokenIs(token.COMMA) {
+ p.nextToken()
+ p.nextToken()
+ tmp.Expr = append(tmp.Expr, p.parseExpression(LOWEST))
+ }
+ }
+ } else {
+ msg := fmt.Sprintf("Mstari %d: Tulitegemea Kauli IKIWA (CASE) au KAWAIDA (DEFAULT) lakini tumepewa: %s", p.curToken.Line, p.curToken.Type)
+ p.errors = append(p.errors, msg)
+ return nil
+ }
+
+ if !p.expectPeek(token.LBRACE) {
+ return nil
+ }
+
+ tmp.Block = p.parseBlockStatement()
+ p.nextToken()
+ expression.Choices = append(expression.Choices, tmp)
+ }
+
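+	// A switch may declare at most one default (kawaida) case.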
+ count := 0
+ for _, c := range expression.Choices {
+ if c.Default {
+ count++
+ }
+ }
+ if count > 1 {
+ msg := fmt.Sprintf("Kauli ENDAPO (SWITCH) hua na kauli 'KAWAIDA' (DEFAULT) moja tu! Wewe umeweka %d", count)
+ p.errors = append(p.errors, msg)
+ return nil
+	}
+	return expression
+}
diff --git a/src/parser/while.go b/src/parser/while.go
new file mode 100644
index 0000000..3ae92fc
--- /dev/null
+++ b/src/parser/while.go
@@ -0,0 +1,29 @@
+package parser
+
+import (
+ "github.com/AvicennaJr/Nuru/ast"
+ "github.com/AvicennaJr/Nuru/token"
+)
+
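+// parseWhileExpression parses `wakati (<condition>) { ... }`.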
+func (p *Parser) parseWhileExpression() ast.Expression {
+ expression := &ast.WhileExpression{Token: p.curToken}
+
+ if !p.expectPeek(token.LPAREN) {
+ return nil
+ }
+
+ p.nextToken()
+ expression.Condition = p.parseExpression(LOWEST)
+
+ if !p.expectPeek(token.RPAREN) {
+ return nil
+ }
+
+ if !p.expectPeek(token.LBRACE) {
+ return nil
+ }
+
+ expression.Consequence = p.parseBlockStatement()
+
+ return expression
+}
diff --git a/repl/repl.go b/src/repl/repl.go
similarity index 93%
rename from repl/repl.go
rename to src/repl/repl.go
index bd99864..9700152 100644
--- a/repl/repl.go
+++ b/src/repl/repl.go
@@ -59,7 +59,9 @@ func Read(contents string) {
}
evaluated := evaluator.Eval(program, env)
if evaluated != nil {
- fmt.Println(colorfy(evaluated.Inspect(), 32))
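+		// Only echo results that are not null.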
+ if evaluated.Type() != object.NULL_OBJ {
+ fmt.Println(colorfy(evaluated.Inspect(), 32))
+ }
}
}
@@ -92,8 +94,10 @@ func Start(in io.Reader, out io.Writer) {
}
evaluated := evaluator.Eval(program, env)
if evaluated != nil {
- io.WriteString(out, colorfy(evaluated.Inspect(), 32))
- io.WriteString(out, "\n")
+ if evaluated.Type() != object.NULL_OBJ {
+ io.WriteString(out, colorfy(evaluated.Inspect(), 32))
+ io.WriteString(out, "\n")
+ }
}
}
}
diff --git a/src/token/token.go b/src/token/token.go
new file mode 100644
index 0000000..1620fde
--- /dev/null
+++ b/src/token/token.go
@@ -0,0 +1,102 @@
+package token
+
+type TokenType string
+
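+// Token records the token's type, literal text, and the source line it appeared on (used in error messages).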
+type Token struct {
+ Type TokenType
+ Literal string
+ Line int
+}
+
+const (
+ ILLEGAL = "HARAMU"
+ EOF = "MWISHO"
+
+ // Identifiers + literals
+ IDENT = "KITAMBULISHI"
+ INT = "NAMBA"
+ STRING = "NENO"
+ FLOAT = "DESIMALI"
+
+ // Operators
+ ASSIGN = "="
+ PLUS = "+"
+ MINUS = "-"
+ BANG = "!"
+ ASTERISK = "*"
+ POW = "**"
+ SLASH = "/"
+ MODULUS = "%"
+ LT = "<"
+ LTE = "<="
+ GT = ">"
+ GTE = ">="
+ EQ = "=="
+ NOT_EQ = "!="
+ AND = "&&"
+ OR = "||"
+ PLUS_ASSIGN = "+="
+ PLUS_PLUS = "++"
+ MINUS_ASSIGN = "-="
+ MINUS_MINUS = "--"
+ ASTERISK_ASSIGN = "*="
+ SLASH_ASSIGN = "/="
+ MODULUS_ASSIGN = "%="
+
+	// Delimiters
+ COMMA = ","
+ SEMICOLON = ";"
+ LPAREN = "("
+ RPAREN = ")"
+ LBRACE = "{"
+ RBRACE = "}"
+ LBRACKET = "["
+ RBRACKET = "]"
+ COLON = ":"
+ DOT = "."
+
+ // Keywords
+ FUNCTION = "FUNCTION"
+ LET = "FANYA"
+ TRUE = "KWELI"
+ FALSE = "SIKWELI"
+ IF = "KAMA"
+ ELSE = "SIVYO"
+ RETURN = "RUDISHA"
+ WHILE = "WAKATI"
+ NULL = "TUPU"
+ BREAK = "VUNJA"
+ CONTINUE = "ENDELEA"
+ IN = "KTK"
+ FOR = "KWA"
+ SWITCH = "BADILI"
+ CASE = "IKIWA"
+ DEFAULT = "KAWAIDA"
+)
+
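+// keywords maps Nuru's Swahili keywords to their token types.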
+var keywords = map[string]TokenType{
+ "unda": FUNCTION,
+ "fanya": LET,
+ "kweli": TRUE,
+ "sikweli": FALSE,
+ "kama": IF,
+ "au": ELSE,
+ "sivyo": ELSE,
+ "wakati": WHILE,
+ "rudisha": RETURN,
+ "vunja": BREAK,
+ "endelea": CONTINUE,
+ "tupu": NULL,
+ "ktk": IN,
+ "kwa": FOR,
+ "badili": SWITCH,
+ "ikiwa": CASE,
+ "kawaida": DEFAULT,
+}
+
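+// LookupIdent returns the keyword token type for ident, or IDENT if it is not a reserved word.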
+func LookupIdent(ident string) TokenType {
+ if tok, ok := keywords[ident]; ok {
+ return tok
+ }
+ return IDENT
+}
diff --git a/token/token.go b/token/token.go
deleted file mode 100644
index e9f006d..0000000
--- a/token/token.go
+++ /dev/null
@@ -1,70 +0,0 @@
-package token
-
-type TokenType string
-
-type Token struct {
- Type TokenType
- Literal string
-}
-
-const (
- ILLEGAL = "HARAMU"
- EOF = "MWISHO"
-
- // Identifiers + literals
- IDENT = "KITAMBULISHI"
- INT = "NAMBA"
- STRING = "NENO"
-
- // Operators
- ASSIGN = "="
- PLUS = "+"
- MINUS = "-"
- BANG = "!"
- ASTERISK = "*"
- SLASH = "/"
- LT = "<"
- GT = ">"
- EQ = "=="
- NOT_EQ = "!="
-
- //Delimiters
- COMMA = ","
- SEMICOLON = ";"
- LPAREN = "("
- RPAREN = ")"
- LBRACE = "{"
- RBRACE = "}"
- LBRACKET = "["
- RBRACKET = "]"
- COLON = ":"
-
- // Keywords
- FUNCTION = "FUNCTION"
- LET = "ACHA"
- TRUE = "KWELI"
- FALSE = "SIKWELI"
- IF = "KAMA"
- ELSE = "SIVYO"
- RETURN = "RUDISHA"
- WHILE = "WAKATI"
-)
-
-var keywords = map[string]TokenType{
- "fn": FUNCTION,
- "acha": LET,
- "kweli": TRUE,
- "sikweli": FALSE,
- "kama": IF,
- "au": ELSE,
- "sivyo": ELSE,
- "wakati": WHILE,
- "rudisha": RETURN,
-}
-
-func LookupIdent(ident string) TokenType {
- if tok, ok := keywords[ident]; ok {
- return tok
- }
- return IDENT
-}