change workflow to support other platforms
.github/workflows/release.yml (vendored, 37 changed lines)

@@ -11,7 +11,10 @@ on:
 
 jobs:
   build_and_release:
-    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macos-latest, windows-latest]
+    runs-on: ${{ matrix.os }}
 
     steps:
       - name: Checkout code
@@ -28,15 +31,18 @@ jobs:
         run: |
           python -m pip install --upgrade pip
           pip install conan
+        shell: ${{ matrix.os == 'windows-latest' && 'pwsh' || 'bash' }}
 
       - name: Configure Conan
-        run: |
-          conan profile detect
+        run: conan profile detect
+        shell: ${{ matrix.os == 'windows-latest' && 'pwsh' || 'bash' }}
 
       - name: Install dependencies and build with Conan
         run: |
           conan install . --build=missing
           conan build .
+        shell: ${{ matrix.os == 'windows-latest' && 'pwsh' || 'bash' }}
 
       - name: Determine if prerelease
         id: prerelease_check
         run: |
@@ -47,16 +53,39 @@ jobs:
            echo "false" > prerelease.txt
           fi
           echo "prerelease=$(cat prerelease.txt)" >> $GITHUB_OUTPUT
+        shell: ${{ matrix.os == 'windows-latest' && 'pwsh' || 'bash' }}
 
       - name: Archive build folder
         run: |
           TAG=${GITHUB_REF##refs/tags/}
+          if [[ "$RUNNER_OS" == "Linux" ]]; then
+          OS=linux
           ARCH=$(uname -m)
-          FOLDER_NAME="chloride-$TAG-linux-${ARCH}"
+          FOLDER_NAME="chloride-$TAG-$OS-$ARCH"
           TAR_NAME="$FOLDER_NAME.tar.gz"
           mv build/bin "$FOLDER_NAME"
           cp LICENSE "$FOLDER_NAME"
           tar -czf "$TAR_NAME" "$FOLDER_NAME"
+          elif [[ "$RUNNER_OS" == "macOS" ]]; then
+          OS=macos
+          ARCH=$(uname -m)
+          FOLDER_NAME="chloride-$TAG-$OS-$ARCH"
+          TAR_NAME="$FOLDER_NAME.tar.gz"
+          mv build/bin "$FOLDER_NAME"
+          cp LICENSE "$FOLDER_NAME"
+          tar -czf "$TAR_NAME" "$FOLDER_NAME"
+          else
+          # Windows
+          $TAG = $env:GITHUB_REF -replace 'refs/tags/', ''
+          $ARCH = if ([Environment]::Is64BitOperatingSystem) { 'x64' } else { 'x86' }
+          $FOLDER_NAME = "chloride-$TAG-windows-$ARCH"
+          $TAR_NAME = "$FOLDER_NAME.zip"
+          Rename-Item build\bin $FOLDER_NAME
+          Copy-Item LICENSE $FOLDER_NAME
+          Compress-Archive -Path $FOLDER_NAME -DestinationPath $TAR_NAME
+          fi
           echo "TAR_NAME=$TAR_NAME" >> $GITHUB_ENV
+        shell: ${{ matrix.os == 'windows-latest' && 'pwsh' || 'bash' }}
 
       - name: Create GitHub Release
         id: create_release
@@ -26,7 +26,7 @@ void darray_resize(DArray *arr, size_t new_size) {
 
   // Determine number of full chunks needed to store new_size elements
   size_t required_bytes = new_size * arr->element_size;
-  size_t new_capacity_bytes = ((required_bytes + CHUNK_SIZE - 1) / CHUNK_SIZE) * CHUNK_SIZE;
+  size_t new_capacity_bytes = required_bytes*2;
   size_t new_capacity = new_capacity_bytes / arr->element_size;
   if (!new_capacity) {
     return;
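Both darray resize hunks (this one and the matching darray_armem_resize change further down) swap round-up-to-CHUNK_SIZE for geometric growth: the array now reserves twice the bytes it actually needs. As a rough illustration of the trade-off, here is a minimal, self-contained sketch using the same doubling policy; the Vec type and vec_* helpers are assumptions for the example, not the project's DArray API. Doubling means a long run of appends triggers only O(log n) reallocations (amortized O(1) per append), at the cost of up to 2x slack memory, whereas chunk rounding reallocates every CHUNK_SIZE bytes.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Illustrative sketch only: a stand-alone dynamic array using the
 * "double what is required" growth policy the commit switches to. */
typedef struct {
  void *data;
  size_t element_size;
  size_t size;     /* elements in use */
  size_t capacity; /* elements allocated */
} Vec;

static int vec_reserve(Vec *v, size_t new_size) {
  if (new_size <= v->capacity)
    return 0; /* enough room already */
  /* Geometric growth: reserve twice the elements actually required. */
  size_t new_capacity = new_size * 2;
  void *p = realloc(v->data, new_capacity * v->element_size);
  if (!p)
    return -1;
  v->data = p;
  v->capacity = new_capacity;
  return 0;
}

static int vec_push(Vec *v, const void *elem) {
  if (vec_reserve(v, v->size + 1) != 0)
    return -1;
  memcpy((char *)v->data + v->size * v->element_size, elem, v->element_size);
  v->size++;
  return 0;
}

int main(void) {
  Vec v = {NULL, sizeof(int), 0, 0};
  for (int i = 0; i < 1000; i++)
    vec_push(&v, &i); /* only ~10 reallocations for 1000 pushes */
  printf("size=%zu capacity=%zu\n", v.size, v.capacity);
  free(v.data);
  return 0;
}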
src/main.c (18 changed lines)

@@ -158,7 +158,7 @@ int load_cache(Translated *translated_dest, char *joined_paths, uint64_t hash,
                char *source_path) {
   FILE *bytecode_file = fopen(joined_paths, "rb");
   if (!bytecode_file) {
-    printf("cache doesnt exist... compiling from source.\n");
+    fprintf(stderr,"cache doesnt exist... compiling from source.\n");
     return 1;
   }
 
@@ -194,7 +194,7 @@ int load_cache(Translated *translated_dest, char *joined_paths, uint64_t hash,
   XXH64_freeState(state);
 
   if (calc_hash != stored_hash) {
-    printf("cache hash mismatch (corrupted?)\n");
+    fprintf(stderr,"cache hash mismatch (corrupted?)\n");
     goto FAILED;
   }
 
@@ -268,11 +268,11 @@ int load_cache(Translated *translated_dest, char *joined_paths, uint64_t hash,
     goto FAILED;
   }
 
-  printf("cache exists and is valid, so will be used.\n");
+  fprintf(stderr,"cache exists and is valid, so will be used.\n");
   fclose(bytecode_file);
   return 0;
 FAILED:
-  printf("cache is invalid... compiling from source.\n");
+  fprintf(stderr,"cache is invalid... compiling from source.\n");
   fclose(bytecode_file);
   return 1;
 }
@@ -346,7 +346,7 @@ Execution execute(char *path, Stack *stack) {
   end = clock();
   time_spent = (double)(end - start) / CLOCKS_PER_SEC;
   total_time_spent += time_spent;
-  printf("Lexer time taken: %f seconds\n", time_spent);
+  fprintf(stderr,"Lexer time taken: %f seconds\n", time_spent);
   fclose(state.file);
 
   DArray ast;
@@ -363,7 +363,7 @@ Execution execute(char *path, Stack *stack) {
   end = clock();
   time_spent = (double)(end - start) / CLOCKS_PER_SEC;
   total_time_spent += time_spent;
-  printf("Parser time taken: %f seconds\n", time_spent);
+  fprintf(stderr,"Parser time taken: %f seconds\n", time_spent);
   darray_free(&tokens, free_token);
 
   start = clock();
@@ -380,7 +380,7 @@ Execution execute(char *path, Stack *stack) {
   end = clock();
   time_spent = (double)(end - start) / CLOCKS_PER_SEC;
   total_time_spent += time_spent;
-  printf("Translation time taken: %f seconds\n", time_spent);
+  fprintf(stderr,"Translation time taken: %f seconds\n", time_spent);
 
   darray_free(&ast, free_parsed);
 #if defined(__linux__)
@@ -452,8 +452,8 @@ Execution execute(char *path, Stack *stack) {
   end = clock();
   time_spent = (double)(end - start) / CLOCKS_PER_SEC;
   total_time_spent += time_spent;
-  printf("Execution time taken: %f seconds\n", time_spent);
-  printf("total time taken: %f seconds\n", total_time_spent);
+  fprintf(stderr,"Execution time taken: %f seconds\n", time_spent);
+  fprintf(stderr,"total time taken: %f seconds\n", total_time_spent);
 
   return (Execution){err, *main_scope};
 }
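Every diagnostic printf in load_cache and execute now goes to stderr, which keeps stdout reserved for whatever the interpreted program itself prints, so output can be piped or redirected without timing and cache chatter mixed in. A minimal sketch of the effect (the binary name and invocation in the comments are assumptions, not taken from the repository):

#include <stdio.h>
#include <time.h>

int main(void) {
  clock_t start = clock();

  /* program output -> stdout: this is what `./chloride prog.ar > out.txt`
   * would capture in out.txt */
  printf("hello world\n");

  /* diagnostics -> stderr: still visible on the terminal, kept out of out.txt */
  double time_spent = (double)(clock() - start) / CLOCKS_PER_SEC;
  fprintf(stderr, "Execution time taken: %f seconds\n", time_spent);
  return 0;
}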
@@ -23,7 +23,6 @@
 #include "return/return.h"
 #include "string/string.h"
 #include <gmp.h>
-#include <gmp.h>
 #include <stdbool.h>
 #include <stddef.h>
 #include <stdio.h>
@@ -75,6 +74,8 @@ ParsedValueReturn parse_token_full(char *file, DArray *tokens, size_t *index,
     return parse_if(file, tokens, index);
   case TOKEN_RETURN:
     return parse_return(file, tokens, index);
+  case TOKEN_LET:
+    return parse_declaration(file, tokens, index);
   default:
     break;
   };
@@ -121,9 +122,6 @@ ParsedValueReturn parse_token_full(char *file, DArray *tokens, size_t *index,
     (*index)++;
     output = parse_number(token, file);
     break;
-  case TOKEN_LET:
-    output = parse_declaration(file, tokens, index);
-    break;
   case TOKEN_DO:
     output = parse_dowrap(file, tokens, index);
     break;
@@ -36,8 +36,7 @@ void darray_armem_resize(darray_armem *arr, size_t new_size) {
   }
 
   size_t required_bytes = new_size * arr->element_size;
-  size_t new_capacity_bytes =
-      ((required_bytes + CHUNK_SIZE - 1) / CHUNK_SIZE) * CHUNK_SIZE;
+  size_t new_capacity_bytes =required_bytes*2;
   size_t new_capacity = new_capacity_bytes / arr->element_size;
 
   if (!new_capacity) {
@@ -10,12 +10,14 @@
 #include <stdio.h>
 #include <string.h>
 #include "string.h"
+#include "../number/number.h"
 
 ArgonObject *ARGON_STRING_TYPE = NULL;
 
 ArgonObject *new_string_object(char*data, size_t length) {
   ArgonObject * object = new_object();
   add_field(object, "__class__", ARGON_STRING_TYPE);
+  add_field(object, "length", new_number_object_from_long(length, 1));
   object->type = TYPE_STRING;
   object->value.as_str.data = ar_alloc_atomic(length);
   memcpy(object->value.as_str.data, data, length);
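The string constructor now attaches a length field (as a number object) alongside the raw bytes, presumably so Argon code can read a string's length without the runtime rescanning the data. More generally, carrying an explicit (data, length) pair instead of relying on a NUL terminator gives O(1) length lookups and allows embedded zero bytes. A small stand-alone sketch of that representation follows; the Str type is an assumption for illustration, not ArgonObject's actual layout.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical string value that carries its own length. */
typedef struct {
  char *data;
  size_t length; /* cached at construction, like the new "length" field */
} Str;

static Str str_new(const char *bytes, size_t length) {
  Str s;
  s.data = malloc(length);
  memcpy(s.data, bytes, length); /* binary-safe copy, no terminator needed */
  s.length = length;
  return s;
}

int main(void) {
  Str s = str_new("he\0llo", 6);     /* embedded NUL byte is preserved */
  printf("length: %zu\n", s.length); /* O(1), no strlen() scan */
  free(s.data);
  return 0;
}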
@@ -14,6 +14,7 @@ size_t translate_parsed_call(Translated *translated, ParsedCall *call,
   }
   push_instruction_byte(translated, OP_INIT_CALL);
   push_instruction_code(translated, call->args.size);
+  push_instruction_byte(translated, OP_NEW_SCOPE);
 
   DArray *old_return_jumps = translated->return_jumps;
   translated->return_jumps = NULL;
@@ -28,6 +29,7 @@ size_t translate_parsed_call(Translated *translated, ParsedCall *call,
   }
 
   translated->return_jumps = old_return_jumps;
+  push_instruction_byte(translated, OP_POP_SCOPE);
 
   push_instruction_byte(translated, OP_SOURCE_LOCATION);
   push_instruction_code(translated, call->line);
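translate_parsed_call now brackets the emitted call with OP_NEW_SCOPE and OP_POP_SCOPE, so names bound while the call runs (arguments and locals such as u and z in the new testing.ar below) live in a scope of their own and disappear when the call returns. A rough, self-contained sketch of that scope-stack idea; the Scope struct and helper names are assumptions for illustration, not Argon's VM code.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define MAX_VARS 8

typedef struct Scope {
  struct Scope *parent;
  const char *names[MAX_VARS];
  int values[MAX_VARS];
  int count;
} Scope;

static Scope *scope_push(Scope *parent) { /* what OP_NEW_SCOPE would do */
  Scope *s = calloc(1, sizeof(Scope));
  s->parent = parent;
  return s;
}

static Scope *scope_pop(Scope *s) {       /* what OP_POP_SCOPE would do */
  Scope *parent = s->parent;
  free(s);
  return parent;
}

static void scope_set(Scope *s, const char *name, int value) {
  if (s->count < MAX_VARS) {
    s->names[s->count] = name;
    s->values[s->count] = value;
    s->count++;
  }
}

static int scope_get(const Scope *s, const char *name, int *out) {
  for (; s; s = s->parent)                /* walk outward through parents */
    for (int i = 0; i < s->count; i++)
      if (strcmp(s->names[i], name) == 0) { *out = s->values[i]; return 1; }
  return 0;
}

int main(void) {
  Scope *global = scope_push(NULL);
  scope_set(global, "x", 1);

  Scope *call = scope_push(global);       /* entering a call */
  scope_set(call, "arg", 42);             /* argument binding stays local */

  int v;
  printf("arg visible inside call: %d\n", scope_get(call, "arg", &v));   /* 1 */
  call = scope_pop(call);                 /* leaving the call */
  printf("arg visible after call:  %d\n", scope_get(global, "arg", &v)); /* 0 */

  scope_pop(global);
  return 0;
}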
testing.ar (11 changed lines)

@@ -1 +1,10 @@
-term.log("hello "+"world")
+let say_hi(name) = do
+    let z(y) = do
+        return y
+    let u = z(
+        do
+            return name
+    )
+    return "hello "+u+", how are you?"
+
+term.log(say_hi("william"))