Skip to content

Commit

Permalink
py wrapper: autodetect sys
Browse files Browse the repository at this point in the history
  • Loading branch information
alimpfard committed Jul 22, 2020
1 parent 4108fa2 commit 968d50c
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 3 deletions.
16 changes: 14 additions & 2 deletions wrapper/python/nlex/wrap/generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,20 @@
import zipfile
from zipfile import ZipExtFile
import hashlib
import platform

class NLexTokenizerCreationException(Exception):
    """Raised when an NLex tokenizer cannot be created."""

_default_options = {
'Linux': {'output_file': 'tokenizer.so', 'sys': ''},
'Windows': {'output_file': 'tokenizer.dll', 'sys': 'windows'},
}.get(platform.system(), None)

if _default_options is None:
print(f"Running on unsupported system ({platform.system()}), this is likely to blow up")
_default_options = {'output_file': 'tokenizer', 'sys': ''}

# Monkeypatch zipfile to skip CRC verification while reading archive members,
# by replacing the per-read checksum update with a no-op.
# NOTE(review): presumably the archives served by the compiler server carry
# CRCs that fail validation — confirm before removing this workaround.
ZipExtFile._update_crc = lambda self, *args: None

class callout:
Expand Down Expand Up @@ -45,16 +55,18 @@ def compile_and_download(identifier, output_file, compiler_server):
f.extract('tokenizer.so')
os.rename('tokenizer.so', output_file)
os.chmod(output_file, 0o755)
os.remove(output_file + '.zip')
return os.path.realpath(output_file)
except:
raise



def NLexTokenizer(*args,
arch='x64', vendor='', sys='', target='',
arch='x64', vendor='', sys=_default_options['sys'], target='',
object_format='', library='on', cpu='generic', relocation_model='pic',
features='', output_file='tokenizer', compiler_server='https://nlex.herokuapp.com'):
features='', output_file=_default_options['output_file'],
compiler_server='https://nlex.herokuapp.com'):
if len(args) == 0:
return lambda fn: NLexTokenizer(fn,
arch=arch, vendor=vendor, sys=sys, target=target, relocation_model=relocation_model,
Expand Down
2 changes: 1 addition & 1 deletion wrapper/python/test1.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import nlex

@nlex.NLexTokenizer(sys='windows')
@nlex.NLexTokenizer
def tokenize(inp, process_docs):
"""
foo :: [124]
Expand Down

0 comments on commit 968d50c

Please sign in to comment.