Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
Import('envService')
import os
# Grammar compilation is done with SIDE, which is a Java component, so we
# start from the shared Java-enabled construction environment. Clone() gives
# us a private copy so the LANGUAGE/GRAMMAR_* settings applied below do not
# leak into other SConscripts sharing envService's environment.
env = envService.get_envJava().Clone()
- ########################################################################
- # Return all source files needed for grammar compilation
- # sideLibrary.jar is needed to compile grammar with verbyx
- ########################################################################
########################################################################
# Return all source files needed for grammar compilation.
# sideLibrary.jar is needed to compile grammar with verbyx.
########################################################################
def _get_source_files(patterns=('*.grammar', '*.dictionary')):
    """Return the SCons File nodes that feed the grammar compilation.

    :param patterns: glob patterns, resolved relative to
        env['GRAMMAR_SRC_DIR'], selecting the grammar sources. The default
        preserves the original behavior (grammar + dictionary files).
    :return: list of SCons nodes, in pattern order.
    """
    src_dir = env['GRAMMAR_SRC_DIR']
    sources = []
    # One Glob per pattern instead of a copy-pasted call per extension.
    for pattern in patterns:
        sources += env.Glob(os.path.join(src_dir, pattern))
    return sources
- ########################################################################
- # Get files generated by grammar compilation.
- # They will be removed when calling scons -c
- ########################################################################
########################################################################
# Files generated by grammar compilation.
# They are removed when calling 'scons -c'.
########################################################################
def _get_verbyx_files_to_clean():
    """Return absolute paths of verbyx by-products to delete on clean."""
    generated = ['LINK/data/SOURCES']
    return env.getAbsolutePath(generated)
- ########################################################################
- # Get target generated by grammar compilation.
- # They will be removed when calling scons -c
- ########################################################################
########################################################################
# Targets generated by grammar compilation.
# They are removed when calling 'scons -c'.
########################################################################
def _get_verbyx_target():
    """Return absolute paths of the nodes the verbyx builder produces.

    :return: whatever env.getAbsolutePath yields for the six output
        entries under LINK/data/Grammars/abnf/bin/<LANGUAGE>/<GRAMMAR_NAME>/.
    """
    # Hoist the common prefix instead of repeating it six times (the
    # original duplicated the concatenation per entry).
    base = ('LINK/data/Grammars/abnf/bin/' + env['LANGUAGE'] + '/' +
            env['GRAMMAR_NAME'])
    names = ['autopron', 'grammar', 'lexicon', 'package', 'semantic',
             'version.txt']
    return env.getAbsolutePath([base + '/' + name for name in names])
- ########################################################################
- # Get files generated by grammar compilation.
- # They will be removed when calling scons -c
- ########################################################################
########################################################################
# Files generated by grammar compilation.
# They are removed when calling 'scons -c'.
########################################################################
# NOTE(review): unlike the sibling helpers this name has no leading
# underscore; kept as-is in case callers outside this file use it.
def get_nuance_files_to_clean():
    """Return absolute paths of nuance by-products to delete on clean."""
    return env.getAbsolutePath(['LINK/data/Grammars/bin'])
########################################################################
############################ MAIN ####################################
########################################################################
# Grammar build configuration for the 'faa' English grammar.
env['LANGUAGE'] = 'en'
env['LANGUAGE_PACK'] = 'English.America.3.3.0'
env['GRAMMAR_NAME'] = 'faa'
env['GRAMMAR_PRJ'] = 'faa'
env['GRAMMAR_SRC_DIR'] = os.path.join(env['LOCALROOT'], 'sr', 'sra', 'src',
                                      'grammars', env['LANGUAGE'],
                                      env['GRAMMAR_PRJ'])
env['DEPLOY_PATH'] = os.path.join(env['LOCALROOT'], 'sr', 'sra', 'src',
                                  'grammars')
env['GRAMMAR_FOLDER_NAME'] = 'grammars'

# Compile the grammar using the project's SCons builder (Command mechanism).
vrxTgt = env.buildGrammarForVrx(_get_verbyx_target(),
                                _get_source_files())

# Debug trace: list the nodes registered as build targets.
# (Parenthesized single-arg print behaves identically on Python 2 and 3.)
print('myTargets:')
for f in vrxTgt:
    print(str(f))
print('endMyTargets')

# Register the generated files for removal when invoking 'scons -c'.
env.Clean(vrxTgt, _get_verbyx_files_to_clean())
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement