Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- #$load_pysite
- import imp
- import os
- import sys
- import warnings
- from os.path import dirname
- from site import addsitedir
- def _get_pio_site_packages_dir():
- if os.getenv("PIOCOREPYSITEDIR"):
- return os.getenv("PIOCOREPYSITEDIR")
- data=imp.find_module("platformio")
- if data:
- return dirname(data[1])
- return None
# Promote the PlatformIO core site-packages directory to the front of
# sys.path so its packages shadow any system-wide copies.
pio_sp_dir = _get_pio_site_packages_dir()
if pio_sp_dir:
    if pio_sp_dir in sys.path:
        sys.path.remove(pio_sp_dir)
    sys.path.insert(0, pio_sp_dir)
del pio_sp_dir
# Honor an extra site dir passed via environment (presumably set by a
# launcher/wrapper script — TODO confirm against the caller); addsitedir
# also processes any .pth files inside it.
if os.getenv("PYTHONPYSITEDIR"):
    warnings.simplefilter("ignore")  # suppress warnings emitted while adding the dir
    addsitedir(os.getenv("PYTHONPYSITEDIR"))
    sys.path.insert(0, os.getenv("PYTHONPYSITEDIR"))
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$config
- import sys
# Package metadata for the PIO Plus distribution.
VERSION = (1, 5, 3)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio-plus"
__description__ = "PIO Plus"
__url__ = "https://pioplus.com"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__license__ = "Proprietary License"
__copyright__ = "Copyright 2014-present PlatformIO"

# Remote-service endpoints and keep-alive tuning.
CLOUD_API_ENDPOINT = "https://api.pioplus.com"
CLOUD_API_VERSION = 1
PIOREMOTE_HOST = "rs.pioplus.com"
PIOREMOTE_PORT = 8813
PIOREMOTE_PING_DELAY = 60  # seconds between keep-alive pings (see RemoteClientBase.restart_ping)
PIOREMOTE_PING_MAX_FAILURES = 3  # consecutive failed pings before the connection is dropped

DEBUG = False
if DEBUG:
    # Point at a locally running development stack.
    CLOUD_API_ENDPOINT = "http://127.0.0.1:8013"
    PIOREMOTE_HOST = "localhost"

# This code base is Python 2 only: refuse to start on anything but 2.7.x.
if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
    msg = ("PlatformIO version %s does not run under Python version %s.\n" "Python 3 is not yet supported.\n")
    sys.stderr.write(msg % (__version__, sys.version.split()[0]))
    sys.exit(1)
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$exception
- from platformio.exception import PlatformioException
class TestDirNotExists(PlatformioException):
    """Raised when the project has no test directory to collect tests from."""
    # {0} is filled with the expected test-directory path.
    MESSAGE = "A test folder '{0}' does not exist.\nPlease create 'test' " "directory in project's root and put a test set.\n" "More details about Unit " "Testing: http://docs.platformio.org/page/plus/" "unit-testing.html"
class AccountPermissionError(PlatformioException):
    """Raised when the logged-in account lacks the permission bit for a command."""
    MESSAGE = "You do not have permission for this operation. Please use " "`pio account show` command to check current permissions.\n" "Further details: https://pioplus.com/pricing.html " "or support@pioplus.com"
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$requests_threads
- from requests import Session
- from twisted.internet import threads
- from twisted.internet.defer import ensureDeferred
class AsyncSession(Session):
    """requests.Session whose request() runs inside the Twisted thread pool.

    Every HTTP call is deferred to a worker thread, so request() returns a
    twisted Deferred that fires with the Response instead of blocking.
    """
    def __init__(self, n=None, *args, **kwargs):
        # When a pool size is given, grow the reactor's thread pool up to n
        # workers (minimum stays 0).
        if n:
            from twisted.internet import reactor
            pool = reactor.getThreadPool()
            pool.adjustPoolsize(0, n)
        super(AsyncSession, self).__init__(*args, **kwargs)

    def request(self, *args, **kwargs):
        # Run the blocking Session.request in a worker thread; the caller
        # gets a Deferred instead of a Response.
        func = super(AsyncSession, self).request
        return threads.deferToThread(func, *args, **kwargs)

    def wrap(self, *args, **kwargs):
        # Thin pass-through to twisted's ensureDeferred (expects a coroutine
        # or Deferred as the first positional argument).
        return ensureDeferred(*args, **kwargs)
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$cmd_account
- import json
- import os
- import re
- import socket
- import struct
- from binascii import hexlify,unhexlify
- from datetime import datetime
- from hashlib import sha1
- from time import time
- import click
- from platformio import __author__,__title__,app
- from platformio.exception import ReturnErrorCode
- from platformio.util import get_api_result
- import config
- from exception import AccountPermissionError
# ContentCache key under which the cached account-info blob is stored.
ACCOUNT_DATA_KEY = "a8899e216a19f0646172ca5c13aa0ee19a808f3b"


def get_host_id():
    """Return a stable SHA1 host identifier derived from client id + hostname."""
    id_ = sha1(app.get_cid())
    id_.update(get_host_name())
    return id_.hexdigest()
def get_host_name():
    """Return the local host name, truncated to 255 characters."""
    hostname = socket.gethostname()
    return str(hostname)[:255]
def store_account_data(data):
    """Cache the account-info dict as JSON with a 3-day expiry."""
    with app.ContentCache() as cc:
        cc.set(ACCOUNT_DATA_KEY, json.dumps(data), "3d")
def restore_account_data():
    """Return the cached account-info dict, or None when missing or corrupt."""
    with app.ContentCache() as cc:
        raw = cc.get(ACCOUNT_DATA_KEY)
        if not raw:
            return None
        # Some cache backends may hand back the dict directly.
        if isinstance(raw, dict):
            return raw
        try:
            return json.loads(raw)
        except ValueError:
            return None
def delete_account_data():
    """Drop the cached account-info blob; returns the cache's delete result."""
    with app.ContentCache() as cc:
        return cc.delete(ACCOUNT_DATA_KEY)
# Thin persistence helpers around app state: the session token ("stoken")
# and logged-in username survive between CLI invocations.

def store_session_token(token):
    app.set_state_item("stoken", token)


def restore_session_token():
    return app.get_state_item("stoken")


def delete_session_token():
    return app.delete_state_item("stoken")


def store_username(username):
    app.set_state_item("username", username)


def restore_username():
    return app.get_state_item("username")


def delete_username():
    return app.delete_state_item("username")
def pack_session_token(host_id, session_id, permission_bitmask, expire):
    """Serialize a session token as 40 hex digest chars + 24 hex payload chars.

    The payload is three big-endian uint32s (session id, permission bitmask,
    expiry timestamp); the leading SHA1 digest binds them to this host id and
    the app title. NOTE: Python 2 only (basestring, str-based hashing).
    """
    # AssertionError here is deliberately caught by unpack_session_token.
    assert isinstance(host_id, basestring) and len(host_id) == 40
    hmac = sha1(str(host_id))
    hmac.update(str(session_id))
    hmac.update(str(permission_bitmask))
    hmac.update(str(expire))
    hmac.update("PlatformIO" + str(__title__))
    return "%s%s" % (hmac.hexdigest(), hexlify(struct.pack("!III", session_id, permission_bitmask, expire)))
def unpack_session_token(token, host_id=None):
    """Parse a 64-char session token; returns its fields or None when invalid.

    Layout matches pack_session_token: 40 digest chars + 24 hex payload chars.
    When host_id is given, the digest is recomputed to verify the token was
    issued for this host.
    """
    if not token or len(token) != 64:
        return None
    try:
        (session_id, permission_bitmask, expire) = struct.unpack("!III", unhexlify(token[40:]))
        if (host_id and token[0:40] != pack_session_token(host_id, session_id, permission_bitmask, expire)[0:40]):
            return None
    # AssertionError comes from pack_session_token's host_id check;
    # struct.error from a malformed payload.
    except (AssertionError, struct.error):
        return None
    return {"session_id": session_id, "permission_bitmask": permission_bitmask, "expire": expire}
def get_session_data():
    """Return the current (unexpired) session dict, auto-logging-in if possible.

    Falls back to token authentication via the PLATFORMIO_AUTH_TOKEN
    environment variable when the stored session is missing or expired.
    May return stale/None data when no re-authentication path exists.
    """
    data = unpack_session_token(restore_session_token(), get_host_id())
    if data and data['expire'] > time():
        return data
    if os.getenv("PLATFORMIO_AUTH_TOKEN"):
        click.echo("Authenticating using token")
        if not account_login("token", os.getenv("PLATFORMIO_AUTH_TOKEN")):
            return None
        return unpack_session_token(restore_session_token(), get_host_id())
    return data
def cmd_check_permission(cmd_ctx):
    """Gate a CLI invocation on login state and the account's permission bits.

    Raises ReturnErrorCode(1) when not logged in, AccountPermissionError when
    the session lacks the bit for the requested command; returns True otherwise.
    """
    assert cmd_ctx.args
    # Commands usable without a session: `home` and the account
    # login/register/forgot flow itself.
    allowed_conds = [cmd_ctx.args[0] == "home", len(cmd_ctx.args) > 1 and cmd_ctx.args[0] == "account" and cmd_ctx.args[1] in ("login", "register", "forgot")]
    if any(allowed_conds):
        return True
    data = get_session_data()
    if not data or data['expire'] < time():
        click.secho("You are not logged in. Please log in to PIO Account using " "PlatformIO IDE > PIO Home > Account or `pio account login` " "command and try again. \n\nIf you don't have PIO Account yet, " "please create it using PlatformIO IDE > PIO Home > Account " "or `pio account register` command.", fg="red", err=True)
        click.echo("Details: %s" % click.style("http://docs.platformio.org/page/" "userguide/account/index.html", fg="cyan"))
        raise ReturnErrorCode(1)
    # Permission bits: 1 = unit testing, 2 = remote testing, 4 = debugging.
    permission_conds = [cmd_ctx.args[0] == "test" and not data['permission_bitmask'] & 1, (cmd_ctx.args[0] == "remote" and "test" in cmd_ctx.args and not data['permission_bitmask'] & 2), cmd_ctx.args[0] == "debug" and not data['permission_bitmask'] & 4]
    if any(permission_conds):
        raise AccountPermissionError()
    return True
def cmd_validate_email(ctx, param, value):
    """Click option callback: strip and minimally validate an e-mail address."""
    address = str(value).strip()
    if re.match(r"^[^@]+@[^@]+\.[^@]+$", address):
        return address
    raise click.BadParameter("Invalid E-Mail address")
def get_account_info(offline=False):
    """Return account info, merging cached data with the logged-in username.

    Unless offline=True, refreshes from the cloud API when the cached data
    has no "groups" key, and re-caches the merged result.
    """
    info = restore_account_data() or {}
    info['username'] = restore_username()
    if not info['username']:
        # Not logged in: drop the empty key rather than report username=None.
        del info['username']
    if not offline and "groups" not in info:
        result = get_api_result("%s/v%d/account/info" % (config.CLOUD_API_ENDPOINT, config.CLOUD_API_VERSION), params=dict(stoken=restore_session_token(), host_id=get_host_id()))
        info.update(result)
        store_account_data(info)
    return info
@click.group("account", short_help="Manage PIO Account")
def cli():
    """Root of the `pio account` command group; subcommands attach below."""
    pass
@cli.command("register", short_help="Create new PIO Account")
@click.option("-u", "--username", prompt="E-Mail", callback=cmd_validate_email)
def account_register(username, dummy=None):
    """Register a new account via the cloud API (confirmation arrives by e-mail)."""
    result = get_api_result("%s/v%d/account/register" % (config.CLOUD_API_ENDPOINT, config.CLOUD_API_VERSION), data=dict(username=username, host_id=get_host_id(), host_name=get_host_name()))
    if "result" in result:
        click.secho("Successfully registered! \n" "Please check your E-Mail for the further instructions", fg="green")
@cli.command("login", short_help="Log in to PIO Account")
@click.option("-u", "--username", prompt="E-Mail", callback=cmd_validate_email)
@click.option("-p", "--password", prompt=True, hide_input=True)
def cmd_account_login(username, password):
    """CLI wrapper around account_login (which is also called programmatically)."""
    return account_login(username, password)
def account_login(username, password):
    """Authenticate against the cloud API and persist username + session token.

    Returns True on success; asserts that the API response carries "stoken".
    """
    result = get_api_result("%s/v%d/account/login" % (config.CLOUD_API_ENDPOINT, config.CLOUD_API_VERSION), data=dict(username=username, password=password, host_id=get_host_id(), host_name=get_host_name()))
    assert "stoken" in result
    store_username(username)
    store_session_token(result['stoken'])
    click.secho("Successfully authorized!", fg="green")
    return True
@cli.command("logout", short_help="Log out of PIO Account")
def account_logout():
    """Invalidate the session remotely, then clear all local account state."""
    result = get_api_result("%s/v%d/account/logout" % (config.CLOUD_API_ENDPOINT, config.CLOUD_API_VERSION), data=dict(stoken=restore_session_token(), host_id=get_host_id()))
    # Local state is cleared only after the API call returns.
    delete_session_token()
    delete_username()
    delete_account_data()
    if "result" in result:
        click.secho("Successfully unauthorized!", fg="green")
@cli.command("password", short_help="Change password")
@click.option("--old-password", prompt="Old password", hide_input=True)
@click.option("--new-password", prompt="New password", hide_input=True, confirmation_prompt=True)
def account_password(old_password, new_password):
    """Change the account password through the cloud API."""
    result = get_api_result("%s/v%d/account/password" % (config.CLOUD_API_ENDPOINT, config.CLOUD_API_VERSION), data=dict(stoken=restore_session_token(), host_id=get_host_id(), old_password=old_password, new_password=new_password))
    if "result" in result:
        click.secho("Successfully updated password!", fg="green")
@cli.command("token", short_help="Get or regenerate Personal Authentication Token")
@click.option("-p", "--password", prompt=True, hide_input=True)
@click.option("--regenerate", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_token(password, regenerate, json_output):
    """Fetch (or regenerate) the personal auth token; prints it as text or JSON."""
    result = get_api_result("%s/v%d/account/token" % (config.CLOUD_API_ENDPOINT, config.CLOUD_API_VERSION), data=dict(stoken=restore_session_token(), host_id=get_host_id(), password=password, regenerate=int(regenerate)))
    if "token" not in result:
        return None
    if json_output:
        return click.echo(json.dumps({"status": "success", "result": result['token']}))
    click.echo("Personal Authentication Token: %s" % result['token'])
    return True
@cli.command("forgot", short_help="Forgot password")
@click.option("-u", "--username", prompt="E-Mail", callback=cmd_validate_email)
def account_forgot(username, dummy=None):
    """Trigger a password-reset e-mail via the cloud API."""
    result = get_api_result("%s/v%d/account/forgot" % (config.CLOUD_API_ENDPOINT, config.CLOUD_API_VERSION), data=dict(username=username, host_id=get_host_id(), host_name=get_host_name()))
    if "result" in result:
        # Deliberately does not reveal whether the account exists.
        click.secho("If this account is registered, we will send the " "further instructions to your E-Mail.", fg="green")
@cli.command("show", short_help="PIO Account information")
@click.option("--offline", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_show(offline, json_output):
    """Print account info (username, plan, groups) as text or JSON."""
    info = get_account_info(offline)
    if json_output:
        return click.echo(json.dumps(info))
    if "username" in info:
        click.echo()
        click.echo("Logged as: %s" % info['username'])
        if "currentPlan" in info:
            click.echo("PIO Plus Plan: %s" % info['currentPlan'])
        click.echo()
    if "groups" in info:
        for group in info['groups']:
            click.echo("Group %s" % click.style(group['name'], fg="cyan"))
            click.echo("-" * (6 + len(group['name'])))  # underline matches "Group <name>"
            if group['expire']:
                click.echo("Expire: %s" % datetime.fromtimestamp(int(group['expire'])).strftime("%Y-%m-%d %H:%M:%S"))
            else:
                click.echo("Expire: -")
            click.echo("Permissions: %s" % ", ".join(group['permissions']))
            click.echo()
    if info.get("upgradePlan"):
        click.echo("UPGRADE: %s" % click.style("https://pioplus.com/pricing.html", fg="blue"))
        click.echo()
    return True
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$cmd_test
- import atexit
- from fnmatch import fnmatch
- from os import getcwd,listdir,remove
- from os.path import isdir,isfile,join
- from string import Template
- from time import sleep,time
- import click
- import serial
- from platformio import exception,util
- from platformio.commands.run import check_project_envs
- from platformio.commands.run import cli as cmd_run
- from platformio.commands.run import print_header
- from platformio.managers.platform import PlatformFactory
- from exception import TestDirNotExists
# Per-framework code fragments substituted into the output_export.cpp template
# (see TestProcessorBase.generate_outputcpp): how to include the serial
# facility, declare its object, and emit / flush / open / close the stream.
TRANSPORT_OPTIONS = {
    "arduino": {
        "include": "#include <Arduino.h>",
        "object": "",
        "putchar": "Serial.write(c)",
        "flush": "Serial.flush()",
        "begin": "Serial.begin($baudrate)",
        "end": "Serial.end()",
    },
    "mbed": {
        "include": "#include <mbed.h>",
        "object": "Serial pc(USBTX, USBRX);",
        "putchar": "pc.putc(c)",
        "flush": "",
        "begin": "pc.baud($baudrate)",
        "end": "",
    },
    "energia": {
        "include": "#include <Energia.h>",
        "object": "",
        "putchar": "Serial.write(c)",
        "flush": "Serial.flush()",
        "begin": "Serial.begin($baudrate)",
        "end": "Serial.end()",
    },
    "espidf": {
        "include": "#include <stdio.h>",
        "object": "",
        "putchar": "putchar(c)",
        "flush": "fflush(stdout)",
        "begin": "",
        "end": "",
    },
    "native": {
        "include": "#include <stdio.h>",
        "object": "",
        "putchar": "putchar(c)",
        "flush": "fflush(stdout)",
        "begin": "",
        "end": "",
    },
    # "custom" delegates everything to a user-provided unittest_transport.h.
    "custom": {
        "include": '#include "unittest_transport.h"',
        "object": "",
        "putchar": "unittest_uart_putchar(c)",
        "flush": "unittest_uart_flush()",
        "begin": "unittest_uart_begin()",
        "end": "unittest_uart_end()",
    },
}
@click.command("test", short_help="Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option("--filter", "-f", multiple=True, metavar="<pattern>", help="Filter tests by a pattern")
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>", help="Ignore tests by a pattern")
@click.option("--upload-port")
@click.option("--test-port")
@click.option("-d", "--project-dir", default=getcwd, type=click.Path(exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True))
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--without-testing", is_flag=True)
@click.option("--no-reset", is_flag=True)
@click.option("--monitor-rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state for Serial Monitor")
@click.option("--monitor-dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state for Serial Monitor")
@click.option("--verbose", "-v", is_flag=True)
@click.pass_context
def cli(ctx, environment, ignore, filter, upload_port, test_port, project_dir, without_building, without_uploading, without_testing, no_reset, monitor_rts, monitor_dtr, verbose):
    """Run unit tests: build/upload/monitor each test for each matching env.

    NOTE(review): indentation reconstructed from a minified paste — nesting
    of the `with util.cd(...)` block follows the upstream open-source
    platformio test command; confirm against the original.
    """
    # Load project config and test names from inside the project directory.
    with util.cd(project_dir):
        test_dir = util.get_projecttest_dir()
        if not isdir(test_dir):
            raise TestDirNotExists(test_dir)
        test_names = get_test_names(test_dir)
        projectconf = util.load_project_config()
        env_default = None
        if projectconf.has_option("platformio", "env_default"):
            env_default = util.parse_conf_multi_values(projectconf.get("platformio", "env_default"))
        assert check_project_envs(projectconf, environment or env_default)
    click.echo("Verbose mode can be enabled via `-v, --verbose` option")
    click.echo("Collected %d items" % len(test_names))
    start_time = time()
    # Each result is (status, testname, envname); status None means skipped.
    results = []
    for testname in test_names:
        for section in projectconf.sections():
            if not section.startswith("env:"):
                continue
            # Merge CLI patterns with per-env test_filter / test_ignore options.
            patterns = dict(filter=list(filter), ignore=list(ignore))
            for key in patterns:
                if projectconf.has_option(section, "test_%s" % key):
                    patterns[key].extend([p.strip() for p in projectconf.get(section, "test_%s" % key).split(", ") if p.strip()])
            envname = section[4:]
            # Skip when the env was not selected or the test name is
            # filtered out / explicitly ignored ("*" means "all tests").
            skip_conditions = [environment and envname not in environment, not environment and env_default and envname not in env_default, testname != "*" and patterns['filter'] and not any([fnmatch(testname, p) for p in patterns['filter']]), testname != "*" and any([fnmatch(testname, p) for p in patterns['ignore']]), ]
            if any(skip_conditions):
                results.append((None, testname, envname))
                continue
            cls = (NativeTestProcessor if projectconf.get(section, "platform") == "native" else EmbeddedTestProcessor)
            tp = cls(ctx, testname, envname, dict(project_config=projectconf, project_dir=project_dir, upload_port=upload_port, test_port=test_port, without_building=without_building, without_uploading=without_uploading, without_testing=without_testing, no_reset=no_reset, monitor_rts=monitor_rts, monitor_dtr=monitor_dtr, verbose=verbose))
            results.append((tp.process(), testname, envname))
    if without_testing:
        return
    # Summary table: PASSED / FAILED / IGNORED per (test, env) pair.
    click.echo()
    print_header("[%s]" % click.style("TEST SUMMARY"))
    passed = True
    for result in results:
        status, testname, envname = result
        status_str = click.style("PASSED", fg="green")
        if status is False:
            passed = False
            status_str = click.style("FAILED", fg="red")
        elif status is None:
            status_str = click.style("IGNORED", fg="yellow")
        click.echo("test/%s/env:%s\t[%s]" % (click.style(testname, fg="yellow"), click.style(envname, fg="cyan"), status_str), err=status is False)
    print_header("[%s] Took %.2f seconds" % ((click.style("PASSED", fg="green", bold=True) if passed else click.style("FAILED", fg="red", bold=True)), time() - start_time), is_error=not passed)
    if not passed:
        raise exception.ReturnErrorCode(1)
class TestProcessorBase(object):
    """Shared logic for running one test in one environment.

    Subclasses implement process()/run(); this base handles transport
    selection, output parsing, and generation of output_export.cpp.
    """
    # Default serial speed when the env has no test_speed option.
    DEFAULT_BAUDRATE = 115200

    def __init__(self, cmd_ctx, testname, envname, options):
        self.cmd_ctx = cmd_ctx
        # Marks the click context so downstream commands know a test run is active.
        self.cmd_ctx.meta['piotest_processor'] = True
        self.test_name = testname
        self.options = options
        self.env_name = envname
        # Flattened [env:<name>] section from platformio.ini.
        self.env_options = {k: v for k, v in options['project_config'].items("env:" + envname)}
        self._run_failed = False
        self._outputcpp_generated = False

    def get_transport(self):
        """Pick the TRANSPORT_OPTIONS key: framework, overridden by platform
        "native" and then by an explicit test_transport option."""
        transport = self.env_options.get("framework")
        if self.env_options.get("platform") == "native":
            transport = "native"
        if "test_transport" in self.env_options:
            transport = self.env_options['test_transport']
        if transport not in TRANSPORT_OPTIONS:
            raise exception.PlatformioException("Unknown Unit Test transport `%s`" % transport)
        return transport.lower()

    def get_baudrate(self):
        return int(self.env_options.get("test_speed", self.DEFAULT_BAUDRATE))

    def print_progress(self, text, is_error=False):
        click.echo()
        print_header("[test/%s] %s" % (click.style(self.test_name, fg="yellow", bold=True), text), is_error=is_error)

    def build_or_upload(self, target):
        """Invoke the `run` command for this env with the given targets."""
        if not self._outputcpp_generated:
            # Generate the transport shim once per processor lifetime.
            self.generate_outputcpp(util.get_projecttest_dir())
            self._outputcpp_generated = True
        if self.test_name != "*":
            self.cmd_ctx.meta['piotest'] = self.test_name
        if not self.options['verbose']:
            click.echo("Please wait...")
        return self.cmd_ctx.invoke(cmd_run, project_dir=self.options['project_dir'], upload_port=self.options['upload_port'], silent=not self.options['verbose'], environment=[self.env_name], disable_auto_clean="nobuild" in target, target=target)

    def process(self):
        raise NotImplementedError

    def run(self):
        raise NotImplementedError

    def on_run_out(self, line):
        """Colorize a line of Unity test output and record failures."""
        if line.endswith(":PASS"):
            click.echo("%s\t[%s]" % (line[:-5], click.style("PASSED", fg="green")))
        elif ":FAIL" in line:
            self._run_failed = True
            click.echo("%s\t[%s]" % (line, click.style("FAILED", fg="red")))
        else:
            click.echo(line)

    def generate_outputcpp(self, test_dir):
        """Write output_export.cpp into test_dir from the transport template;
        the file is removed at interpreter exit."""
        assert isdir(test_dir)
        cpp_tpl = "\n".join(["$include", "#include <output_export.h>", "", "$object", "", "void output_start(unsigned int baudrate)", "{", "    $begin;", "}", "", "void output_char(int c)", "{", "    $putchar;", "}", "", "void output_flush(void)", "{", "    $flush;", "}", "", "void output_complete(void)", "{", "    $end;", "}"])

        def delete_tmptest_file(file_):
            # Best-effort cleanup; warn only if the file still exists.
            try:
                remove(file_)
            except:
                if isfile(file_):
                    click.secho("Warning: Could not remove temporary file '%s'. " "Please remove it manually." % file_, fg="yellow")

        # Two-pass substitution: transport fragments first, then $baudrate
        # inside those fragments.
        tpl = Template(cpp_tpl).substitute(TRANSPORT_OPTIONS[self.get_transport()])
        data = Template(tpl).substitute(baudrate=self.get_baudrate())
        tmp_file = join(test_dir, "output_export.cpp")
        with open(tmp_file, "w") as f:
            f.write(data)
        atexit.register(delete_tmptest_file, tmp_file)
class NativeTestProcessor(TestProcessorBase):
    """Runs a test compiled for the host machine (platform = native)."""

    def process(self):
        if not self.options['without_building']:
            self.print_progress("Building... (1/2)")
            self.build_or_upload(["__test"])
        if self.options['without_testing']:
            # None == "not run" (reported as IGNORED in the summary).
            return None
        self.print_progress("Testing... (2/2)")
        return self.run()

    def run(self):
        """Execute the built test program and parse its output."""
        with util.cd(self.options['project_dir']):
            build_dir = util.get_projectbuild_dir()
        result = util.exec_command([join(build_dir, self.env_name, "program")], stdout=util.AsyncPipe(self.on_run_out), stderr=util.AsyncPipe(self.on_run_out))
        assert "returncode" in result
        # Both a clean exit code and no FAIL lines are required to pass.
        return result['returncode'] == 0 and not self._run_failed
class EmbeddedTestProcessor(TestProcessorBase):
    """Runs a test on a connected board, reading results over a serial port.

    NOTE(review): indentation reconstructed from a minified paste — loop
    nesting in run()/get_test_port() follows the upstream open-source test
    command; confirm against the original.
    """
    # Seconds to wait for serial output before giving up.
    SERIAL_TIMEOUT = 600

    def process(self):
        if not self.options['without_building']:
            self.print_progress("Building... (1/3)")
            target = ["__test"]
            if self.options['without_uploading']:
                target.append("checkprogsize")
            self.build_or_upload(target)
        if not self.options['without_uploading']:
            self.print_progress("Uploading... (2/3)")
            target = ["upload"]
            if self.options['without_building']:
                target.append("nobuild")
            else:
                target.append("__test")
            self.build_or_upload(target)
        if self.options['without_testing']:
            return None
        self.print_progress("Testing... (3/3)")
        return self.run()

    def run(self):
        """Open the serial port, optionally reset the board, and read Unity
        output until the summary line ("Tests ... Failures ... Ignored")."""
        click.echo("If you don't see any output for the first 10 secs, " "please reset board (press reset button)")
        click.echo()
        try:
            ser = serial.Serial(baudrate=self.get_baudrate(), timeout=self.SERIAL_TIMEOUT)
            ser.port = self.get_test_port()
            ser.rts = self.options['monitor_rts']
            ser.dtr = self.options['monitor_dtr']
            ser.open()
        except serial.SerialException as e:
            click.secho(str(e), fg="red", err=True)
            return False
        if not self.options['no_reset']:
            # Toggle DTR/RTS to reset the board (classic Arduino-style reset).
            ser.flushInput()
            ser.setDTR(False)
            ser.setRTS(False)
            sleep(0.1)
            ser.setDTR(True)
            ser.setRTS(True)
            sleep(0.1)
        while True:
            line = ser.readline().strip()
            # Drop everything up to the last non-ASCII byte — presumably
            # boot/noise garbage before real output; confirm intent.
            for i, c in enumerate(line[::-1]):
                if ord(c) > 127:
                    line = line[-i:]
                    break
            if not line:
                continue
            self.on_run_out(line)
            if all([l in line for l in ("Tests", "Failures", "Ignored")]):
                break
        ser.close()
        return not self._run_failed

    def get_test_port(self):
        """Resolve the serial port: explicit option, env option, or autodetect
        by board hardware ids / port usability (retries for up to ~5s)."""
        if self.options.get("test_port"):
            return self.options.get("test_port")
        elif self.env_options.get("test_port"):
            return self.env_options.get("test_port")
        assert set(["platform", "board"]) & set(self.env_options.keys())
        p = PlatformFactory.newPlatform(self.env_options['platform'])
        board_hwids = p.board_config(self.env_options['board']).get("build.hwids", [])
        port = None
        elapsed = 0
        while elapsed < 5 and not port:
            # Prefer a port whose hwid matches the board definition.
            for item in util.get_serialports():
                port = item['port']
                for hwid in board_hwids:
                    hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
                    if hwid_str in item['hwid']:
                        return port
            # Otherwise fall back to the last seen port if it can be opened.
            try:
                serial.Serial(port, timeout=self.SERIAL_TIMEOUT).close()
            except serial.SerialException:
                port = None
            if not port:
                sleep(0.25)
                elapsed += 0.25
        if not port:
            raise exception.PlatformioException("Please specify `test_port` for environment or use " "global `--test-port` option.")
        return port
def get_test_names(test_dir):
    """Return sorted names of test subdirectories inside *test_dir*.

    Each immediate subdirectory is one test; when there are none, the
    sentinel ["*"] is returned, meaning "run the whole test folder".
    """
    names = [item for item in sorted(listdir(test_dir)) if isdir(join(test_dir, item))]
    return names or ["*"]
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$project_sync
- import os
- import tarfile
- from binascii import crc32
- from os.path import getmtime,getsize,isdir,isfile,join
- from twisted.python import constants
class PROJECT_SYNC_STAGE(constants.Flags):
    """Bit flags tracking how far a project sync has progressed."""
    INIT = constants.FlagConstant()
    DBINDEX = constants.FlagConstant()
    DELETE = constants.FlagConstant()
    UPLOAD = constants.FlagConstant()
    EXTRACTED = constants.FlagConstant()
    COMPLETED = constants.FlagConstant()
class ProjectSync(object):
    """Tracks a set of project paths and mirrors them into a target folder.

    The index (_db) maps a CRC32 of "<relpath>-<mtime>-<size>" to the
    (path, relpath) pair, so two sides can diff indexes and exchange only
    the files that changed.
    """

    def __init__(self, path):
        self.path = path
        if not isdir(self.path):
            os.makedirs(self.path)
        self.items = []
        self._db = {}

    def add_item(self, path, relpath, cb_filter=None):
        """Register a file or directory to be synced under *relpath*."""
        self.items.append((path, relpath, cb_filter))

    def get_items(self):
        return self.items

    def rebuild_dbindex(self):
        """Recompute the checksum index for every registered item."""
        self._db = {}
        for path, relpath, cb_filter in self.items:
            if cb_filter and not cb_filter(path):
                continue
            self._insert_to_db(path, relpath)
            if not isdir(path):
                continue
            for root, _, files in os.walk(path, followlinks=True):
                for name in files:
                    self._insert_to_db(join(root, name), join(relpath, root[len(path) + 1:], name))

    def _insert_to_db(self, path, relpath):
        # Only plain files are indexed; directories are expanded by the caller.
        if not isfile(path):
            return
        key = crc32("%s-%s-%s" % (relpath, getmtime(path), getsize(path)))
        self._db[key] = (path, relpath)

    def get_dbindex(self):
        return self._db.keys()

    def delete_dbindex(self, dbindex):
        """Remove the given index entries (and their files) from disk."""
        for key in dbindex:
            if key not in self._db:
                continue
            target = self._db[key][0]
            if isfile(target):
                os.remove(target)
            del self._db[key]
        self.delete_empty_folders()
        return True

    def delete_empty_folders(self):
        """Prune empty subfolders beneath every registered directory item."""
        removed_any = False
        for item in self.items:
            if not isdir(item[0]):
                continue
            for root, dirs, files in os.walk(item[0]):
                if root != item[0] and not dirs and not files:
                    removed_any = True
                    os.rmdir(root)
        if removed_any:
            # A removal may have emptied the parent folder; keep pruning.
            return self.delete_empty_folders()
        return True

    def compress_items(self, fileobj, dbindex, max_size):
        """Write a gzip tar of the *dbindex* entries into *fileobj*.

        Stops once the accumulated (uncompressed) size exceeds *max_size*.
        Returns the indexes consumed — including unknown ones, which are
        skipped but still acknowledged.
        """
        consumed = []
        total_size = 0
        with tarfile.open(fileobj=fileobj, mode="w:gz", bufsize=0, dereference=True) as tgz:
            for key in dbindex:
                consumed.append(key)
                if key not in self._db:
                    continue
                path, relpath = self._db[key]
                tgz.add(path, relpath)
                total_size += getsize(path)
                if total_size > max_size:
                    break
        return consumed

    def decompress_items(self, fileobj):
        """Extract a received gzip tar archive into the sync root."""
        fileobj.seek(0)
        with tarfile.open(fileobj=fileobj, mode="r:gz") as tgz:
            tgz.extractall(self.path)
        return True
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$remote_client
- import json
- import os
- import zlib
- from datetime import datetime
- from hashlib import sha1
- from io import BytesIO
- from os.path import basename,isdir,isfile,join
- from time import time
- import click
- from platformio import exception,maintenance,util
- from twisted.cred import credentials
- from twisted.internet import defer,protocol,reactor,task
- from twisted.logger import ILogObserver,Logger,LogLevel,formatEvent
- from twisted.python import failure
- from twisted.spread import pb
- from zope.interface import provider
- import cmd_account
- import config
- from project_sync import PROJECT_SYNC_STAGE,ProjectSync
class PioAgentNotStartedError(pb.Error):
    """PB-serializable error: no remote agent is running (matched by name
    in RemoteClientBase.cb_global_error)."""
    pass
class RemoteFactory(pb.PBClientFactory, protocol.ReconnectingClientFactory):
    """PB client factory that reconnects automatically and logs in on connect.

    The owning RemoteClientBase is attached as `self.remote_client` before
    the factory is handed to the reactor (see RemoteClientBase.connect).
    """

    def clientConnectionMade(self, broker):
        pb.PBClientFactory.clientConnectionMade(self, broker)
        # Successful connect resets the reconnect backoff delay.
        protocol.ReconnectingClientFactory.resetDelay(self)
        self.remote_client.log.info("Successfully connected")
        self.remote_client.log.info("Authenticating")
        # Credentials scheme: session token as username, host id as password.
        stoken = cmd_account.restore_session_token()
        d = self.login(credentials.UsernamePassword(stoken, cmd_account.get_host_id()), client=self.remote_client)
        d.addCallback(self.remote_client.cb_client_authorization_made)
        d.addErrback(self.remote_client.cb_client_authorization_failed)

    def clientConnectionFailed(self, connector, reason):
        self.remote_client.log.warn("Could not connect to PIO Remote Cloud. Reconnecting...")
        self.remote_client.cb_disconnected(reason)
        protocol.ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)

    def clientConnectionLost(self, connector, unused_reason):
        # When the reactor is already shutting down this is a clean exit,
        # not a dropped connection.
        if not reactor.running:
            self.remote_client.log.info("Successfully disconnected")
            return
        self.remote_client.log.warn("Connection is lost to PIO Remote Cloud. Reconnecting")
        pb.PBClientFactory.clientConnectionLost(self, connector, unused_reason, reconnecting=1)
        self.remote_client.cb_disconnected(unused_reason)
        protocol.ReconnectingClientFactory.clientConnectionLost(self, connector, unused_reason)
class RemoteClientBase(pb.Referenceable):
    """Base class for PIO Remote clients: connection, auth, ping keep-alive.

    Subclasses implement agent_pool_ready() to start their work once the
    server has handed back the agent-pool reference.
    """

    def __init__(self):
        self.log_level = LogLevel.warn
        self.log = Logger(namespace="remote", observer=self._log_observer)
        self.id = cmd_account.get_host_id()
        self.name = cmd_account.get_host_name()
        self.join_options = {"cliver": config.__version__}
        self.perspective = None  # set after successful PB login
        self.agentpool = None  # remote reference, set on successful join
        self._ping_id = 0
        self._ping_caller = None  # pending reactor.callLater handle
        self._ping_counter = 0  # consecutive failed pings
        self._reactor_stopped = False
        self._exit_code = 0

    @provider(ILogObserver)
    def _log_observer(self, event):
        # Outside DEBUG mode, show only this namespace at/above log_level.
        if not config.DEBUG and (event['log_namespace'] != self.log.namespace or self.log_level > event['log_level']):
            return
        msg = formatEvent(event)
        click.echo("%s [%s] %s" % (datetime.fromtimestamp(event['log_time']).strftime('%Y-%m-%d %H:%M:%S'), event['log_level'].name, msg))

    def connect(self):
        """Connect to the remote cloud and run the reactor until disconnect."""
        self.log.info("Name: {name}", name=self.name)
        self.log.info("Connecting to PIO Remote Cloud")
        factory = RemoteFactory()
        factory.remote_client = self
        reactor.connectTCP(config.PIOREMOTE_HOST, config.PIOREMOTE_PORT, factory)
        reactor.run()
        # Propagate a non-zero exit recorded during the session.
        if self._exit_code != 0:
            raise exception.ReturnErrorCode(self._exit_code)

    def remote_service(self, command, options):
        """Handle a service instruction pushed from the server."""
        if command == "disconnect":
            self.log.error("PIO Remote Cloud disconnected: {msg}", msg=options.get("message"))
            self.disconnect()
        elif command == "cliupdate":
            # Exit code 13 signals the wrapper to self-update the CLI
            # — presumably; confirm against the launcher script.
            self.log.info("Preparing for remote update of PIO Plus CLI")
            self.disconnect(13)

    def cb_client_authorization_failed(self, err):
        msg = "Bad account credentials"
        if err.check(pb.Error):
            msg = err.getErrorMessage()
        self.log.error(msg)
        self.disconnect(exit_code=1)

    def cb_client_authorization_made(self, perspective):
        self.log.info("Successfully authorized")
        self.perspective = perspective
        d = perspective.callRemote("join", self.id, self.name, self.join_options)
        d.addCallback(self._cb_client_join_made)
        d.addErrback(self.cb_global_error)

    def _cb_client_join_made(self, result):
        # result[0]: 1 = joined (result[1] is the agent pool),
        #            2 = server pushed a service command instead.
        code = result[0]
        if code == 1:
            self.agentpool = result[1]
            self.agent_pool_ready()
            self.restart_ping()
        elif code == 2:
            self.remote_service(*result[1:])

    def restart_ping(self, reset_counter=True):
        self.stop_ping(reset_counter)
        self._ping_caller = reactor.callLater(config.PIOREMOTE_PING_DELAY, self._do_ping)

    def _do_ping(self):
        # The ping id (current epoch second) must be echoed back by the server.
        self._ping_counter += 1
        self._ping_id = int(time())
        d = self.perspective.callRemote("service", "ping", {"id": self._ping_id})
        d.addCallback(self._cb_pong)
        d.addErrback(self._cb_pong)

    def stop_ping(self, reset_counter=True):
        if reset_counter:
            self._ping_counter = 0
        if not self._ping_caller or not self._ping_caller.active():
            return
        self._ping_caller.cancel()
        self._ping_caller = None

    def _cb_pong(self, result):
        # A matching echo means the link is alive; otherwise count a failure
        # and drop the connection after PIOREMOTE_PING_MAX_FAILURES misses.
        if not isinstance(result, failure.Failure) and self._ping_id == result:
            self.restart_ping()
            return
        if self._ping_counter >= config.PIOREMOTE_PING_MAX_FAILURES:
            self.stop_ping()
            self.perspective.broker.transport.loseConnection()
        else:
            self.restart_ping(reset_counter=False)

    def agent_pool_ready(self):
        raise NotImplementedError

    def disconnect(self, exit_code=None):
        self.stop_ping()
        if exit_code is not None:
            self._exit_code = exit_code
        # Guard against double reactor.stop() from multiple callbacks.
        if reactor.running and not self._reactor_stopped:
            self._reactor_stopped = True
            reactor.stop()

    def cb_disconnected(self, _):
        self.stop_ping()
        self.perspective = None
        self.agentpool = None

    def cb_global_error(self, err):
        """Last-resort errback: report the failure and exit with code 1."""
        if err.check(pb.PBConnectionLost, defer.CancelledError):
            return
        msg = err.getErrorMessage()
        if err.check(pb.DeadReferenceError):
            msg = "Remote Client has been terminated"
        elif "PioAgentNotStartedError" in str(err.type):
            msg = ("Could not find active agents. Please start it before on " "a remote machine using `pio remote agent start` command.\n" "See http://docs.platformio.org/page/plus/pio-remote.html")
        click.secho(msg, fg="red", err=True)
        maintenance.on_platformio_exception(err)
        self.disconnect(exit_code=1)
class ClientAgentReload(RemoteClientBase):
    """Remote client that asks the selected agents to reload themselves."""

    def __init__(self, agents):
        RemoteClientBase.__init__(self)
        self.agents = agents  # agent name/id selectors forwarded to the pool

    def agent_pool_ready(self):
        d = self.agentpool.callRemote("cmd", self.agents, "reload")
        d.addCallback(self._cbResult)
        d.addErrback(self.cb_global_error)

    def _cbResult(self, result):
        # result: list of (success, value); value is an error string on
        # failure, or (id, name, reloaded-timestamp) on success.
        for (success, value) in result:
            if not success:
                click.secho(value, fg="red", err=True)
                continue
            (id_, name, reloaded) = value
            click.secho(name, fg="cyan")
            click.echo("-" * len(name))
            click.echo("ID: %s" % id_)
            click.echo("Reloaded: %s" % datetime.fromtimestamp(reloaded).strftime('%Y-%m-%d %H:%M:%S'))
            click.echo("")
        self.disconnect()
class ClientAgentList(RemoteClientBase):
    """Print name, id and start time for every active agent."""

    def agent_pool_ready(self):
        list_d = self.agentpool.callRemote("list", True)
        list_d.addCallback(self._cbResult)
        list_d.addErrback(self.cb_global_error)

    def _cbResult(self, result):
        time_fmt = '%Y-%m-%d %H:%M:%S'
        for item in result:
            click.secho(item['name'], fg="cyan")
            click.echo("-" * len(item['name']))
            click.echo("ID: %s" % item['id'])
            started = datetime.fromtimestamp(
                item['started']).strftime(time_fmt)
            click.echo("Started: %s" % started)
            click.echo("")
        self.disconnect()
class ClientDeviceList(RemoteClientBase):
    """List serial devices attached to each selected agent."""

    def __init__(self, agents, json_output):
        RemoteClientBase.__init__(self)
        self.agents = agents
        self.json_output = json_output

    def agent_pool_ready(self):
        list_d = self.agentpool.callRemote("cmd", self.agents, "device.list")
        list_d.addCallback(self._cbResult)
        list_d.addErrback(self.cb_global_error)

    def _cbResult(self, result):
        # Collect per-agent device lists, reporting failures as we go.
        data = {}
        for success, value in result:
            if not success:
                click.secho(value, fg="red", err=True)
                continue
            agent_name, devlist = value
            data[agent_name] = devlist
        if self.json_output:
            click.echo(json.dumps(data))
        else:
            for agent_name, devlist in data.items():
                click.echo("Agent %s" % click.style(
                    agent_name, fg="cyan", bold=True))
                # Underline matches "Agent " (6 chars) plus the name.
                click.echo("=" * (6 + len(agent_name)))
                for item in devlist:
                    click.secho(item['port'], fg="cyan")
                    click.echo("-" * len(item['port']))
                    click.echo("Hardware ID: %s" % item['hwid'])
                    click.echo("Description: %s" % item['description'])
                    click.echo("")
        self.disconnect()
class SMBridgeProtocol(protocol.Protocol):
    """TCP-side protocol of the serial-monitor bridge.

    Pure delegation: every event is forwarded to the owning
    SMBridgeFactory, which tracks clients and relays data.
    """

    def connectionMade(self):
        self.factory.add_client(self)

    def connectionLost(self, reason):
        self.factory.remove_client(self)

    def dataReceived(self, data):
        # Data typed by the local client goes to the remote device.
        self.factory.send_to_server(data)
class SMBridgeFactory(protocol.ServerFactory):
    """Serial-monitor bridge: fans device data out to local TCP clients."""

    def __init__(self, cdm):
        self.cdm = cdm  # owning ClientDeviceMonitor
        self._clients = []

    def buildProtocol(self, addr):
        proto = SMBridgeProtocol()
        proto.factory = self
        return proto

    def add_client(self, client):
        self.cdm.log.debug("SMBridge: Client connected")
        self._clients.append(client)
        # A new reader appeared; kick off (or resume) remote reads.
        self.cdm.acread_data()

    def remove_client(self, client):
        self.cdm.log.debug("SMBridge: Client disconnected")
        self._clients.remove(client)
        if not self._clients:
            self.cdm.client_terminal_stopped()

    def has_clients(self):
        # Number of connected clients (used as a boolean by callers).
        return len(self._clients)

    def send_to_clients(self, data):
        if not self._clients:
            return None
        for client in self._clients:
            client.transport.write(data)
        return len(data)

    def send_to_server(self, data):
        self.cdm.acwrite_data(data)
class ClientDeviceMonitor(RemoteClientBase):
    """Bridges a remote serial monitor to local TCP clients.

    Device output is pulled from the agent with "acread" calls and fanned
    out via SMBridgeFactory; local keystrokes are pushed back with
    "acwrite". Only one read and one write call are kept in flight.
    """

    # Cap for the pending-write buffer; oldest data is dropped first.
    MAX_BUFFER_SIZE = 1024 * 1024

    def __init__(self, agents, **kwargs):
        RemoteClientBase.__init__(self)
        self.agents = agents
        self.cmd_options = kwargs
        self._bridge_factory = SMBridgeFactory(self)
        self._agent_id = None
        self._ac_id = None
        self._d_acread = None   # in-flight "acread" deferred, if any
        self._d_acwrite = None  # in-flight "acwrite" deferred, if any
        self._acwrite_buffer = ""

    def agent_pool_ready(self):
        # Small delay gives agents time to settle before listing devices.
        d = task.deferLater(reactor, 1, self.agentpool.callRemote,
                            "cmd", self.agents, "device.list")
        d.addCallback(self._cb_device_list)
        d.addErrback(self.cb_global_error)

    def _cb_device_list(self, result):
        """Pick a device (auto or via prompt) and start the remote monitor."""
        devices = []
        hwid_devindexes = []  # indexes of ports exposing a USB VID:PID
        for (success, value) in result:
            if not success:
                click.secho(value, fg="red", err=True)
                continue
            (agent_name, ports) = value
            for port in ports:
                if "VID:PID" in port['hwid']:
                    hwid_devindexes.append(len(devices))
                devices.append((agent_name, port))
        if len(result) == 1 and self.cmd_options['port']:
            # Single agent with an explicit --port: skip the chooser.
            # NOTE(review): result[0][1][0] looks like the agent name and
            # the port comes from cmd_options inside start_remote_monitor
            # -- confirm against the agent reply format.
            return self.start_remote_monitor(result[0][1][0])
        device = None
        if len(hwid_devindexes) == 1:
            # Exactly one USB serial device overall: use it without asking.
            device = devices[hwid_devindexes[0]]
        else:
            click.echo("Available ports:")
            for i, device in enumerate(devices):
                click.echo("{index}. {host}{port} \t{description}".format(
                    index=i + 1,
                    host=device[0] + ":" if len(result) > 1 else "",
                    port=device[1]['port'],
                    description=device[1]['description']
                    if device[1]['description'] != "n/a" else ""))
            device_index = click.prompt(
                "Please choose a port (number in the list above)",
                type=click.Choice(
                    [str(i + 1) for i, _ in enumerate(devices)]))
            device = devices[int(device_index) - 1]
        self.start_remote_monitor(device[0], device[1]['port'])
        return None

    def start_remote_monitor(self, agent, port=None):
        """Ask `agent` to open its serial monitor with our serial options."""
        options = {}
        for key in ("port", "baud", "parity", "rtscts", "xonxoff",
                    "rts", "dtr"):
            options[key] = self.cmd_options[key]
        if port:
            options['port'] = port
        click.echo("Starting Serial Monitor on {host}:{port}".format(
            host=agent, port=options['port']))
        d = self.agentpool.callRemote("cmd", [agent], "device.monitor",
                                      options)
        d.addCallback(self.cb_async_result)
        d.addErrback(self.cb_global_error)

    def cb_async_result(self, result):
        """Store the async-channel ids and start the local TCP bridge."""
        if len(result) != 1:
            raise pb.Error("Invalid response from Remote Cloud")
        success, value = result[0]
        if not success:
            raise pb.Error(value)
        # If we already had channel ids, the monitor was restarted remotely.
        reconnected = self._agent_id is not None
        self._agent_id, self._ac_id = value
        if reconnected:
            # Resume pending I/O on the fresh channel.
            self.acread_data(force=True)
            self.acwrite_data("", force=True)
            return
        # Port 0: let the OS pick a free local port for the bridge.
        port = reactor.listenTCP(0, self._bridge_factory)
        address = port.getHost()
        self.log.debug("Serial Bridge is started on {address!r}",
                       address=address)
        if "sock" in self.cmd_options:
            # Publish the bridge endpoint for external tools (e.g. an IDE).
            with open(join(self.cmd_options['sock'], "sock"), "w") as fp:
                fp.write("socket://localhost:%d" % address.port)

    def client_terminal_stopped(self):
        """Last local client left: close the remote channel and exit."""
        try:
            d = self.agentpool.callRemote("acclose", self._agent_id,
                                          self._ac_id)
            d.addCallback(lambda r: self.disconnect())
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            # agentpool already gone or broker dead.
            self.disconnect(exit_code=1)

    def acread_data(self, force=False):
        """Issue a remote read unless one is already pending."""
        if force and self._d_acread:
            self._d_acread.cancel()
            self._d_acread = None
        if (self._d_acread and not self._d_acread.called) \
                or not self._bridge_factory.has_clients():
            return
        try:
            self._d_acread = self.agentpool.callRemote(
                "acread", self._agent_id, self._ac_id)
            self._d_acread.addCallback(self.cb_acread_result)
            self._d_acread.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

    def cb_acread_result(self, result):
        # None signals end-of-stream from the agent.
        if result is None:
            self.disconnect(exit_code=1)
        else:
            self._bridge_factory.send_to_clients(result)
            self.acread_data()

    def acwrite_data(self, data, force=False):
        """Buffer `data` and flush it remotely when no write is in flight."""
        if force and self._d_acwrite:
            self._d_acwrite.cancel()
            self._d_acwrite = None
        self._acwrite_buffer += data
        if len(self._acwrite_buffer) > self.MAX_BUFFER_SIZE:
            # Keep only the newest MAX_BUFFER_SIZE bytes.
            self._acwrite_buffer = \
                self._acwrite_buffer[-1 * self.MAX_BUFFER_SIZE:]
        if (self._d_acwrite and not self._d_acwrite.called) \
                or not self._acwrite_buffer:
            return
        data = self._acwrite_buffer
        self._acwrite_buffer = ""
        try:
            d = self.agentpool.callRemote("acwrite", self._agent_id,
                                          self._ac_id, data)
            d.addCallback(self.cb_acwrite_result)
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

    def cb_acwrite_result(self, result):
        assert result > 0
        # Flush anything that was buffered while the write was in flight.
        if self._acwrite_buffer:
            self.acwrite_data("")
class ClientAsyncCmdBase(RemoteClientBase):
    """Base client for commands that stream async output from agents."""

    def __init__(self, command, agents, options):
        RemoteClientBase.__init__(self)
        self.command = command
        self.agents = agents
        self.options = options
        self._acs_total = 0  # number of async channels being followed
        self._acs_ended = 0  # how many of them have finished

    def cb_async_result(self, result):
        if self._acs_total == 0:
            self._acs_total = len(result)
        for success, value in result:
            if not success:
                raise pb.Error(value)
            self.acread_data(*value)

    def acread_data(self, agent_id, ac_id, agent_name=None):
        read_d = self.agentpool.callRemote("acread", agent_id, ac_id)
        read_d.addCallback(self.cb_acread_result, agent_id, ac_id,
                           agent_name)
        read_d.addErrback(self.cb_global_error)

    def cb_acread_result(self, result, agent_id, ac_id, agent_name):
        # None marks end-of-stream for this channel.
        if result is None:
            self.acclose(agent_id, ac_id)
            return
        if self._acs_total > 1 and agent_name:
            # Prefix output with the agent name when several are streaming.
            click.echo("[%s] " % agent_name, nl=False)
        click.echo(result, nl=False)
        self.acread_data(agent_id, ac_id, agent_name)

    def acclose(self, agent_id, ac_id):
        close_d = self.agentpool.callRemote("acclose", agent_id, ac_id)
        close_d.addCallback(self.cb_acclose_result)
        close_d.addErrback(self.cb_global_error)

    def cb_acclose_result(self, exit_code):
        self._acs_ended += 1
        # Disconnect only after every channel has ended.
        if self._acs_ended == self._acs_total:
            self.disconnect(exit_code)
class ClientUpdate(ClientAsyncCmdBase):
    """Run the update command on selected agents and stream its output."""

    def agent_pool_ready(self):
        cmd_d = self.agentpool.callRemote(
            "cmd", self.agents, self.command, self.options)
        cmd_d.addCallback(self.cb_async_result)
        cmd_d.addErrback(self.cb_global_error)
class ClientRunOrTest(ClientAsyncCmdBase):
    """Runs `run`/`test` remotely after syncing the project to the agent.

    Sync protocol (PSync) stages, driven over an async channel:
    DBINDEX (exchange file indexes) -> DELETE (remove stale remote files)
    -> UPLOAD (chunked archive upload) -> close -> launch the command.
    """

    MAX_ARCHIVE_SIZE = 50 * 1024 * 1024  # size cap per uploaded archive
    UPLOAD_CHUNK_SIZE = 256 * 1024       # bytes per "acwrite" upload chunk
    # Source extensions synced in --force-remote mode.
    PSYNC_SRC_EXTS = ["c", "cpp", "S", "spp", "SPP", "sx", "s", "asm",
                      "ASM", "h", "hpp", "ipp", "ino", "pde", "json",
                      "properties"]
    # Directory names excluded from the sync archive.
    PSYNC_SKIP_DIRS = (".git", ".svn", ".hg", "example", "examples",
                       "test", "tests")

    def __init__(self, *args, **kwargs):
        ClientAsyncCmdBase.__init__(self, *args, **kwargs)
        self.project_id = self.generate_project_id(
            self.options['project_dir'])
        self.psync = ProjectSync(self.options['project_dir'])

    def generate_project_id(self, path):
        # Stable id: project basename plus sha1 of client id + path.
        id_ = sha1(self.id)
        id_.update(path)
        return "%s-%s" % (basename(path), id_.hexdigest())

    def add_project_items(self, psync):
        """Register the files/dirs that must be synced to the agent."""
        project_dir = self.options['project_dir']
        with util.cd(project_dir):
            if self.options['force_remote']:
                # Full remote build: sync sources and libs (plus data/
                # when a filesystem target was requested).
                target_dirs = {"lib": util.get_projectlib_dir(),
                               "src": util.get_projectsrc_dir()}
                if (set(["buildfs", "uploadfs", "uploadfsota"])
                        & set(self.options.get("target", []))):
                    target_dirs['data'] = util.get_projectdata_dir()
                for name, path in target_dirs.items():
                    if isdir(path):
                        # data/ is synced unfiltered; sources are filtered.
                        psync.add_item(
                            path, name,
                            cb_filter=self._cb_tarfile_filter
                            if name != "data" else None)
            else:
                # Local build already happened: ship only the firmware
                # binaries from the build directory.
                build_dir = util.get_projectbuild_dir()
                for env_name in os.listdir(build_dir):
                    env_dir = join(build_dir, env_name)
                    if not isdir(env_dir):
                        continue
                    for fname in os.listdir(env_dir):
                        bin_file = join(env_dir, fname)
                        bin_exts = (".elf", ".bin", ".hex", ".eep",
                                    "program")
                        if isfile(bin_file) and fname.endswith(bin_exts):
                            psync.add_item(
                                bin_file,
                                join(".pioenvs", env_name, fname))
            if isdir(util.get_projectboards_dir()):
                psync.add_item(util.get_projectboards_dir(), "boards")
            if self.command == "test" and isdir(util.get_projecttest_dir()):
                psync.add_item(util.get_projecttest_dir(), "test")
            psync.add_item(join(project_dir, "platformio.ini"),
                           "platformio.ini")

    def _cb_tarfile_filter(self, path):
        # Return None to exclude `path` from the archive, else the path.
        if isdir(path) and basename(path).lower() in self.PSYNC_SKIP_DIRS:
            return None
        if isfile(path) and not self.is_file_with_exts(
                path, self.PSYNC_SRC_EXTS):
            return None
        return path

    @staticmethod
    def is_file_with_exts(path, exts):
        """True when `path` ends with a dot followed by one of `exts`."""
        if path.endswith(tuple(".%s" % e for e in exts)):
            return True
        return False

    def agent_pool_ready(self):
        self.psync_init()

    def psync_init(self):
        """Open a psync channel on each agent and build the local index."""
        self.add_project_items(self.psync)
        d = self.agentpool.callRemote(
            "cmd", self.agents, "psync",
            dict(id=self.project_id,
                 items=[i[1] for i in self.psync.get_items()]))
        d.addCallback(self.cb_psync_init_result)
        d.addErrback(self.cb_global_error)
        self.psync.rebuild_dbindex()

    def cb_psync_init_result(self, result):
        self._acs_total = len(result)
        for (success, value) in result:
            if not success:
                raise pb.Error(value)
            agent_id, ac_id = value
            try:
                # Stage 1: ask the agent for its current file index.
                d = self.agentpool.callRemote(
                    "acwrite", agent_id, ac_id,
                    dict(stage=PROJECT_SYNC_STAGE.DBINDEX.value))
                d.addCallback(self.cb_psync_dbindex_result, agent_id,
                              ac_id)
                d.addErrback(self.cb_global_error)
            except (AttributeError, pb.DeadReferenceError):
                self.disconnect(exit_code=1)

    def cb_psync_dbindex_result(self, result, agent_id, ac_id):
        # Diff the agent's index against ours: entries only on the agent
        # are deleted remotely, entries only here are uploaded.
        result = set(json.loads(zlib.decompress(result)))
        dbindex = set(self.psync.get_dbindex())
        delete = list(result - dbindex)
        delta = list(dbindex - result)
        self.log.debug(
            "PSync: stats, total={total}, delete={delete}, delta={delta}",
            total=len(dbindex), delete=len(delete), delta=len(delta))
        if not delete and not delta:
            return self.psync_finalize(agent_id, ac_id)
        elif not delete:
            return self.psync_upload(agent_id, ac_id, delta)
        try:
            d = self.agentpool.callRemote(
                "acwrite", agent_id, ac_id,
                dict(stage=PROJECT_SYNC_STAGE.DELETE.value,
                     dbindex=zlib.compress(json.dumps(delete))))
            d.addCallback(self.cb_psync_delete_result, agent_id, ac_id,
                          delta)
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)
        return None

    def cb_psync_delete_result(self, result, agent_id, ac_id, dbindex):
        assert result
        self.psync_upload(agent_id, ac_id, dbindex)

    def psync_upload(self, agent_id, ac_id, dbindex):
        """Archive up to MAX_ARCHIVE_SIZE of `dbindex` items and upload."""
        assert dbindex
        fileobj = BytesIO()
        # compress_items returns the subset that fit under the size cap;
        # the remainder is uploaded in a later round.
        compressed = self.psync.compress_items(fileobj, dbindex,
                                               self.MAX_ARCHIVE_SIZE)
        fileobj.seek(0)
        self.log.debug("PSync: upload project, size={size}",
                       size=len(fileobj.getvalue()))
        self.psync_upload_chunk(agent_id, ac_id,
                                list(set(dbindex) - set(compressed)),
                                fileobj)

    def psync_upload_chunk(self, agent_id, ac_id, dbindex, fileobj):
        offset = fileobj.tell()
        # BytesIO.seek returns the new position == total archive size.
        total = fileobj.seek(0, os.SEEK_END)
        fileobj.seek(offset)
        chunk = fileobj.read(self.UPLOAD_CHUNK_SIZE)
        assert chunk
        try:
            d = self.agentpool.callRemote(
                "acwrite", agent_id, ac_id,
                dict(stage=PROJECT_SYNC_STAGE.UPLOAD.value, chunk=chunk,
                     length=len(chunk), total=total))
            d.addCallback(self.cb_psync_upload_chunk_result, agent_id,
                          ac_id, dbindex, fileobj)
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

    def cb_psync_upload_chunk_result(self, result, agent_id, ac_id,
                                     dbindex, fileobj):
        result = PROJECT_SYNC_STAGE.lookupByValue(result)
        self.log.debug("PSync: upload chunk result {r}", r=str(result))
        assert result & (PROJECT_SYNC_STAGE.UPLOAD
                         | PROJECT_SYNC_STAGE.EXTRACTED)
        if result is PROJECT_SYNC_STAGE.EXTRACTED:
            if dbindex:
                # Items that did not fit into the previous archive.
                self.psync_upload(agent_id, ac_id, dbindex)
            else:
                self.psync_finalize(agent_id, ac_id)
        else:
            # Agent still expects more chunks of the current archive.
            self.psync_upload_chunk(agent_id, ac_id, dbindex, fileobj)

    def psync_finalize(self, agent_id, ac_id):
        try:
            d = self.agentpool.callRemote("acclose", agent_id, ac_id)
            d.addCallback(self.cb_psync_completed_result, agent_id)
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

    def cb_psync_completed_result(self, result, agent_id):
        assert PROJECT_SYNC_STAGE.lookupByValue(result)
        # Sync done: launch the actual run/test command on this agent,
        # addressing the project by its synced id instead of a local dir.
        options = self.options.copy()
        del options['project_dir']
        options['project_id'] = self.project_id
        d = self.agentpool.callRemote("cmd", [agent_id], self.command,
                                      options)
        d.addCallback(self.cb_async_result)
        d.addErrback(self.cb_global_error)
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$remote_agent
- import json
- import os
- import zlib
- from io import BytesIO
- from os.path import getatime,getmtime,isdir,isfile,join
- from time import time
- from platformio import exception,util
- from twisted.internet import defer,protocol,reactor
- from twisted.internet.serialport import SerialPort
- from twisted.logger import LogLevel
- from twisted.spread import pb
- from project_sync import PROJECT_SYNC_STAGE,ProjectSync
- from remote_client import RemoteClientBase
class Agent(RemoteClientBase):
    """Remote-machine agent: executes commands requested by PIO clients.

    Commands arrive via `remote_cmd` and are dispatched to
    `_process_cmd_*` handlers; long-running work is wrapped in async
    command channels (`self._acs`) polled by the client with
    acread/acwrite/acclose.
    """

    def __init__(self, name, share, working_dir=None):
        RemoteClientBase.__init__(self)
        self.log_level = LogLevel.info
        self.working_dir = working_dir or join(util.get_home_dir(), "remote")
        if not isdir(self.working_dir):
            os.makedirs(self.working_dir)
        if name:
            # Agent names are truncated to 50 chars.
            self.name = str(name)[:50]
        self.join_options.update({
            "agent": True,
            "share": [s.lower().strip()[:50] for s in share]})
        self._acs = {}  # active async command channels, keyed by id

    def agent_pool_ready(self):
        # Agents idle until a client sends commands.
        pass

    def cb_disconnected(self, reason):
        for ac in self._acs.values():
            ac.ac_close()
        RemoteClientBase.cb_disconnected(self, reason)

    def remote_acread(self, ac_id):
        """Read buffered output from async channel `ac_id`."""
        self.log.debug("Async Read: {id}", id=ac_id)
        if ac_id not in self._acs:
            raise pb.Error("Invalid Async Identifier")
        return self._acs[ac_id].ac_read()

    def remote_acwrite(self, ac_id, data):
        """Write `data` into async channel `ac_id`."""
        self.log.debug("Async Write: {id}", id=ac_id)
        if ac_id not in self._acs:
            raise pb.Error("Invalid Async Identifier")
        return self._acs[ac_id].ac_write(data)

    def remote_acclose(self, ac_id):
        """Close async channel `ac_id` and return its exit code."""
        self.log.debug("Async Close: {id}", id=ac_id)
        if ac_id not in self._acs:
            raise pb.Error("Invalid Async Identifier")
        return_code = self._acs[ac_id].ac_close()
        del self._acs[ac_id]
        return return_code

    def remote_cmd(self, cmd, options):
        """Dispatch `cmd` (e.g. "device.monitor") to its handler method."""
        self.log.info("Remote command received: {cmd}", cmd=cmd)
        self.log.debug("Command options: {options!r}", options=options)
        callback = "_process_cmd_%s" % cmd.replace(".", "_")
        return getattr(self, callback)(options)

    def _defer_async_cmd(self, ac, pass_agent_name=True):
        # Register the channel and return its addressing tuple for the
        # client's follow-up acread/acwrite/acclose calls.
        self._acs[ac.id] = ac
        if pass_agent_name:
            return (self.id, ac.id, self.name)
        return (self.id, ac.id)

    def _process_cmd_reload(self, _):
        # Exit code 14 signals the supervising wrapper to restart us.
        reactor.callLater(1, self.disconnect, 14)
        return (self.id, self.name, time())

    def _process_cmd_device_list(self, _):
        return (self.name, util.get_serialports())

    def _process_cmd_device_monitor(self, options):
        if not options['port']:
            # Default to the first USB serial device.
            for item in util.get_serialports():
                if "VID:PID" in item['hwid']:
                    options['port'] = item['port']
                    break
        if options['port']:
            # Iterate over a snapshot: the loop deletes from self._acs.
            for ac in list(self._acs.values()):
                if (isinstance(ac, AsyncCmdSerial)
                        and ac.options['port'] == options['port']):
                    self.log.info(
                        "Terminate previously opened monitor at {port}",
                        port=options['port'])
                    ac.ac_close()
                    del self._acs[ac.id]
        if not options['port']:
            raise pb.Error("Please specify serial port using `--port` option")
        self.log.info("Starting serial monitor at {port}",
                      port=options['port'])
        return self._defer_async_cmd(AsyncCmdSerial(options),
                                     pass_agent_name=False)

    def _process_cmd_psync(self, options):
        # Iterate over a snapshot: the loop deletes from self._acs.
        for ac in list(self._acs.values()):
            if (isinstance(ac, AsyncCmdPSync)
                    and ac.options['id'] == options['id']):
                self.log.info("Terminate previous Project Sync process")
                ac.ac_close()
                del self._acs[ac.id]
        options['agent_working_dir'] = self.working_dir
        return self._defer_async_cmd(AsyncCmdPSync(options),
                                     pass_agent_name=False)

    def _process_cmd_run(self, options):
        return self._process_cmd_run_or_test("run", options)

    def _process_cmd_test(self, options):
        return self._process_cmd_run_or_test("test", options)

    def _process_cmd_run_or_test(self, command, options):
        """Run `platformio run|test` inside the synced project directory."""
        assert options and "project_id" in options
        project_dir = join(self.working_dir, "projects",
                           options['project_id'])
        origin_pio_ini = join(project_dir, "platformio.ini")
        back_pio_ini = join(project_dir, "platformio.ini.bak")
        try:
            conf = util.load_project_config(project_dir)
            if isfile(back_pio_ini):
                os.remove(back_pio_ini)
            os.rename(origin_pio_ini, back_pio_ini)
            # Strip local *_dir overrides so paths resolve on this machine.
            if conf.has_section("platformio"):
                for opt in conf.options("platformio"):
                    if opt.endswith("_dir"):
                        conf.remove_option("platformio", opt)
            with open(origin_pio_ini, "w") as fp:
                conf.write(fp)
            # Keep original timestamps so the rewrite is invisible to
            # change detection.
            os.utime(origin_pio_ini,
                     (getatime(back_pio_ini), getmtime(back_pio_ini)))
        # BUGFIX: the original had `except exception.NotPlatformIOProject()`
        # which instantiates the exception class in the except clause and
        # breaks matching; an exception CLASS must be named here.
        except exception.NotPlatformIOProject as e:
            raise pb.Error(str(e))
        cmd_args = ["platformio", "--force", command, "-d", project_dir]
        for env in options.get("environment", []):
            cmd_args.extend(["-e", env])
        for target in options.get("target", []):
            cmd_args.extend(["-t", target])
        for ignore in options.get("ignore", []):
            cmd_args.extend(["-i", ignore])
        if options.get("upload_port", False):
            cmd_args.extend(["--upload-port", options.get("upload_port")])
        if options.get("test_port", False):
            cmd_args.extend(["--test-port", options.get("test_port")])
        if options.get("disable_auto_clean", False):
            cmd_args.append("--disable-auto-clean")
        if options.get("without_building", False):
            cmd_args.append("--without-building")
        if options.get("without_uploading", False):
            cmd_args.append("--without-uploading")
        if options.get("silent", False):
            cmd_args.append("-s")
        if options.get("verbose", False):
            cmd_args.append("-v")
        # Serial monitors hold the port open; pause them around the run.
        paused_acs = []
        for ac in self._acs.values():
            if not isinstance(ac, AsyncCmdSerial):
                continue
            self.log.info("Pause active monitor at {port}",
                          port=ac.options['port'])
            ac.pause()
            paused_acs.append(ac)

        def _cb_on_end():
            # Restore the original platformio.ini and resume monitors.
            if isfile(back_pio_ini):
                if isfile(origin_pio_ini):
                    os.remove(origin_pio_ini)
                os.rename(back_pio_ini, origin_pio_ini)
            for ac in paused_acs:
                ac.unpause()
                self.log.info("Unpause active monitor at {port}",
                              port=ac.options['port'])

        return self._defer_async_cmd(AsyncCmdProcess(
            {"executable": util.where_is_program("platformio"),
             "args": cmd_args},
            on_end_callback=_cb_on_end))

    def _process_cmd_update(self, options):
        cmd_args = ["platformio", "--force", "update"]
        if options.get("only_check"):
            cmd_args.append("--only-check")
        return self._defer_async_cmd(AsyncCmdProcess(
            {"executable": util.where_is_program("platformio"),
             "args": cmd_args}))
class AsyncCommandBase(object):
    """Base class for agent-side async channels (process, serial, psync).

    Output is buffered until the client polls via `ac_read()`; when no
    data is available yet, a pending Deferred is handed out and fired as
    soon as data (or end-of-stream) arrives.
    """

    # Drop the oldest buffered data beyond this size.
    MAX_BUFFER_SIZE = 1024 * 1024

    def __init__(self, options=None, on_end_callback=None):
        self.options = options or {}
        self.on_end_callback = on_end_callback
        self._buffer = ""
        self._return_code = None
        self._d = None  # pending ac_read() deferred, if any
        self._paused = False
        try:
            self.start()
        except Exception as e:
            # Surface startup failures to the remote caller as pb errors.
            raise pb.Error(str(e))

    @property
    def id(self):
        # Channel identifier: the CPython object id is unique while the
        # channel object is alive.
        return id(self)

    def pause(self):
        # Temporarily release the transport (e.g. free a serial port).
        self._paused = True
        self.stop()

    def unpause(self):
        self._paused = False
        self.start()

    def start(self):
        # Subclasses open their transport here.
        raise NotImplementedError

    def stop(self):
        self.transport.loseConnection()

    def _ac_ended(self):
        """Transport finished: run the end callback and flush the reader."""
        if self.on_end_callback:
            self.on_end_callback()
        if not self._d or self._d.called:
            self._d = None
            return
        if self._buffer:
            self._d.callback(self._buffer)
        else:
            # None tells the client the stream is finished.
            self._d.callback(None)

    def _ac_ondata(self, data):
        """Transport produced data: buffer it, waking any pending reader."""
        self._buffer += data
        if len(self._buffer) > self.MAX_BUFFER_SIZE:
            # Keep only the newest MAX_BUFFER_SIZE bytes.
            self._buffer = self._buffer[-1 * self.MAX_BUFFER_SIZE:]
        if self._paused:
            return
        if self._d and not self._d.called:
            self._d.callback(self._buffer)
            self._buffer = ""

    def ac_read(self):
        if self._buffer:
            result = self._buffer
            self._buffer = ""
            return result
        elif self._return_code is None:
            # No data yet: park the caller on a deferred.
            self._d = defer.Deferred()
            return self._d
        # Finished and drained: signal end-of-stream.
        return None

    def ac_write(self, data):
        self.transport.write(data)
        return len(data)

    def ac_close(self):
        self.stop()
        return self._return_code
class AsyncCmdProcess(protocol.ProcessProtocol, AsyncCommandBase):
    """Async channel that runs a local `platformio` subprocess."""

    def start(self):
        # dict(os.environ) already yields an independent copy; the
        # original chained a redundant .copy() on top of it.
        env = dict(os.environ)
        env['PLATFORMIO_FORCE_COLOR'] = 'true'
        reactor.spawnProcess(self, self.options['executable'],
                             self.options['args'], env)

    def outReceived(self, data):
        self._ac_ondata(data)

    def errReceived(self, data):
        # stderr is merged into the same stream as stdout.
        self._ac_ondata(data)

    def processExited(self, reason):
        self._return_code = reason.value.exitCode

    def processEnded(self, reason):
        # processEnded always fires; processExited may not set a code
        # in every termination path, so fall back here.
        if self._return_code is None:
            self._return_code = reason.value.exitCode
        self._ac_ended()
class AsyncCmdSerial(protocol.Protocol, AsyncCommandBase):
    """Async channel that bridges a local serial port."""

    def start(self):
        # Constructing the SerialPort registers `self` with the reactor;
        # the instance itself does not need to be kept.
        SerialPort(self, reactor=reactor, **{
            "deviceNameOrPortNumber": self.options['port'],
            "baudrate": self.options['baud'],
            "parity": self.options['parity'],
            "rtscts": 1 if self.options['rtscts'] else 0,
            "xonxoff": 1 if self.options['xonxoff'] else 0})

    def connectionMade(self):
        self.reset_device()
        # Apply explicit RTS/DTR states only when the user asked for them.
        if self.options.get("rts", None) is not None:
            self.transport.setRTS(self.options.get("rts"))
        if self.options.get("dtr", None) is not None:
            self.transport.setDTR(self.options.get("dtr"))

    def reset_device(self):
        """Pulse DTR/RTS low then high to reset the attached board."""
        from time import sleep
        self.transport.flushInput()
        self.transport.setDTR(False)
        self.transport.setRTS(False)
        sleep(0.1)
        self.transport.setDTR(True)
        self.transport.setRTS(True)
        sleep(0.1)

    def dataReceived(self, data):
        self._ac_ondata(data)

    def connectionLost(self, reason):
        if self._paused:
            # pause() closes the port deliberately; not a real EOF.
            return
        self._return_code = 0
        self._ac_ended()
class AsyncCmdPSync(AsyncCommandBase):
    """Agent-side handler for the staged project-sync protocol."""

    def __init__(self, *args, **kwargs):
        # Initialize our attributes first: the base __init__ calls start().
        self.psync = None
        self._upstream = None
        AsyncCommandBase.__init__(self, *args, **kwargs)

    def start(self):
        project_dir = join(self.options['agent_working_dir'], "projects",
                           self.options['id'])
        self.psync = ProjectSync(project_dir)
        for name in self.options['items']:
            self.psync.add_item(join(project_dir, name), name)

    def stop(self):
        self.psync = None
        self._upstream = None
        self._return_code = PROJECT_SYNC_STAGE.COMPLETED.value

    def ac_write(self, data):
        """Process one sync message; the reply depends on its stage."""
        stage = PROJECT_SYNC_STAGE.lookupByValue(data.get("stage"))
        if stage is PROJECT_SYNC_STAGE.DBINDEX:
            # Client wants our current file index.
            self.psync.rebuild_dbindex()
            return zlib.compress(json.dumps(self.psync.get_dbindex()))
        if stage is PROJECT_SYNC_STAGE.DELETE:
            # Remove files the client no longer has.
            return self.psync.delete_dbindex(
                json.loads(zlib.decompress(data['dbindex'])))
        if stage is PROJECT_SYNC_STAGE.UPLOAD:
            if not self._upstream:
                self._upstream = BytesIO()
            self._upstream.write(data['chunk'])
            if self._upstream.tell() != data['total']:
                # More chunks of the archive are still expected.
                return PROJECT_SYNC_STAGE.UPLOAD.value
            # Archive complete: unpack it into the project directory.
            self.psync.decompress_items(self._upstream)
            self._upstream = None
            return PROJECT_SYNC_STAGE.EXTRACTED.value
        return None
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$cmd_remote
- import os
- import click
- from platformio.commands.run import cli as cmd_run
- import cmd_test
- import remote_agent
- import remote_client
@click.group(short_help="PIO Remote")
@click.option("-a", "--agent", multiple=True)
@click.pass_context
def cli(ctx, agent):
    """`pio remote` command group."""
    # Stash the selected agent names on the context for subcommands.
    ctx.obj = agent
@cli.group("agent", short_help="Start new agent or list started")
def cmd_remote_agent():
    """`pio remote agent` subcommand group."""
    pass
@cmd_remote_agent.command("start", short_help="Start agent")
@click.option("-n", "--name")
@click.option("-s", "--share", multiple=True, metavar="E-MAIL")
@click.option("-d", "--working-dir", envvar="PLATFORMIO_REMOTE_AGENT_DIR",
              type=click.Path(file_okay=False, dir_okay=True, writable=True,
                              resolve_path=True))
def cmd_remote_agent_start(name, share, working_dir):
    """Start an agent on this machine (blocks in the reactor loop)."""
    remote_agent.Agent(name, share, working_dir).connect()
@cmd_remote_agent.command("reload", short_help="Reload agents")
@click.pass_obj
def cmd_remote_agent_reload(agents):
    """Ask the selected agents to reload themselves."""
    remote_client.ClientAgentReload(agents).connect()
@cmd_remote_agent.command("list", short_help="List active agents")
def cmd_remote_agent_list():
    """Print all agents currently registered for this account."""
    remote_client.ClientAgentList().connect()
@cli.command("update",
             short_help="Update installed Platforms, Packages and Libraries")
@click.option("-c", "--only-check", is_flag=True,
              help="Do not update, only check for new version")
@click.pass_obj
def cmd_remote_update(agents, only_check):
    """Run `platformio update` on the selected agents."""
    remote_client.ClientUpdate("update", agents,
                               dict(only_check=only_check)).connect()
@cli.command("run", short_help="Process project environments remotely")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option("-d", "--project-dir", default=os.getcwd,
              type=click.Path(exists=True, file_okay=True, dir_okay=True,
                              writable=True, resolve_path=True))
@click.option("--disable-auto-clean", is_flag=True)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.pass_obj
@click.pass_context
def cmd_remote_run(ctx, agents, environment, target, upload_port,
                   project_dir, disable_auto_clean, force_remote, silent,
                   verbose):
    """Run the project remotely.

    Default flow builds locally first, then (only when an upload/program
    target was requested) ships the prebuilt firmware to the agent with
    the `nobuild` target. `--force-remote` skips the local build and
    syncs sources instead.
    """
    cr = remote_client.ClientRunOrTest(
        "run", agents,
        dict(environment=environment, target=target,
             upload_port=upload_port, project_dir=project_dir,
             disable_auto_clean=disable_auto_clean,
             force_remote=force_remote, silent=silent, verbose=verbose))
    if force_remote:
        return cr.connect()
    click.secho("Building project locally", bold=True)
    local_targets = []
    if "clean" in target:
        local_targets = ["clean"]
    elif set(["buildfs", "uploadfs", "uploadfsota"]) & set(target):
        local_targets = ["buildfs"]
    else:
        local_targets = ["checkprogsize", "buildprog"]
    ctx.invoke(cmd_run, environment=environment, target=local_targets,
               project_dir=project_dir, silent=silent, verbose=verbose)
    if any(["upload" in t for t in target] + ["program" in target]):
        click.secho("Uploading firmware remotely", bold=True)
        # `target` is a tuple (click multiple=True), hence tuple concat.
        cr.options['target'] += ("nobuild",)
        cr.options['disable_auto_clean'] = True
        cr.connect()
    return True
@cli.command("test", short_help="Remote Unit Testing")
@click.option("--environment", "-e", multiple=True,
              metavar="<environment>")
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>")
@click.option("--upload-port")
@click.option("--test-port")
@click.option("-d", "--project-dir", default=os.getcwd,
              type=click.Path(exists=True, file_okay=False, dir_okay=True,
                              writable=True, resolve_path=True))
@click.option("-r", "--force-remote", is_flag=True)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
@click.pass_obj
@click.pass_context
def cmd_remote_test(ctx, agents, environment, ignore, upload_port,
                    test_port, project_dir, force_remote, without_building,
                    without_uploading, verbose):
    """Run unit tests remotely.

    Default flow builds the test suite locally (no upload, no run), then
    executes it remotely with `without_building`. `--force-remote`
    syncs sources and builds on the agent instead.
    """
    cr = remote_client.ClientRunOrTest(
        "test", agents,
        dict(environment=environment, ignore=ignore,
             upload_port=upload_port, test_port=test_port,
             project_dir=project_dir, force_remote=force_remote,
             without_building=without_building,
             without_uploading=without_uploading, verbose=verbose))
    if force_remote:
        return cr.connect()
    click.secho("Building project locally", bold=True)
    ctx.invoke(cmd_test.cli, environment=environment, ignore=ignore,
               project_dir=project_dir, without_uploading=True,
               without_testing=True, verbose=verbose)
    click.secho("Testing project remotely", bold=True)
    cr.options['without_building'] = True
    cr.connect()
    return True
@cli.group("device", short_help="Monitor remote device or list existing")
def cmd_remote_device():
    """`pio remote device` subcommand group."""
    pass
@cmd_remote_device.command("list", short_help="List remote devices")
@click.option("--json-output", is_flag=True)
@click.pass_obj
def cmd_device_list(agents, json_output):
    """List serial devices visible to the selected agents."""
    remote_client.ClientDeviceList(agents, json_output).connect()
@cmd_remote_device.command("monitor", short_help="Monitor remote device")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option("--baud", "-b", type=int, default=9600,
              help="Set baud rate, default=9600")
@click.option("--parity", default="N",
              type=click.Choice(["N", "E", "O", "S", "M"]),
              help="Set parity, default=N")
@click.option("--rtscts", is_flag=True,
              help="Enable RTS/CTS flow control, default=Off")
@click.option("--xonxoff", is_flag=True,
              help="Enable software flow control, default=Off")
@click.option("--rts", default=None, type=click.IntRange(0, 1),
              help="Set initial RTS line state")
@click.option("--dtr", default=None, type=click.IntRange(0, 1),
              help="Set initial DTR line state")
@click.option("--echo", is_flag=True,
              help="Enable local echo, default=Off")
@click.option("--encoding", default="UTF-8",
              help="Set the encoding for the serial port (e.g. hexlify, "
              "Latin1, UTF-8), default: UTF-8")
@click.option("--filter", "-f", multiple=True,
              help="Add text transformation")
@click.option("--eol", default="CRLF",
              type=click.Choice(["CR", "LF", "CRLF"]),
              help="End of line mode, default=CRLF")
@click.option("--raw", is_flag=True,
              help="Do not apply any encodings/transformations")
@click.option("--exit-char", type=int, default=3,
              help="ASCII code of special character that is used to exit "
              "the application, default=3 (Ctrl+C)")
@click.option("--menu-char", type=int, default=20,
              help="ASCII code of special character that is used to "
              "control miniterm (menu), default=20 (DEC)")
@click.option("--quiet", is_flag=True,
              help="Diagnostics: suppress non-error messages, default=Off")
@click.option("--sock",
              type=click.Path(exists=True, file_okay=False, dir_okay=True,
                              writable=True, resolve_path=True))
@click.pass_obj
def cmd_device_monitor(agents, **kwargs):
    """Open a serial monitor bridged from a remote agent's device.

    All options are forwarded as-is to ClientDeviceMonitor via kwargs.
    """
    remote_client.ClientDeviceMonitor(agents, **kwargs).connect()
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$cmd_debug
- import json
- import os
- import re
- import sys
- import time
- from contextlib import contextmanager
- from fnmatch import fnmatch
- from hashlib import sha1
- from io import BytesIO
- from os.path import abspath,basename,dirname,isdir,isfile,join,splitext
- from tempfile import mkdtemp
- import click
- from platformio import VERSION,app,exception,util
- from platformio.commands.platform import platform_install as cmd_platform_install
- from platformio.commands.run import cli as cmd_run
- from platformio.managers.platform import PlatformFactory
- from platformio.telemetry import MeasurementProtocol
- from twisted.internet import protocol,reactor,stdio,task
# Optional transcript path: when set, all GDB client/server I/O is
# appended to this file.  None disables transcript logging.
LOG_FILE=None
class GDBBytesIO(BytesIO):
    """In-memory stream whose writes are mirrored to stdout as GDB/MI
    console-stream records (``~"..."`` lines) instead of being buffered.

    Used to re-encode build output while GDB runs in MI mode.
    """

    # Bound once at class-creation time; may be overridden per instance.
    STDOUT = sys.stdout

    def write(self, text):
        # Emit one console record per line of the (trimmed) payload.
        out = self.STDOUT
        for line in text.strip().split("\n"):
            out.write('~"%s\\n"\n' % line)
        out.flush()
def is_mi_mode(args):
    """Return True when GDB was started in machine-interface (MI) mode,
    i.e. some argument contains ``--interpreter``."""
    return any("--interpreter" in arg for arg in args)
@click.command("debug",context_settings=dict(ignore_unknown_options=True),short_help="PIO Unified Debugger")
@click.option("-d","--project-dir",default=os.getcwd,type=click.Path(exists=True,file_okay=False,dir_okay=True,writable=True,resolve_path=True))
@click.option("--environment","-e",metavar="<environment>")
@click.option("--verbose","-v",is_flag=True)
@click.option("--interface",type=click.Choice(["gdb"]))
@click.argument("__unprocessed",nargs=-1,type=click.UNPROCESSED)
@click.pass_context
def cli(ctx,project_dir,environment,verbose,interface,__unprocessed):
    """`pio debug` entry point.

    Resolves the environment, makes sure the firmware carries debug
    symbols (rebuilding when needed) and, when ``--interface=gdb`` is
    given, spawns the GDB client/server pair and blocks in the Twisted
    reactor until the session ends.  Unknown options are collected in
    ``__unprocessed`` and forwarded verbatim to GDB.
    """
    try:
        util.ensure_udev_rules()
    except NameError:
        # Helper absent in this core version; skip the udev check.
        pass
    except exception.InvalidUdevRules as e:
        # Warn but continue; wrap lines as MI console records when GDB
        # runs in machine-interface mode.
        for line in str(e).split("\n")+[""]:
            click.echo(('~"%s\\n"' if is_mi_mode(__unprocessed)else "%s")%line)
    # IDEs may invoke us outside the project; fall back to $CWD.
    if not util.is_platformio_project(project_dir)and os.getenv("CWD"):
        project_dir=os.getenv("CWD")
    with util.cd(project_dir):
        env_name=check_env_name(project_dir,environment)
        env_options=get_env_options(project_dir,env_name)
        # "platform" and "board" are mandatory for a debug session.
        if not set(env_options.keys())>=set(["platform","board"]):
            raise exception.ProjectEnvsNotAvailable()
        debug_options=validate_debug_options(ctx,env_options)
        assert debug_options
        # Without --interface just pre-build the project and return.
        if not interface:
            return predebug_project(ctx,project_dir,env_name,False,verbose)
        configuration=load_configuration(ctx,project_dir,env_name)
        if not configuration:
            raise exception.DebugInvalidOptions("Could not load debug configuration")
        # `--version` passthrough used by IDE integrations.
        if "--version" in __unprocessed:
            result=util.exec_command([configuration['gdb_path'],"--version"])
            if result['returncode']==0:
                return click.echo(result['out'])
            raise exception.PlatformioException("\n".join([result['out'],result['err']]))
        debug_options['load_cmd']=configure_esp32_load_cmd(debug_options,configuration)
        rebuild_prog=False
        preload=debug_options['load_cmd']=="preload"
        load_mode=debug_options['load_mode']
        # Decide whether firmware must be rebuilt before debugging.
        if load_mode=="always":
            rebuild_prog=(preload or not has_debug_symbols(configuration['prog_path']))
        elif load_mode=="modified":
            rebuild_prog=(is_prog_obsolete(configuration['prog_path'])or not has_debug_symbols(configuration['prog_path']))
        else:
            rebuild_prog=not isfile(configuration['prog_path'])
        if preload or(not rebuild_prog and load_mode!="always"):
            # Firmware already on target (or uploaded during the build);
            # GDB must not issue its own load command.
            debug_options['load_cmd']=None
        if rebuild_prog:
            if is_mi_mode(__unprocessed):
                # MI mode: re-encode build output as GDB console records.
                output=GDBBytesIO()
                click.echo('~"Preparing firmware for debugging...\\n"')
                with capture_std_streams(output):
                    predebug_project(ctx,project_dir,env_name,preload,verbose)
                output.close()
            else:
                click.echo("Preparing firmware for debugging...")
                predebug_project(ctx,project_dir,env_name,preload,verbose)
            if load_mode=="modified":
                # Refresh the stored checksum after the rebuild.
                is_prog_obsolete(configuration['prog_path'])
        if not isfile(configuration['prog_path']):
            raise exception.DebugInvalidOptions("Program/firmware is missed")
        # Hand over to GDB; reactor.run() blocks until session end.
        client=GDBClient(project_dir,__unprocessed,debug_options,env_options)
        client.spawn(configuration['gdb_path'],configuration['prog_path'])
        reactor.run()
    return True
def escape_path(path):
    """Normalize *path* for use inside GDB scripts by converting
    Windows backslashes to forward slashes."""
    return "/".join(path.split("\\"))
@contextmanager
def capture_std_streams(stdout,stderr=None):
    """Temporarily redirect ``sys.stdout``/``sys.stderr``.

    :param stdout: file-like object receiving everything written to stdout
    :param stderr: optional stream for stderr; defaults to *stdout*
    """
    _stdout=sys.stdout
    _stderr=sys.stderr
    sys.stdout=stdout
    sys.stderr=stderr or stdout
    try:
        yield
    finally:
        # Restore the real streams even when the managed block raises.
        # The original implementation skipped restoration on error,
        # permanently hijacking stdout/stderr for the rest of the run.
        sys.stdout=_stdout
        sys.stderr=_stderr
def is_prog_obsolete(prog_path):
    """Return True when *prog_path* changed since the last recorded build.

    A SHA1 digest of the program is cached next to it in ``<prog>.sha1``;
    the cache file is rewritten whenever the digest differs.  A missing
    program counts as obsolete.
    """
    checksum_path = prog_path + ".sha1"
    if not isfile(prog_path):
        return True
    digest = sha1()
    with open(prog_path, "rb") as stream:
        for chunk in iter(lambda: stream.read(1024), b""):
            digest.update(chunk)
    current = digest.hexdigest()
    previous = None
    if isfile(checksum_path):
        with open(checksum_path, "r") as stream:
            previous = stream.read()
    if current == previous:
        return False
    with open(checksum_path, "w") as stream:
        stream.write(current)
    return True
def has_debug_symbols(prog_path):
    # Heuristic check that the program at *prog_path* was built for
    # debugging: scan the raw binary for DWARF section names and the
    # compiler flags/marker PlatformIO embeds into debug builds.
    # NOTE(review): str patterns are matched against a binary read —
    # valid only under Python 2, which this file targets.
    if not isfile(prog_path):
        return False
    # pattern -> "already found" flag.  "__PLATFORMIO_DEBUG__" is
    # pre-marked found when the core VERSION is older than 3.6
    # (presumably older cores do not embed the marker — confirm).
    matched={".debug_info":False,".debug_abbrev":False," -Og":False," -g":False,"__PLATFORMIO_DEBUG__":(3,6)>VERSION[:2]}
    with open(prog_path,"rb")as fp:
        last_data=""
        while True:
            data=fp.read(1024)
            if not data:
                break
            for pattern,found in matched.items():
                if found:
                    continue
                # Search the previous chunk too so a pattern split across
                # the 1 KB read boundary is still detected.
                if pattern in last_data+data:
                    matched[pattern]=True
            last_data=data
    return all(matched.values())
def predebug_project(ctx,project_dir,env_name,preload,verbose):
    """Build the environment with the ``__debug`` target (plus ``upload``
    when *preload* is set) before a debug session starts."""
    targets = ["__debug"]
    if preload:
        targets.append("upload")
    ctx.invoke(cmd_run, project_dir=project_dir, environment=[env_name],
               target=targets, verbose=verbose)
    if preload:
        # Give the freshly flashed target time to reboot.
        time.sleep(5)
def get_env_options(project_dir,environment):
    """Return every option of the ``[env:<environment>]`` section of the
    project's ``platformio.ini`` as a plain dict."""
    config = util.load_project_config(project_dir)
    return dict(config.items("env:%s" % environment))
def check_env_name(project_dir,environment):
    """Resolve which environment the debug session should use.

    Falls back to ``env_default`` from the ``[platformio]`` section, then
    to the first declared environment.  Raises ProjectEnvsNotAvailable
    when no environments exist and UnknownEnvNames when the requested
    one is not declared.
    """
    config = util.load_project_config(project_dir)
    envs = [section[4:] for section in config.sections()
            if section.startswith("env:")]
    if not envs:
        raise exception.ProjectEnvsNotAvailable()
    if not environment and config.has_option("platformio", "env_default"):
        # Only the first entry of a comma-separated default list is used.
        environment = config.get("platformio", "env_default").split(", ")[0]
    if not environment:
        return envs[0]
    if environment in envs:
        return environment
    raise exception.UnknownEnvNames(environment, envs)
def validate_debug_options(cmd_ctx,env_options):
    # Merge the environment's platformio.ini options with the board's
    # "debug" manifest into one dict of debug-session settings.
    def _cleanup_cmds(cmds):
        # Normalize a newline-separated string or list of GDB commands
        # into a stripped, non-empty list.
        if not cmds:
            return[]
        if not isinstance(cmds,list):
            cmds=cmds.split("\n")
        return[c.strip()for c in cmds if c.strip()]
    try:
        platform=PlatformFactory.newPlatform(env_options['platform'])
    except exception.UnknownPlatform:
        # Platform not installed yet: install it (without its default
        # packages) and retry.
        cmd_ctx.invoke(cmd_platform_install,platforms=[env_options['platform']],skip_default_package=True)
        platform=PlatformFactory.newPlatform(env_options['platform'])
    board_config=platform.board_config(env_options['board'])
    tool_name=board_config.get_debug_tool_name(env_options.get("debug_tool"))
    tool_settings=board_config.get("debug",{}).get("tools",{}).get(tool_name,{})
    server_options=None
    # A board may list several server variants; pick the one matching
    # the current system type (falls back to the last entry).  Mutates
    # tool_settings['server'] from list to the chosen dict.
    if isinstance(tool_settings.get("server",{}),list):
        for item in tool_settings['server'][:]:
            tool_settings['server']=item
            if util.get_systype()in item.get("system",[]):
                break
    if env_options.get("debug_server"):
        # User-defined server: the first non-empty line/element is the
        # executable, the rest are its arguments.
        server_options={"cwd":None,"executable":None,"arguments":env_options.get("debug_server")}
        if not isinstance(server_options['arguments'],list):
            server_options['arguments']=server_options['arguments'].split("\n")
        server_options['arguments']=[arg.strip()for arg in server_options['arguments']if arg.strip()]
        server_options['executable']=server_options['arguments'][0]
        server_options['arguments']=server_options['arguments'][1:]
    elif "server" in tool_settings:
        # Board-defined server: install its package on demand and expand
        # $PACKAGE_DIR inside the argument list.
        server_package=tool_settings['server'].get("package")
        server_package_dir=platform.get_package_dir(server_package)if server_package else None
        if server_package and not server_package_dir:
            platform.install_packages(with_packages=[server_package],skip_default_package=True,silent=True)
            server_package_dir=platform.get_package_dir(server_package)
        server_options=dict(cwd=server_package_dir if server_package else None,executable=tool_settings['server'].get("executable"),arguments=[a.replace("$PACKAGE_DIR",escape_path(server_package_dir))if server_package_dir else a for a in tool_settings['server'].get("arguments",[])])
    # User-provided extra commands run before the board-defined ones.
    extra_cmds=_cleanup_cmds(env_options.get("debug_extra_cmds"))
    extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
    # Final settings: env_options (platformio.ini) wins over the board
    # manifest, which wins over the hard-coded defaults.
    result=dict(tool=tool_name,upload_protocol=env_options.get("upload_protocol",board_config.get("upload",{}).get("protocol")),load_cmd=env_options.get("debug_load_cmd",tool_settings.get("load_cmd","load")),load_mode=env_options.get("debug_load_mode",tool_settings.get("load_mode","always")),init_break=env_options.get("debug_init_break",tool_settings.get("init_break","tbreak main")),init_cmds=_cleanup_cmds(env_options.get("debug_init_cmds",tool_settings.get("init_cmds"))),extra_cmds=extra_cmds,require_debug_port=tool_settings.get("require_debug_port",False),port=reveal_debug_port(env_options.get("debug_port",tool_settings.get("port")),tool_name,tool_settings),server=server_options)
    return result
def reveal_debug_port(env_debug_port,tool_name,tool_settings):
    # Resolve the debug port: an explicit value is returned verbatim;
    # a glob pattern (or no value) triggers auto-detection across the
    # connected serial ports using the tool's hardware IDs.
    def _get_pattern():
        # Treat env_debug_port as a glob only when it contains wildcard
        # characters; otherwise it is a concrete port name.
        if not env_debug_port:
            return None
        if set(["*","?","[","]"])&set(env_debug_port):
            return env_debug_port
        return None
    def _is_match_pattern(port):
        pattern=_get_pattern()
        if not pattern:
            return True
        return fnmatch(port,pattern)
    def _look_for_serial_port(hwids):
        # Scan serial ports (with HWID info) for the first match.
        for item in util.get_serialports(filter_hwid=True):
            if not _is_match_pattern(item['port']):
                continue
            port=item['port']
            if tool_name.startswith("blackmagic"):
                # Windows: COM10 and above need the \\.\ device prefix.
                if "windows" in util.get_systype()and port.startswith("COM")and len(port)>4:
                    port="\\\\.\\%s"%port
                # Black Magic Probe exposes a dedicated "GDB" interface.
                if "GDB" in item['description']:
                    return port
            # Generic match: compare "VID:PID" (without 0x) to the
            # port's HWID string.
            for hwid in hwids:
                hwid_str=("%s:%s"%(hwid[0],hwid[1])).replace("0x","")
                if hwid_str in item['hwid']:
                    return port
        return None
    if env_debug_port and not _get_pattern():
        # Concrete port given explicitly; trust it as-is.
        return env_debug_port
    if not tool_settings.get("require_debug_port"):
        return None
    debug_port=_look_for_serial_port(tool_settings.get("hwids",[]))
    if not debug_port:
        raise exception.DebugInvalidOptions("Please specify `debug_port` for environment")
    return debug_port
def load_configuration(ctx,project_dir,env_name):
    # Run the build system's "idedata" target and extract the JSON blob
    # it prints (gdb_path, prog_path, includes, ...) from the captured
    # output.  Returns None when no usable JSON line is found.
    output=BytesIO()
    with capture_std_streams(output):
        ctx.invoke(cmd_run,project_dir=project_dir,environment=[env_name],target=["idedata"])
    result=output.getvalue()
    output.close()
    # NOTE(review): result comes from a BytesIO; matching/splitting it
    # with str literals works only under Python 2 (this file's target).
    if '"includes":' not in result:
        return None
    for line in result.split("\n"):
        line=line.strip()
        # The idedata payload is the line that opens a JSON object and
        # mentions "cxx_path"; trailing non-JSON noise is trimmed by
        # cutting at the last closing brace.
        if line.startswith('{"')and "cxx_path" in line:
            return json.loads(line[:line.rindex("}")+1])
    return None
def configure_esp32_load_cmd(debug_options,configuration):
    """Return the GDB load command for the current target.

    ESP32 targets that ship extra flash images (bootloader, partition
    table) replace the generic ``load`` command with a sequence of
    OpenOCD ``monitor program_esp32`` commands; every other
    configuration keeps ``debug_options['load_cmd']`` untouched.
    """
    # Normalize to a list up front: the original iterated the raw
    # .get() result inside the eagerly-evaluated ignore_conds list and
    # crashed with TypeError when "flash_extra_images" was absent.
    extra_images = configuration.get("flash_extra_images") or []
    ignore_conds = [
        debug_options['load_cmd'] != "load",
        "xtensa-esp32" not in configuration.get("cc_path", ""),
        not extra_images,
        not all([isfile(item['path']) for item in extra_images]),
    ]
    if any(ignore_conds):
        return debug_options['load_cmd']
    # Flash each extra image at its declared offset, then the main
    # program binary at the standard ESP32 app offset 0x10000.
    mon_cmds = [
        'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
            path=escape_path(item['path']), offset=item['offset'])
        for item in extra_images
    ]
    mon_cmds.append('monitor program_esp32 "{%s.bin}" 0x10000 verify'
                    % escape_path(configuration['prog_path'][:-4]))
    return "\n".join(mon_cmds)
class BaseProcess(protocol.ProcessProtocol):
    # Shared Twisted process protocol: mirrors a child's stdout/stderr
    # to ours (optionally tee-ing into LOG_FILE) and expands "$KEY"
    # placeholders in command/configuration data.
    STDOUT_CHUNK_SIZE=2048
    # Placeholders available to every subprocess configuration.
    COMMON_PATTERNS={"PLATFORMIO_HOME_DIR":escape_path(util.get_home_dir()),"PYTHONEXE":os.getenv("PYTHONEXEPATH","")}
    def apply_patterns(self,source,patterns=None):
        # Recursively replace "$KEY" placeholders in a string, list or
        # dict (in place for containers) with values from
        # COMMON_PATTERNS merged with *patterns*.  Returns *source*.
        # NOTE: `basestring` makes this method Python 2 only.
        _patterns=self.COMMON_PATTERNS.copy()
        _patterns.update(patterns or{})
        def _replace(text):
            # None values substitute as empty strings.
            for key,value in _patterns.items():
                pattern="$%s"%key
                text=text.replace(pattern,value or "")
            return text
        if isinstance(source,basestring):
            source=_replace(source)
        elif isinstance(source,(list,dict)):
            items=enumerate(source)if isinstance(source,list)else source.items()
            for key,value in items:
                if isinstance(value,basestring):
                    source[key]=_replace(value)
                elif isinstance(value,(list,dict)):
                    source[key]=self.apply_patterns(value,patterns)
        return source
    def outReceived(self,data):
        # Mirror child stdout to ours in bounded chunks; append to the
        # transcript file when LOG_FILE is set.
        if LOG_FILE:
            with open(LOG_FILE,"a")as fp:
                fp.write(data)
        while data:
            chunk=data[:self.STDOUT_CHUNK_SIZE]
            click.echo(chunk,nl=False)
            data=data[self.STDOUT_CHUNK_SIZE:]
    def errReceived(self,data):
        # Mirror child stderr to ours (unchunked).
        if LOG_FILE:
            with open(LOG_FILE,"a")as fp:
                fp.write(data)
        click.echo(data,nl=False,err=True)
class DebugServer(BaseProcess):
    # Manages the external debug-server process (OpenOCD, J-Link,
    # st-util, mspdebug, ...) that the GDB client connects to.
    def __init__(self,debug_options,env_options):
        self.debug_options=debug_options
        self.env_options=env_options
        self._debug_port=None   # port string for GDB's "target" command
        self._transport=None    # Twisted transport of the spawned server
    def spawn(self,patterns):
        # Start the configured debug server (if any) after expanding
        # $PATTERN placeholders, and derive the port GDB should connect
        # to.  Returns the Twisted transport, or None when no server is
        # configured / no executable is set.
        systype=util.get_systype()
        server=self.debug_options.get("server")
        if not server:
            return None
        server=self.apply_patterns(server,patterns)
        server_executable=server['executable']
        if not server_executable:
            return None
        if server['cwd']:
            server_executable=join(server['cwd'],server_executable)
        # Windows binaries may be listed without their .exe suffix.
        if("windows" in systype and not server_executable.endswith(".exe")and isfile(server_executable+".exe")):
            server_executable=server_executable+".exe"
        if not isfile(server_executable):
            # Fall back to a PATH lookup.
            server_executable=util.where_is_program(server_executable)
        if not isfile(server_executable):
            raise exception.DebugInvalidOptions("\nCould not launch Debug Server '%s'. Please check that it " "is installed and is included in a system PATH\n\n" "See documentation or contact support@pioplus.com:\n" "http://docs.platformio.org/page/plus/debugging.html\n"%server_executable)
        # Default: OpenOCD-style GDB server on localhost:3333.
        self._debug_port=":3333"
        # Prefer piping GDB straight into OpenOCD when no explicit port
        # is configured (not supported on the riscv platform).
        openocd_pipe_allowed=all([not self.debug_options['port'],"openocd" in server_executable,self.env_options['platform']!="riscv"])
        if openocd_pipe_allowed:
            args=[]
            if server['cwd']:
                args.extend(["-s",escape_path(server['cwd'])])
            args.extend(["-c","gdb_port pipe; tcl_port disabled; telnet_port disabled"])
            args.extend(server['arguments'])
            # Quote non-option arguments; GDB runs this as "target | cmd".
            str_args=" ".join([arg if arg.startswith("-")else '"%s"'%arg for arg in args])
            self._debug_port='| "%s" %s'%(escape_path(server_executable),str_args)
        else:
            env=os.environ.copy()
            # Expose the server package's bundled libraries (lib/) and
            # helper binaries (bin/) to the child process.
            if("windows" not in systype and server['cwd']and isdir(join(server['cwd'],"lib"))):
                ld_key=("DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH")
                env[ld_key]=join(server['cwd'],"lib")
                if os.environ.get(ld_key):
                    env[ld_key]="%s:%s"%(env[ld_key],os.environ.get(ld_key))
            if server['cwd']and isdir(join(server['cwd'],"bin")):
                env['PATH']="%s%s%s"%(join(server['cwd'],"bin"),os.pathsep,os.environ.get("PATH",os.environ.get("Path","")))
            self._transport=reactor.spawnProcess(self,server_executable,[server_executable]+server['arguments'],path=server['cwd'],env=env)
            # Non-OpenOCD servers listen on their own default ports.
            if "mspdebug" in server_executable.lower():
                self._debug_port=":2000"
            elif "jlink" in server_executable.lower():
                self._debug_port=":2331"
        return self._transport
    def get_debug_port(self):
        # Port string computed by spawn(); None before spawn() ran.
        return self._debug_port
    def terminate(self):
        # Force-kill the server process if one was spawned.
        if self._transport:
            self._transport.signalProcess("KILL")
class GDBClient(BaseProcess):
    # Twisted process protocol wrapping the GDB client: generates the
    # .pioinit bootstrap script, spawns GDB (and the debug server),
    # relays stdin/stdout, auto-continues after init and enforces a
    # single debug session per program via a PID lock in ContentCache.
    PIO_SRC_NAME=".pioinit"
    # Marker printed by .pioinit once remote-target setup finished.
    INIT_COMPLETED_BANNER="PlatformIO: Initialization completed"
    # GDB bootstrap scripts per server type; $DEBUG_PORT, $INIT_BREAK
    # and $LOAD_CMD are expanded by apply_patterns().
    DEFAULT_INIT_CONFIG="""
define pio_reset_halt_target
monitor reset halt
end
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
$INIT_BREAK
pio_reset_halt_target
$LOAD_CMD
monitor init
pio_reset_halt_target
"""
    STUTIL_INIT_CFG="""
define pio_reset_halt_target
monitor halt
monitor reset
end
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
$INIT_BREAK
pio_reset_halt_target
$LOAD_CMD
pio_reset_halt_target
"""
    JLINK_INIT_CONFIG="""
define pio_reset_halt_target
monitor halt
monitor reset
end
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
$INIT_BREAK
pio_reset_halt_target
$LOAD_CMD
pio_reset_halt_target
"""
    BLACKMAGIC_INIT_CONFIG="""
define pio_reset_halt_target
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
end
define pio_reset_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
monitor swdp_scan
attach 1
set mem inaccessible-by-default off
$INIT_BREAK
$LOAD_CMD
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
"""
    MSPDEBUG_INIT_CFG="""
define pio_reset_halt_target
end
define pio_reset_target
end
target extended-remote $DEBUG_PORT
$INIT_BREAK
monitor erase
$LOAD_CMD
pio_reset_halt_target
"""
    def __init__(self,project_dir,args,debug_options,env_options):
        self.project_dir=project_dir
        self.args=list(args)            # extra args forwarded to GDB
        self.debug_options=debug_options
        self.env_options=env_options
        self._debug_server=DebugServer(debug_options,env_options)
        self._gdbsrc_dir=mkdtemp()      # holds the generated .pioinit
        self._session_id=None           # sha1 over gdb_path+prog_path
        self._target_is_run=False       # target received run/continue
        self._last_server_activity=0    # timestamp of last GDB traffic
        self._auto_continue_timer=None  # LoopingCall polling for idle
    def spawn(self,gdb_path,prog_path):
        # Launch the debug server (if configured), render .pioinit and
        # spawn GDB pointed at the script directory and the program.
        # NOTE: sha1() over str arguments is Python 2 specific.
        self._session_id=sha1(gdb_path+prog_path).hexdigest()
        self._kill_previous_session()
        patterns={"PROJECT_DIR":escape_path(self.project_dir),"PROG_PATH":escape_path(prog_path),"PROG_DIR":escape_path(dirname(prog_path)),"PROG_NAME":basename(splitext(prog_path)[0]),"DEBUG_PORT":self.debug_options['port'],"UPLOAD_PROTOCOL":self.debug_options['upload_protocol'],"INIT_BREAK":self.debug_options['init_break']or "","LOAD_CMD":self.debug_options['load_cmd']or "",}
        self._debug_server.spawn(patterns)
        if not patterns['DEBUG_PORT']:
            # No explicit port: use whatever the server derived.
            patterns['DEBUG_PORT']=self._debug_server.get_debug_port()
        self.generate_pioinit(self._gdbsrc_dir,patterns)
        # argv[0] "piogdb" is cosmetic; "-l 10" sets the remote timeout.
        args=["piogdb","-q","--directory",self._gdbsrc_dir,"--directory",self.project_dir,"-l","10"]
        args.extend(self.args)
        if not gdb_path:
            raise exception.DebugInvalidOptions("GDB client is not configured")
        gdb_data_dir=self._get_data_dir(gdb_path)
        if gdb_data_dir:
            args.extend(["--data-directory",gdb_data_dir])
        args.append(patterns['PROG_PATH'])
        return reactor.spawnProcess(self,gdb_path,args,path=self.project_dir,env=os.environ)
    @staticmethod
    def _get_data_dir(gdb_path):
        # Locate GDB's ../share/gdb data directory next to the binary;
        # msp430-gdb is known not to need/ship one.
        if "msp430" in gdb_path:
            return None
        gdb_data_dir=abspath(join(dirname(gdb_path),"..","share","gdb"))
        return gdb_data_dir if isdir(gdb_data_dir)else None
    def generate_pioinit(self,dst_dir,patterns):
        # Pick the bootstrap script for the configured server, splice in
        # user init/extra commands and write .pioinit into *dst_dir*.
        server_exe=(self.debug_options.get("server")or{}).get("executable","").lower()
        if "jlink" in server_exe:
            cfg=self.JLINK_INIT_CONFIG
        elif "st-util" in server_exe:
            cfg=self.STUTIL_INIT_CFG
        elif "mspdebug" in server_exe:
            cfg=self.MSPDEBUG_INIT_CFG
        elif self.debug_options['require_debug_port']:
            # Serial-port-based probes (e.g. Black Magic) need no server.
            cfg=self.BLACKMAGIC_INIT_CONFIG
        else:
            cfg=self.DEFAULT_INIT_CONFIG
        commands=cfg.split("\n")
        # debug_init_cmds fully replaces the built-in script.
        if self.debug_options['init_cmds']:
            commands=self.debug_options['init_cmds']
        commands.extend(self.debug_options['extra_cmds'])
        # Guarantee the pio_* helper commands exist even with custom
        # init scripts.
        if not any("define pio_reset_target" in cmd for cmd in commands):
            commands=["define pio_reset_target"," echo Warning! Undefined pio_reset_target command\\n"," mon reset","end"]+commands
        if not any("define pio_reset_halt_target" in cmd for cmd in commands):
            commands=["define pio_reset_halt_target"," echo Warning! Undefined pio_reset_halt_target command\\n"," mon reset halt","end"]+commands
        if not any("define pio_restart_target" in cmd for cmd in commands):
            commands+=["define pio_restart_target"," pio_reset_halt_target"," $INIT_BREAK"," %s"%("continue" if patterns['INIT_BREAK']else "next"),"end"]
        banner=["echo PlatformIO Unified Debugger > http://bit.ly/pio-debug\\n","echo PlatformIO: Initializing remote target...\\n"]
        footer=["echo %s\\n"%self.INIT_COMPLETED_BANNER]
        commands=banner+commands+footer
        with open(join(dst_dir,self.PIO_SRC_NAME),"w")as fp:
            fp.write("\n".join(self.apply_patterns(commands,patterns)))
    def connectionMade(self):
        # GDB is up: lock the session to our PID and start forwarding
        # our stdin to GDB.
        self._lock_session(self.transport.pid)
        p=protocol.Protocol()
        p.dataReceived=self.onStdInData
        stdio.StandardIO(p)
    def onStdInData(self,data):
        # Intercept user/IDE input before forwarding it to GDB.
        self._last_server_activity=time.time()
        if LOG_FILE:
            with open(LOG_FILE,"a")as fp:
                fp.write(data)
        if "-exec-run" in data:
            if self._target_is_run:
                # Target already runs: fake a "^running" reply so the
                # IDE does not restart the program.
                token,_=data.split("-",1)
                self.outReceived("%s^running\n"%token)
                return
            # Embedded targets are already loaded; continue instead.
            data=data.replace("-exec-run","-exec-continue")
        if "-exec-continue" in data:
            self._target_is_run=True
        if "-gdb-exit" in data or data=="quit":
            # Reset the target before detaching.
            self.transport.write("pio_reset_target\n")
        self.transport.write(data)
    def processEnded(self,reason):
        # GDB exited: release the lock, remove the temporary script dir
        # and take the debug server and the reactor down with us.
        self._unlock_session()
        if self._gdbsrc_dir and isdir(self._gdbsrc_dir):
            util.rmtree_(self._gdbsrc_dir)
        if self._debug_server:
            self._debug_server.terminate()
        reactor.stop()
    def outReceived(self,data):
        # GDB output: forward it, watch for errors and, after init
        # completes, start polling for the auto-continue opportunity.
        self._last_server_activity=time.time()
        BaseProcess.outReceived(self,data)
        self._handle_error(data)
        if self.INIT_COMPLETED_BANNER in data:
            self._auto_continue_timer=task.LoopingCall(self._auto_exec_continue)
            self._auto_continue_timer.start(0.1)
    def errReceived(self,data):
        BaseProcess.errReceived(self,data)
        self._handle_error(data)
    def console_log(self,msg):
        # Print *msg* to the user, MI-encoded when GDB runs in MI mode.
        if is_mi_mode(self.args):
            self.outReceived('~"%s\\n"\n'%msg)
        else:
            self.outReceived("%s\n"%msg)
    def _auto_exec_continue(self):
        # Once GDB has been idle for 0.5s after init, resume execution
        # up to the configured debug_init_break (unless already running).
        auto_exec_delay=0.5
        if self._last_server_activity>(time.time()-auto_exec_delay):
            return
        if self._auto_continue_timer:
            self._auto_continue_timer.stop()
            self._auto_continue_timer=None
        if not self.debug_options['init_break']or self._target_is_run:
            return
        self.console_log("PlatformIO: Resume the execution to `debug_init_break = %s`"%self.debug_options['init_break'])
        self.transport.write("0-exec-continue\n" if is_mi_mode(self.args)else "continue\n")
        self._target_is_run=True
    def _handle_error(self,data):
        # A ".pioinit ... Error in sourced" message means our bootstrap
        # script failed: report an anonymized telemetry exception (paths
        # trimmed to their last two components) and drop the session.
        if self.PIO_SRC_NAME not in data or "Error in sourced" not in data:
            return
        configuration={"debug":self.debug_options,"env":self.env_options}
        exd=re.sub(r'\\(?!")',"/",json.dumps(configuration))
        # NOTE(review): re.I|re.M is passed as re.sub's *count*
        # positional argument, not flags — likely a latent bug.
        exd=re.sub(r'"(?:[a-z]\:)?((/[^"/]+)+)"',lambda m:'"%s"'%join(*m.group(1).split("/")[-2:]),exd,re.I|re.M)
        mp=MeasurementProtocol()
        mp['exd']="DebugGDBPioInitError: %s"%exd
        mp['exf']=1
        mp.send("exception")
        self.transport.loseConnection()
    def _kill_previous_session(self):
        # If an older session locked this program, kill its PID
        # (best-effort) and clear the lock.
        assert self._session_id
        pid=None
        with app.ContentCache()as cc:
            pid=cc.get(self._session_id)
            cc.delete(self._session_id)
        if not pid:
            return
        if "windows" in util.get_systype():
            kill=["Taskkill","/PID",pid,"/F"]
        else:
            kill=["kill",pid]
        try:
            util.exec_command(kill)
        except:
            # NOTE(review): bare except — deliberate best-effort kill.
            pass
    def _lock_session(self,pid):
        # Record our GDB PID under the session id for one hour.
        if not self._session_id:
            return
        with app.ContentCache()as cc:
            cc.set(self._session_id,str(pid),"1h")
    def _unlock_session(self):
        if not self._session_id:
            return
        with app.ContentCache()as cc:
            cc.delete(self._session_id)
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$cmd_home
- import glob
- import json
- import os
- import re
- import shutil
- import socket
- import sys
- import time
- from os.path import(basename,expanduser,getmtime,isdir,isfile,join,realpath,sep)
- import click
- import requests
- from bs4 import BeautifulSoup
- from platformio import __version__,app,exception,util
- from platformio.ide.projectgenerator import ProjectGenerator
- from platformio.managers.core import get_core_package_dir
- from platformio.managers.platform import PlatformManager
- from twisted.internet import defer,reactor
- from twisted.internet.utils import getProcessOutputAndValue
- from twisted.web import server,static
- from txjason import handler as jsonRPCHandler
- from txjason.protocol import JSONRPCServerFactory
- from txjason.service import JSONRPCError
- from txsockjs.factory import SockJSResource
- import cmd_account
- import requests_threads
- try:
- from configparser import Error as ConfigParserError
- except ImportError:
- from ConfigParser import Error as ConfigParserError
@util.memoized(expire=5000)
def requests_session():
    # Shared asynchronous HTTP session with 5 worker threads, memoized
    # (expire=5000 — presumably milliseconds; confirm against
    # util.memoized) so concurrent RPC calls reuse one pool.
    return requests_threads.AsyncSession(n=5)
@util.memoized()
def get_core_fullpath():
    # Absolute path of the "platformio" executable (with .exe suffix on
    # Windows); memoized for the process lifetime.
    return util.where_is_program("platformio"+(".exe" if "windows" in util.get_systype()else ""))
@util.memoized(expire=10000)
def is_twitter_blocked():
    """Best-effort probe whether Twitter's network (104.244.42.1) is
    reachable; PIO Home uses this to pick which news feed to show.

    Returns False when the probe succeeds, True on any failure
    (timeout, refused connection, no network at all).
    """
    ip="104.244.42.1"
    timeout=2
    try:
        if os.getenv("HTTP_PROXY",os.getenv("HTTPS_PROXY")):
            # Behind a proxy a raw TCP connect is meaningless; issue a
            # plain HTTP request instead and ignore the response.
            requests.get("http://%s"%ip,allow_redirects=False,timeout=timeout)
        else:
            # Fix: the socket was never closed (fd leak) and the
            # declared timeout was not applied on this path.
            sock=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
            sock.settimeout(timeout)
            try:
                sock.connect((ip,80))
            finally:
                sock.close()
        return False
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any probe failure means "blocked".
        pass
    return True
class AppRPC(jsonRPCHandler.Handler):
    # JSON-RPC handler for PIO Home application state, persisted in
    # ~/.platformio/homestate.json.
    APPSTATE_PATH=join(util.get_home_dir(),"homestate.json")
    @staticmethod
    def load_state():
        # Load the persisted state and refresh its "storage" section
        # with live core data (cid, version, settings, recent projects).
        state=None
        try:
            if isfile(AppRPC.APPSTATE_PATH):
                state=util.load_json(AppRPC.APPSTATE_PATH)
        except exception.PlatformioException:
            # Corrupt state file: fall through and start from scratch.
            pass
        if not isinstance(state,dict):
            state={}
        storage=state.get("storage",{})
        caller_id=app.get_session_var("caller_id")
        storage['cid']=app.get_cid()
        storage['coreVersion']=__version__
        storage['coreSystype']=util.get_systype()
        storage['coreCaller']=(str(caller_id).lower()if caller_id else None)
        # Expose every core setting with its description, default and
        # current value for the settings UI.
        storage['coreSettings']={name:{"description":data['description'],"default_value":data['value'],"value":app.get_setting(name)}for name,data in app.DEFAULT_SETTINGS.items()}
        # Normalize *dir settings to unicode paths for the JS frontend.
        for key in storage['coreSettings']:
            if not key.endswith("dir"):
                continue
            storage['coreSettings'][key]['default_value']=util.path_to_unicode(storage['coreSettings'][key]['default_value'])
            storage['coreSettings'][key]['value']=util.path_to_unicode(storage['coreSettings'][key]['value'])
        storage['homeDir']=util.path_to_unicode(expanduser("~"))
        storage['projectsDir']=storage['coreSettings']['projects_dir']['value']
        # Drop recent entries that are no longer PlatformIO projects.
        storage['recentProjects']=[p for p in storage.get("recentProjects",[])if util.is_platformio_project(p)]
        state['storage']=storage
        return state
    @jsonRPCHandler.exportRPC()
    def get_state(self):
        # RPC: return the refreshed application state.
        try:
            return AppRPC.load_state()
        except Exception as e:
            raise JSONRPCError(e)
    @jsonRPCHandler.exportRPC()
    def save_state(self,state):
        # RPC: persist the complete state verbatim.
        with open(self.APPSTATE_PATH,"w")as fp:
            json.dump(state,fp)
        return True
class ProjectRPC(jsonRPCHandler.Handler):
    # JSON-RPC handler for PIO Home project operations: listing,
    # creation, Arduino/PIO project import and example discovery.
    def _get_projects(self,project_dirs=None):
        # Build project summaries (boards, extra lib storages) for the
        # given directories, defaulting to the recent-projects list.
        def _get_project_data(project_dir):
            # Extract board ids and lib_extra_dirs from platformio.ini.
            data={"boards":[],"libExtraDirs":[]}
            config=util.load_project_config(project_dir)
            if config.has_section("platformio")and config.has_option("platformio","lib_extra_dirs"):
                data['libExtraDirs'].extend(util.parse_conf_multi_values(config.get("platformio","lib_extra_dirs")))
            for section in config.sections():
                if not section.startswith("env:"):
                    continue
                if config.has_option(section,"board"):
                    data['boards'].append(config.get(section,"board"))
                if config.has_option(section,"lib_extra_dirs"):
                    data['libExtraDirs'].extend(util.parse_conf_multi_values(config.get(section,"lib_extra_dirs")))
            # Resolve lib dirs relative to the project; keep existing ones.
            with util.cd(project_dir):
                data['libExtraDirs']=[expanduser(d)if d.startswith("~")else realpath(d)for d in data['libExtraDirs']]
                data['libExtraDirs']=[d for d in data['libExtraDirs']if isdir(d)]
            return data
        def _path_to_name(path):
            # Human-readable name: last two path components.
            return(sep).join(path.split(sep)[-2:])
        if not project_dirs:
            project_dirs=AppRPC.load_state()['storage']['recentProjects']
        result=[]
        pm=PlatformManager()
        for project_dir in project_dirs:
            data={}
            boards=[]
            try:
                data=_get_project_data(project_dir)
            except exception.NotPlatformIOProject:
                # Not a project any more: skip it entirely.
                continue
            except ConfigParserError:
                # Broken config: still list the project, without details.
                pass
            for board_id in data.get("boards",[]):
                # Resolve the board's display name when known locally.
                name=board_id
                try:
                    name=pm.board_config(board_id)['name']
                except(exception.UnknownBoard,exception.UnknownPlatform):
                    pass
                boards.append({"id":board_id,"name":name})
            result.append({"path":project_dir,"name":_path_to_name(project_dir),"modified":int(getmtime(project_dir)),"boards":boards,"extraLibStorages":[{"name":_path_to_name(d),"path":d}for d in data.get("libExtraDirs",[])]})
        return result
    @jsonRPCHandler.exportRPC()
    def get_projects(self,project_dirs=None):
        # RPC: list project summaries.
        try:
            return self._get_projects(project_dirs)
        except Exception as e:
            raise JSONRPCError(e)
    @jsonRPCHandler.exportRPC()
    def init(self,board,framework,project_dir):
        # RPC: create a new project via `platformio init`, generating an
        # IDE integration matching the caller and a main.cpp skeleton.
        assert project_dir
        state=AppRPC.load_state()
        if not isdir(project_dir):
            os.makedirs(project_dir)
        args=["init","--project-dir",project_dir,"--board",board]
        if framework:
            args.extend(["--project-option","framework = %s"%framework])
        if(state['storage']['coreCaller']and state['storage']['coreCaller']in ProjectGenerator.get_supported_ides()):
            args.extend(["--ide",state['storage']['coreCaller']])
        d=PIOCoreRPC.spawn(args)
        d.addCallback(self._generate_project_main,project_dir,framework)
        return d
    @staticmethod
    def _generate_project_main(_,project_dir,framework):
        # Deferred callback: write a framework-specific main.cpp skeleton
        # unless one already exists.  Returns the project dir.
        main_content=None
        if framework=="arduino":
            # NOTE(review): the trailing `"}" ""` lacks a comma, so the
            # last list element is the concatenation "}" — the intended
            # trailing empty line is never emitted.
            main_content="\n".join(["#include <Arduino.h>","","void setup() {"," // put your setup code here, to run once:","}","","void loop() {"," // put your main code here, to run repeatedly:","}" ""])
        elif framework=="mbed":
            main_content="\n".join(["#include <mbed.h>","","int main() {",""," // put your setup code here, to run once:",""," while(1) {"," // put your main code here, to run repeatedly:"," }","}",""])
        if not main_content:
            return project_dir
        with util.cd(project_dir):
            src_dir=util.get_projectsrc_dir()
            main_path=join(src_dir,"main.cpp")
            if isfile(main_path):
                return project_dir
            if not isdir(src_dir):
                os.makedirs(src_dir)
            with open(main_path,"w")as f:
                f.write(main_content.strip())
        return project_dir
    @jsonRPCHandler.exportRPC()
    def import_arduino(self,board,use_arduino_libs,arduino_project_dir):
        # RPC: convert an Arduino IDE sketch into a new PIO project in
        # the user's projects directory.
        if util.is_platformio_project(arduino_project_dir):
            return arduino_project_dir
        # An Arduino sketch is a folder containing <folder>.ino/.pde.
        is_arduino_project=any([isfile(join(arduino_project_dir,"%s.%s"%(basename(arduino_project_dir),ext)))for ext in("ino","pde")])
        if not is_arduino_project:
            raise JSONRPCError("Not an Arduino project: %s"%arduino_project_dir)
        state=AppRPC.load_state()
        project_dir=join(state['storage']['projectsDir'].decode("utf-8"),time.strftime("%y%m%d-%H%M%S-")+board)
        if not isdir(project_dir):
            os.makedirs(project_dir)
        args=["init","--project-dir",project_dir,"--board",board]
        args.extend(["--project-option","framework = arduino"])
        if use_arduino_libs:
            args.extend(["--project-option","lib_extra_dirs = ~/Documents/Arduino/libraries"])
        if(state['storage']['coreCaller']and state['storage']['coreCaller']in ProjectGenerator.get_supported_ides()):
            args.extend(["--ide",state['storage']['coreCaller']])
        d=PIOCoreRPC.spawn(args)
        d.addCallback(self._finalize_arduino_import,project_dir,arduino_project_dir)
        return d
    @staticmethod
    def _finalize_arduino_import(_,project_dir,arduino_project_dir):
        # Deferred callback: replace the generated src/ with a copy of
        # the Arduino sketch.  Returns the new project dir.
        with util.cd(project_dir):
            src_dir=util.get_projectsrc_dir()
            if isdir(src_dir):
                util.rmtree_(src_dir)
            shutil.copytree(arduino_project_dir.encode(sys.getfilesystemencoding()),src_dir)
        return project_dir
    @jsonRPCHandler.exportRPC()
    def get_project_examples(self):
        # RPC: collect example projects bundled with every installed
        # platform package, grouped and sorted by platform title.
        result=[]
        for manifest in PlatformManager().get_installed():
            examples_dir=join(manifest['__pkg_dir'],"examples")
            if not isdir(examples_dir):
                continue
            items=[]
            for project_dir,_,__ in os.walk(examples_dir):
                project_description=None
                try:
                    config=util.load_project_config(project_dir)
                    if config.has_section("platformio")and config.has_option("platformio","description"):
                        project_description=config.get("platformio","description")
                except(exception.NotPlatformIOProject,exception.InvalidProjectConf):
                    continue
                # Example name: path relative to the examples/ folder.
                path_tokens=project_dir.split(sep)
                items.append({"name":"/".join(path_tokens[path_tokens.index("examples")+1:]),"path":project_dir,"description":project_description})
            result.append({"platform":{"title":manifest['title'],"version":manifest['version']},"items":sorted(items)})
        return sorted(result,key=lambda data:data['platform']['title'])
    @jsonRPCHandler.exportRPC()
    def import_pio(self,project_dir):
        # RPC: copy an existing PIO project into the projects directory
        # and re-init it (regenerating IDE files for the caller).
        if not project_dir or not util.is_platformio_project(project_dir):
            raise JSONRPCError("Not an PlatformIO project: %s"%project_dir)
        new_project_dir=join(AppRPC.load_state()['storage']['projectsDir'].decode("utf-8"),time.strftime("%y%m%d-%H%M%S-")+basename(project_dir))
        shutil.copytree(project_dir,new_project_dir)
        state=AppRPC.load_state()
        args=["init","--project-dir",new_project_dir]
        if(state['storage']['coreCaller']and state['storage']['coreCaller']in ProjectGenerator.get_supported_ides()):
            args.extend(["--ide",state['storage']['coreCaller']])
        d=PIOCoreRPC.spawn(args)
        d.addCallback(lambda _:new_project_dir)
        return d
class OSRPC(jsonRPCHandler.Handler):
    """Filesystem and network helpers exposed to the PIO Home frontend."""

    @staticmethod
    @defer.inlineCallbacks
    def fetch_content(uri, data=None, headers=None, cache_valid=None):
        """Fetch `uri` and return the response body as text.

        Issues a POST when `data` is given, otherwise a GET.  When
        `cache_valid` is set (e.g. "7d") the result is stored in and
        served from the application content cache.
        """
        timeout = 2
        if not headers:
            # some endpoints reject requests without a browser-like UA
            headers = {
                "User-Agent": (
                    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) "
                    "AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 "
                    "Safari/603.3.8")
            }
        cache_key = (app.ContentCache.key_from_args(uri, data)
                     if cache_valid else None)
        with app.ContentCache() as cc:
            if cache_key:
                result = cc.get(cache_key)
                if result is not None:
                    defer.returnValue(result)
        util.internet_on(raise_exception=True)
        session = requests_session()
        if data:
            r = yield session.post(
                uri, data=data, headers=headers, timeout=timeout)
        else:
            r = yield session.get(uri, headers=headers, timeout=timeout)
        r.raise_for_status()
        result = r.text
        if cache_valid:
            with app.ContentCache() as cc:
                cc.set(cache_key, result, cache_valid)
        defer.returnValue(result)

    @jsonRPCHandler.exportRPC()
    def request_content(self, uri, data=None, headers=None, cache_valid=None):
        """Return remote (http*) or local-file content; None otherwise."""
        try:
            if uri.startswith('http'):
                return self.fetch_content(uri, data, headers, cache_valid)
            elif isfile(uri):
                with open(uri) as fp:
                    return fp.read()
        except Exception as exc:  # pylint: disable=broad-except
            # BUGFIX: the old code rebound `e` to the new JSONRPCError and
            # only then read str(e), so `data` carried the RPC wrapper's
            # message and the original failure reason was lost.
            error = JSONRPCError("Could not fetch content `%s`" % uri)
            error.data = str(exc)
            raise error
        return None

    @jsonRPCHandler.exportRPC()
    def open_url(self, url):
        """Open `url` in the default browser."""
        return click.launch(url)

    @jsonRPCHandler.exportRPC()
    def reveal_file(self, path):
        """Show `path` in the OS file manager."""
        return click.launch(
            path.encode(sys.getfilesystemencoding()), locate=True)

    @jsonRPCHandler.exportRPC()
    def is_file(self, path):
        return isfile(path)

    @jsonRPCHandler.exportRPC()
    def is_dir(self, path):
        return isdir(path)

    @jsonRPCHandler.exportRPC()
    def make_dirs(self, path):
        return os.makedirs(path)

    @jsonRPCHandler.exportRPC()
    def rename(self, src, dst):
        return os.rename(src, dst)

    @jsonRPCHandler.exportRPC()
    def copy(self, src, dst):
        return shutil.copytree(src, dst)

    @jsonRPCHandler.exportRPC()
    def glob(self, pathnames, root=None):
        """Expand one or many glob patterns (optionally under `root`) and
        return the de-duplicated matches."""
        if not isinstance(pathnames, list):
            pathnames = [pathnames]
        result = set()
        for pathname in pathnames:
            # `glob` below resolves to the module-level import, not this
            # method (method names are not visible inside their own body)
            result |= set(
                glob.glob(join(root, pathname) if root else pathname))
        return list(result)

    @jsonRPCHandler.exportRPC()
    def list_dir(self, path):
        """List `path` as (name, is_dir) tuples: readable directories
        first, then case-insensitive alphabetical order."""

        def _cmp(x, y):
            # directories sort before files, then by lowercase name
            if x[1] and not y[1]:
                return -1
            elif not x[1] and y[1]:
                return 1
            elif x[0].lower() > y[0].lower():
                return 1
            elif x[0].lower() < y[0].lower():
                return -1
            return 0

        try:
            items = []
            if path.startswith("~"):
                path = expanduser(path)
            if not isdir(path):
                return items
            for item in os.listdir(path):
                try:
                    item_is_dir = isdir(join(path, item))
                    if item_is_dir:
                        # probe readability; unreadable dirs are skipped
                        os.listdir(join(path, item))
                    items.append((item, item_is_dir))
                except OSError:
                    pass
            # `cmp=` keyword is Python 2 only; this codebase enforces
            # Python 2 at startup (see config section)
            return sorted(items, cmp=_cmp)
        except Exception as exc:  # pylint: disable=broad-except
            # BUGFIX: keep the original failure reason in `data` (was
            # lost by rebinding `e` before calling str(e))
            error = JSONRPCError("Could not list directory by `%s`" % path)
            error.data = str(exc)
            raise error

    @jsonRPCHandler.exportRPC()
    def get_logical_devices(self):
        """Return logical disks/volumes with unicode-normalized names."""
        items = []
        try:
            for item in util.get_logical_devices():
                if item['name']:
                    item['name'] = util.path_to_unicode(item['name'])
                items.append(item)
        except Exception as exc:  # pylint: disable=broad-except
            # BUGFIX: `data` now carries the underlying error text, not
            # the JSONRPCError's own message
            error = JSONRPCError("Could not fetch logical disks")
            error.data = str(exc)
            raise error
        return items
class PIOCoreRPC(jsonRPCHandler.Handler):
    """Bridge between PIO Home and the PlatformIO Core executable."""

    @staticmethod
    def spawn(args, options=None):
        """Run PIO Core with `args`; returns a Deferred firing with the
        (optionally JSON-decoded) process output."""
        fs_encoding = sys.getfilesystemencoding()
        try:
            encoded_args = []
            for arg in args:
                if isinstance(arg, basestring):
                    encoded_args.append(arg.encode(fs_encoding))
                else:
                    encoded_args.append(str(arg))
        except UnicodeError:
            raise JSONRPCError("PIO Core: non-ASCII chars in arguments")
        # drop environment entries whose key contains "%"
        env = dict((k, v) for k, v in os.environ.items() if "%" not in k)
        d = getProcessOutputAndValue(
            get_core_fullpath(), encoded_args,
            path=(options or {}).get("cwd"), env=env)
        d.addCallback(PIOCoreRPC._spawn_callback,
                      "--json-output" in encoded_args)
        d.addErrback(PIOCoreRPC._spawn_errback)
        return d

    @staticmethod
    def _spawn_callback(result, json_output=False):
        """Merge stdout/stderr, fail on non-zero exit, decode JSON when
        the caller requested `--json-output`."""
        stdout, stderr, exit_code = result
        stdout = stdout.strip() if stdout else ""
        stderr = stderr.strip() if stderr else ""
        merged = ("%s\n\n%s" % (stdout, stderr)).strip()
        if exit_code != 0:
            raise Exception(merged)
        merged = util.path_to_unicode(merged)
        if not json_output:
            return merged
        try:
            return json.loads(merged)
        except ValueError as e:
            # shell noise ("sh: ...") may be interleaved with the JSON
            if "sh: " in merged:
                return json.loads(
                    re.sub(r"^sh: [^\n]+$", "", merged, flags=re.M).strip())
            raise e

    @staticmethod
    def _spawn_errback(failure):
        """Convert a process failure into a JSON-RPC error."""
        error = JSONRPCError("PIO Core Call Error")
        error.data = failure.getErrorMessage()
        raise error

    @jsonRPCHandler.exportRPC()
    def call(self, args, options=None):
        return self.spawn(args, options)

    @jsonRPCHandler.exportRPC()
    def version(self):
        return __version__

    @jsonRPCHandler.exportRPC()
    def auth_info(self, extended=False):
        """Return account info for the current session, or None when the
        session is missing or expired."""
        session = cmd_account.get_session_data()
        if not session or session['expire'] < time.time():
            return None
        try:
            return cmd_account.get_account_info(offline=not extended)
        except exception.InternetIsOffline as e:
            raise JSONRPCError(str(e))
class IDERPC(jsonRPCHandler.Handler):
    """Command channel between PIO Home and an attached IDE agent."""

    def __init__(self):
        jsonRPCHandler.Handler.__init__(self)
        # pending long-poll Deferreds waiting for the next command
        self._queue = []

    def send_command(self, command, params):
        """Deliver `command` to every pending listener; raises when no
        IDE agent is currently listening."""
        if not self._queue:
            raise JSONRPCError("PIO Home IDE agent is not started")
        while self._queue:
            listener = self._queue.pop()
            listener.callback({
                "id": time.time(),
                "method": command,
                "params": params
            })

    @jsonRPCHandler.exportRPC()
    def listen_commands(self):
        """Long-poll endpoint: returns a Deferred fired by send_command."""
        d = defer.Deferred()
        self._queue.append(d)
        return d

    @jsonRPCHandler.exportRPC()
    def open_project(self, project_dir):
        return self.send_command("open_project", project_dir)
class MiscRPC(jsonRPCHandler.Handler):
    """Miscellaneous PIO Home helpers: fetches and caches the latest
    tweets for an account by scraping Twitter's public timeline HTML
    (no API credentials involved)."""
    @staticmethod
    def _get_proxed_uri(uri):
        # Route the URI through the dl.platformio.org proxy; used when
        # direct access to twitter.com appears blocked.
        index=uri.index("://")
        return "https://dl.platformio.org/__prx__/"+uri[index+3:]
    def _get_twitter_headers(self,username):
        # Browser-like AJAX headers; presumably Twitter's timeline
        # endpoint rejects requests that don't look like its own
        # XHR calls -- TODO confirm.
        return{"Accept":"application/json, text/javascript, */*; q=0.01","Referer":"https://twitter.com/%s"%username,"User-Agent":("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit" "/603.3.8 (KHTML, like Gecko) Version/10.1.2 Safari/603.3.8"),"X-Twitter-Active-User":"yes","X-Requested-With":"XMLHttpRequest"}
    @defer.inlineCallbacks
    def _fetch_iframe_card(self,url,username):
        # Fetch a tweet's "card" iframe (cached for 7 days) and extract
        # its photo, summary text node and destination URL.
        if is_twitter_blocked():
            url=self._get_proxed_uri(url)
        html=yield OSRPC.fetch_content(url,headers=self._get_twitter_headers(username),cache_valid="7d")
        soup=BeautifulSoup(html,"html.parser")
        photo_node=soup.find("img",attrs={"data-src":True})
        url_node=soup.find("a",class_="TwitterCard-container")
        text_node=soup.find("div",class_="SummaryCard-content")
        if text_node:
            # drop the "destination" label so only the summary remains
            text_node.find("span",class_="SummaryCard-destination").decompose()
        defer.returnValue({"photo":photo_node.get("data-src")if photo_node else None,"text_node":text_node,"url":url_node.get("href")if url_node else None})
    @defer.inlineCallbacks
    def _parse_tweet_node(self,tweet,username):
        # Convert one scraped tweet <div> into a plain dict for the UI.
        for node in tweet.find_all(class_=["invisible","u-hidden"]):
            node.decompose()
        twitter_url="https://twitter.com"
        time_node=tweet.find("span",attrs={"data-time":True})
        text_node=tweet.find(class_="tweet-text")
        quote_text_node=tweet.find(class_="QuoteTweet-text")
        if quote_text_node and not text_node.get_text().strip():
            # bare quote-tweet: fall back to the quoted tweet's text
            text_node=quote_text_node
        photos=[node.get("data-image-url")for node in(tweet.find_all(class_=["AdaptiveMedia-photoContainer","QuoteMedia-photoContainer"])or[])]
        urls=[node.get("data-expanded-url")for node in(quote_text_node or text_node).find_all(class_="twitter-timeline-link",attrs={"data-expanded-url":True})]
        if(not photos or not urls)and tweet.get("data-card2-type"):
            # no inline media/links -> try the attached card iframe
            iframe_node=tweet.find("div",attrs={"data-full-card-iframe-url":True})
            if iframe_node:
                iframe_card=yield self._fetch_iframe_card(twitter_url+iframe_node.get("data-full-card-iframe-url"),username)
                if not photos and iframe_card['photo']:
                    photos.append(iframe_card['photo'])
                if not urls and iframe_card['url']:
                    urls.append(iframe_card['url'])
                if iframe_card['text_node']:
                    text_node=iframe_card['text_node']
        if not photos:
            # last resort: use the author's avatar as the tweet image
            photos.append(tweet.find("img",class_="avatar").get("src"))
        def _fetch_text(text_node):
            # Serialize the tweet body to HTML, absolutize relative
            # links, and normalize bare newlines to <br /> when no
            # block markup is present.
            text=text_node.decode_contents(formatter="html").strip()
            text=re.sub(r'href="/','href="%s/'%twitter_url,text)
            if "</p>" not in text and "<br" not in text:
                text=re.sub(r"\n+","<br />",text)
            return text
        defer.returnValue({"tweetId":tweet.get("data-tweet-id"),"tweetUrl":twitter_url+tweet.get("data-permalink-path"),"author":tweet.get("data-name"),"time":int(time_node.get("data-time")),"timeFormatted":time_node.string,"text":_fetch_text(text_node),"entries":{"urls":urls,"photos":[self._get_proxed_uri(uri)if is_twitter_blocked()else uri for uri in photos]},"isPinned":"user-pinned" in tweet.get("class")})
    @defer.inlineCallbacks
    def _fetch_tweets(self,username):
        # Download the latest timeline chunk and parse every tweet node.
        api_url=("https://twitter.com/i/profiles/show/%s/timeline/tweets?" "include_available_features=1&include_entities=1&" "include_new_items_bar=true")%username
        if is_twitter_blocked():
            api_url=self._get_proxed_uri(api_url)
        html_or_json=yield OSRPC.fetch_content(api_url,headers=self._get_twitter_headers(username))
        # fetch_content may return a cached dict or raw JSON text
        if not isinstance(html_or_json,dict):
            html_or_json=json.loads(html_or_json)
        assert "items_html" in html_or_json
        soup=BeautifulSoup(html_or_json['items_html'],"html.parser")
        tweet_nodes=soup.find_all("div",attrs={"class":"tweet","data-tweet-id":True})
        defer.returnValue([(yield self._parse_tweet_node(node,username))for node in tweet_nodes])
    @defer.inlineCallbacks
    def _preload_latest_tweets(self,username,cache_key,cache_valid):
        # Fetch fresh tweets and persist them, with a fetch timestamp,
        # in the content cache.
        result=yield self._fetch_tweets(username)
        with app.ContentCache()as cc:
            cc.set(cache_key,json.dumps({"time":int(time.time()),"result":result}),cache_valid)
        defer.returnValue(result)
    @jsonRPCHandler.exportRPC()
    def load_latest_tweets(self,username):
        """Return cached tweets for `username`; when the cached entry is
        older than 12 hours, serve it anyway and schedule a background
        refresh (stale-while-revalidate)."""
        cache_key="piohome_latest_tweets_%s"%username
        cache_valid="7d"
        with app.ContentCache()as cc:
            cache_data=cc.get(cache_key)
            if cache_data:
                cache_data=json.loads(cache_data)
                if cache_data['time']<(time.time()-(3600*12)):
                    # refresh 5 seconds from now, off the request path
                    reactor.callLater(5,self._preload_latest_tweets,username,cache_key,cache_valid)
                return cache_data['result']
        try:
            return self._preload_latest_tweets(username,cache_key,cache_valid)
        except Exception as e:
            raise JSONRPCError(e)
class WebRoot(static.File):
    """Static web root for PIO Home: disables HTTP caching and honors a
    `?__shutdown__` query argument that stops the reactor."""

    def render_GET(self, request):
        if request.args.get("__shutdown__", False):
            reactor.stop()
            return "Server has been stopped"
        # always serve a fresh copy of the frontend assets
        no_cache_headers = (
            ('cache-control', 'no-cache, no-store, must-revalidate'),
            ('pragma', 'no-cache'),
            ('expires', 0),
        )
        for name, value in no_cache_headers:
            request.setHeader(name, value)
        return static.File.render_GET(self, request)
- @click.command("home",short_help="PIO Home")
- @click.option("--port",type=int,default=8008,help="HTTP port, default=8008")
- @click.option("--host",default="127.0.0.1",help="HTTP host, default=127.0.0.1. " "You can open PIO Home for inbound connections with --host=0.0.0.0")
- @click.option("--no-open",is_flag=True)
- def cli(port,host,no_open):
- factory=JSONRPCServerFactory()
- factory.addHandler(AppRPC(),namespace="app")
- factory.addHandler(ProjectRPC(),namespace="project")
- factory.addHandler(PIOCoreRPC(),namespace="core")
- factory.addHandler(OSRPC(),namespace="os")
- factory.addHandler(IDERPC(),namespace="ide")
- factory.addHandler(MiscRPC(),namespace="misc")
- contrib_dir=get_core_package_dir("contrib-piohome")
- if not isdir(contrib_dir):
- raise exception.PlatformioException("Invalid path to PIO Home Contrib")
- root=WebRoot(contrib_dir)
- root.putChild("wsrpc",SockJSResource(factory))
- root.putChild("rpc",factory)
- site=server.Site(root)
- if host=="__do_not_start__":
- return
- already_started=False
- socket.setdefaulttimeout(1)
- try:
- socket.socket(socket.AF_INET,socket.SOCK_STREAM).connect((host,port))
- already_started=True
- except:
- pass
- home_url="http://%s:%d"%(host,port)
- if not no_open:
- if already_started:
- click.launch(home_url)
- else:
- reactor.callLater(1,lambda:click.launch(home_url))
- click.echo("\n".join([""," ___I_"," /\\-_--\\ PlatformIO Home","/ \\_-__\\","|[]| [] | %s"%home_url,"|__|____|______________%s"%("_"*len(host)),]))
- if already_started:
- return
- reactor.listenTCP(port,site,interface=host)
- reactor.run()
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
- #$main
- import load_pysite
- import os
- import sys
- from platform import system
- from traceback import format_exc
- import click
- from platformio import app,exception,maintenance
- import cmd_account
- import cmd_debug
- import cmd_home
- import cmd_remote
- import cmd_test
- from config import __version__
- COMMANDS={"account":cmd_account,"debug":cmd_debug,"home":cmd_home,"remote":cmd_remote,"test":cmd_test}
class PlatformioPlusCLI(click.MultiCommand):
    """Top-level click multi-command backed by the COMMANDS registry."""

    def list_commands(self, ctx):
        return COMMANDS.keys()

    def get_command(self, ctx, cmd_name):
        try:
            return COMMANDS[cmd_name].cli
        except KeyError:
            raise exception.PlatformioException(
                "Unknown command `%s`" % cmd_name)
@click.command(cls=PlatformioPlusCLI,
               context_settings=dict(help_option_names=["-h", "--help"]))
@click.version_option(__version__, prog_name="PIO Plus")
@click.option("--force", "-f", is_flag=True,
              help="Force to accept any confirmation prompts.")
@click.option("--caller", "-c", help="Caller ID (service).")
@click.pass_context
def cli(ctx, force, caller):
    """Root entry point: print the banner, stash global session options
    and check account permissions before dispatching a sub-command."""
    if not maintenance.in_silence(ctx):
        banner = "PIO Plus (https://pioplus.com) v%s" % __version__
        ctx_args = ctx.args or []
        debug_interpreter = (ctx_args and ctx_args[0] == "debug"
                             and "--interpreter" in " ".join(ctx_args))
        if debug_interpreter:
            # machine-interface-style console record for the debugger
            click.echo('~"%s\\n"' % banner)
        else:
            click.echo(banner)
    app.set_session_var("command_ctx", ctx)
    app.set_session_var("force_option", force)
    maintenance.set_caller(caller)
    cmd_account.cmd_check_permission(ctx)
def configure():
    """One-time process setup: refuse Cygwin, silence urllib3 warnings,
    honor PLATFORMIO_FORCE_COLOR and make click's echo IOError-safe."""
    if "cygwin" in system().lower():
        raise exception.CygwinEnvDetected()
    try:
        import urllib3
        urllib3.disable_warnings()
    except (AttributeError, ImportError):
        # urllib3 missing or too old to have disable_warnings()
        pass
    if str(os.getenv("PLATFORMIO_FORCE_COLOR", "")).lower() == "true":
        try:
            # force click to treat streams as a TTY so ANSI colors are
            # emitted even when output is piped
            click._compat.isatty = lambda stream: True
        except Exception:
            # BUGFIX: was a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt; the monkey-patch stays
            # best-effort but no longer traps exit signals
            pass
    # Wrap click.echo/secho so a broken pipe (IOError) falls back to a
    # raw write on stdout/stderr instead of crashing the CLI.
    click_echo_origin = [click.echo, click.secho]

    def _safe_echo(origin, *args, **kwargs):
        try:
            click_echo_origin[origin](*args, **kwargs)
        except IOError:
            data = args[0] if args else ""
            if kwargs.get("nl", True):
                data += "\n"
            (sys.stderr.write if kwargs.get("err") else sys.stdout.write)(data)

    click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
    click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)
def main():
    """CLI entry point: configure the environment, run the click app and
    map raised exceptions to a process exit code (0 on success)."""
    try:
        configure()
        cli(None, None, None)
    except Exception as e:  # pylint: disable=broad-except
        exit_code = int(str(e)) if str(e).isdigit() else 1
        if isinstance(e, exception.ReturnErrorCode):
            # deliberate exit request -- no error report
            return exit_code
        maintenance.on_platformio_exception(e)
        if isinstance(e, exception.PlatformioException):
            details = str(e)
        else:
            details = format_exc()
        error_str = "Error: " + details + """
============================================================
An unexpected error occurred. Further steps:
* Verify that you have the latest version of PlatformIO using
`pip install -U platformio` command
* Try to find answer in FAQ Troubleshooting section
http://docs.platformio.org/page/faq.html
* Report this problem to support@pioplus.com
============================================================
"""
        click.secho(error_str, fg="red", err=True)
        return exit_code
    return 0
- if __name__=="__main__":
- sys.exit(main())
- # Created by pyminifier (https://github.com/liftoff/pyminifier)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement