signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def random_pairs_without_replacement_large_frames ( n , shape , random_state = None ) :
"""Make a sample of random pairs with replacement""" | n_max = max_pairs ( shape )
sample = np . array ( [ ] )
# Run as long as the number of pairs is less than the requested number
# of pairs n .
while len ( sample ) < n : # The number of pairs to sample ( sample twice as much record pairs
# because the duplicates are dropped ) .
n_sample_size = ( n - len ( sample ) )... |
def success(headers=None, data=''):
    """Generate success JSON to send to client.

    :param headers: optional mapping of extra response headers, merged
        on top of the default ``{'status': 'ok'}`` entry (and allowed to
        override it).
    :param data: payload to send; a dict is serialized to JSON first.
    :return: result of ``server_responce`` for the merged headers/data.
    """
    if headers is None:
        headers = {}
    if isinstance(data, dict):
        data = json.dumps(data)
    # Default status first so caller-supplied headers can override it.
    merged = {'status': 'ok'}
    merged.update(headers)
    return server_responce(merged, data)
def augment_observation ( observation , reward , cum_reward , frame_index , bar_color = None , header_height = 27 ) :
"""Augments an observation with debug info .""" | img = PIL_Image ( ) . new ( "RGB" , ( observation . shape [ 1 ] , header_height , ) )
draw = PIL_ImageDraw ( ) . Draw ( img )
draw . text ( ( 1 , 0 ) , "c:{:3}, r:{:3}" . format ( int ( cum_reward ) , int ( reward ) ) , fill = ( 255 , 0 , 0 ) )
draw . text ( ( 1 , 15 ) , "f:{:3}" . format ( int ( frame_index ) ) , fill... |
def _result ( self , timeout = None ) :
"""Return the result , if available .
It may take an unknown amount of time to return the result , so a
timeout option is provided . If the given number of seconds pass with
no result , a TimeoutError will be thrown .
If a previous call timed out , additional calls to... | if timeout is None :
warnings . warn ( "Unlimited timeouts are deprecated." , DeprecationWarning , stacklevel = 3 )
# Queue . get ( None ) won ' t get interrupted by Ctrl - C . . .
timeout = 2 ** 31
self . _result_set . wait ( timeout )
# In Python 2.6 we can ' t rely on the return result of wait ( ) , so w... |
def to_add(self):
    '''Render the "add entity" form page.'''
    context = {'pager': ''}
    self.render(
        'misc/entity/entity_add.html',
        cfg=config.CMS_CFG,
        kwd=context,
        userinfo=self.userinfo,
    )
def register_quantity(quantity, name):
    """Register `name` as a validator type matching instances of
    ``quantity``'s class."""
    cls = quantity.__class__
    Validator.types_mapping[name] = TypeDefinition(name, (cls,), ())
def fetch_url(url):
    """Fetch the given url, strip formfeeds and decode
    it into the defined encoding.

    :param url: URL to fetch.
    :return: decoded response body on HTTP 200, ``None`` otherwise.
    """
    with closing(urllib.urlopen(url)) as f:
        # Use == rather than `is`: identity comparison with an int literal
        # only works because CPython interns small ints, and it raises a
        # SyntaxWarning on Python 3.8+.
        if f.code == 200:
            response = f.read()
            return strip_formfeeds(response).decode(ENCODING)
def OnSelectCard(self, event):
    """Handle selection of a tree item: notify the dialog panel when the
    item holds a smartcard Card, otherwise signal deselection."""
    item = event.GetItem()
    if not item:
        return
    data = self.readertreepanel.cardtreectrl.GetItemPyData(item)
    if isinstance(data, smartcard.Card.Card):
        self.dialogpanel.OnSelectCard(data)
    else:
        self.dialogpanel.OnDeselectCard(data)
def create(self, validated_data):
    '''Create a user whose username mirrors the email address, delegating
    to Django's create_user/create_superuser so password hashing is
    applied correctly.'''
    validated_data['username'] = validated_data['email']
    is_admin = validated_data.pop('is_superuser', None)
    factory = (User.objects.create_superuser
               if is_admin is True
               else User.objects.create_user)
    return factory(**validated_data)
def list ( cls , args ) : # pylint : disable = unused - argument
"""List all installed NApps and inform whether they are enabled .""" | mgr = NAppsManager ( )
# Add status
napps = [ napp + ( '[ie]' , ) for napp in mgr . get_enabled ( ) ]
napps += [ napp + ( '[i-]' , ) for napp in mgr . get_disabled ( ) ]
# Sort , add description and reorder columns
napps . sort ( )
napps_ordered = [ ]
for user , name , status in napps :
description = mgr . get_desc... |
async def main ( ) :
"""The main part of the example script .""" | async with aiohttp . ClientSession ( ) as session :
zaehler = Volkszaehler ( loop , session , UUID , host = HOST )
# Get the data
await zaehler . get_data ( )
print ( "Average:" , zaehler . average )
print ( "Max:" , zaehler . max )
print ( "Min:" , zaehler . min )
print ( "Consumption:" , z... |
def get_DOI ( self ) :
"""This method defines how the Article tries to detect the DOI .
It attempts to determine the article DOI string by DTD - appropriate
inspection of the article metadata . This method should be made as
flexible as necessary to properly collect the DOI for any XML
publishing specificati... | if self . dtd_name == 'JPTS' :
doi = self . root . xpath ( "./front/article-meta/article-id[@pub-id-type='doi']" )
if doi :
return doi [ 0 ] . text
log . warning ( 'Unable to locate DOI string for this article' )
return None
else :
log . warning ( 'Unable to locate DOI string for this articl... |
def compute_kwinners ( x , k , dutyCycles , boostStrength ) :
"""Use the boost strength to compute a boost factor for each unit represented
in x . These factors are used to increase the impact of each unit to improve
their chances of being chosen . This encourages participation of more columns
in the learning... | k = tf . convert_to_tensor ( k , dtype = tf . int32 )
boostStrength = tf . math . maximum ( boostStrength , 0.0 , name = "boostStrength" )
targetDensity = tf . cast ( k / x . shape [ 1 ] , tf . float32 , name = "targetDensity" )
boostFactors = tf . exp ( ( targetDensity - dutyCycles ) * boostStrength , name = "boostFac... |
def config(name='DATABASE_URL', default='sqlite://:memory:'):
    """Return a configured DATABASE dictionary parsed from the
    environment variable `name` (empty dict when unset/empty)."""
    url = env(name, default)
    return parse_database_url(url) if url else {}
def pick_env_and_run_and_report ( self , env : env_tools . PreparedEnv , env_py2 : Optional [ env_tools . PreparedEnv ] , verbose : bool , previous_failures : Set [ 'Check' ] ) -> CheckResult :
"""Evaluates this check in python 3 or 2.7 , and reports to github .
If the prepared environments are not linked to a gi... | env . report_status_to_github ( 'pending' , 'Running...' , self . context ( ) )
chosen_env = cast ( env_tools . PreparedEnv , env_py2 if self . needs_python2_env ( ) else env )
os . chdir ( cast ( str , chosen_env . destination_directory ) )
result = self . run ( chosen_env , verbose , previous_failures )
if result . u... |
def xstep ( self ) :
r"""Minimise Augmented Lagrangian with respect to
: math : ` \ mathbf { x } ` .""" | self . X = np . asarray ( sl . cho_solve_ATAI ( self . D , self . rho , self . DTS + self . rho * ( self . Y - self . U ) , self . lu , self . piv ) , dtype = self . dtype )
if self . opt [ 'LinSolveCheck' ] :
b = self . DTS + self . rho * ( self . Y - self . U )
ax = self . D . T . dot ( self . D . dot ( self ... |
def rotate(self, shift):
    '''Rotate 90 degrees clockwise `shift` times; a negative `shift`
    rotates counter-clockwise.'''
    corners = self.child_corners.values
    # In-place assignment keeps the same underlying array object.
    corners[:] = np.roll(corners, shift, axis=0)
    self.update_transform()
def _add_to_ref ( self , rec_curr , line ) :
"""Add new fields to the current reference .""" | # Examples of record lines containing ' : ' include :
# id : GO : 000002
# name : mitochondrial genome maintenance
# namespace : biological _ process
# def : " The maintenance of . . .
# is _ a : GO : 0007005 ! mitochondrion organization
if line [ : 4 ] == "id: " :
assert not rec_curr . item_id
item_id = line [... |
def _resetSelection ( self , moveToTop = False ) :
"""Reset selection .
If moveToTop is True - move cursor to the top position""" | ancor , pos = self . _qpart . selectedPosition
dst = min ( ancor , pos ) if moveToTop else pos
self . _qpart . cursorPosition = dst |
def verification_count(self):
    """Get Verification Count. Uses HEAD to /verifications interface.

    :Returns: (int) number of verifications, -1 when the
        ``x-verification-count`` header is absent.
    """
    response = self._head(url.verifications)
    self._check_response(response, 200)
    count = response.headers.get('x-verification-count', -1)
    return int(count)
def verify_rsa_sha1 ( request , rsa_public_key ) :
"""Verify a RSASSA - PKCS # 1 v1.5 base64 encoded signature .
Per ` section 3.4.3 ` _ of the spec .
Note this method requires the jwt and cryptography libraries .
. . _ ` section 3.4.3 ` : https : / / tools . ietf . org / html / rfc5849 # section - 3.4.3
To... | norm_params = normalize_parameters ( request . params )
bs_uri = base_string_uri ( request . uri )
sig_base_str = signature_base_string ( request . http_method , bs_uri , norm_params ) . encode ( 'utf-8' )
sig = binascii . a2b_base64 ( request . signature . encode ( 'utf-8' ) )
alg = _jwt_rs1_signing_algorithm ( )
key ... |
def glob ( self , pat ) :
"""` pat ` can be an extended glob pattern , e . g . ` ' * * / * . less ' `
This code handles negations similarly to node . js ' minimatch , i . e .
a leading ` ! ` will negate the entire pattern .""" | r = ""
negate = int ( pat . startswith ( '!' ) )
i = negate
while i < len ( pat ) :
if pat [ i : i + 3 ] == '**/' :
r += "(?:.*/)?"
i += 3
elif pat [ i ] == "*" :
r += "[^/]*"
i += 1
elif pat [ i ] == "." :
r += "[.]"
i += 1
elif pat [ i ] == "?" :
... |
def get_instance(self, payload):
    """Build an instance of TaskChannelInstance.

    :param dict payload: payload response from the API
    :returns: twilio.rest.taskrouter.v1.workspace.task_channel.TaskChannelInstance
    """
    workspace_sid = self._solution['workspace_sid']
    return TaskChannelInstance(
        self._version,
        payload,
        workspace_sid=workspace_sid,
    )
def check_name(name, safe_chars):
    '''Raise SaltCloudException when `name` contains any character outside
    the `safe_chars` character class.'''
    invalid = re.compile('[^{0}]'.format(safe_chars))
    if invalid.search(name):
        raise SaltCloudException(
            '{0} contains characters not supported by this cloud provider. '
            'Valid characters are: {1}'.format(name, safe_chars)
        )
def get_collector_path(base_url=None):
    """Return the IOpipe collector's path. By default this is `/v0/event`.

    :param base_url: optional base URL whose path and query are folded in.
    :returns: the collector's path.
    :rtype: str
    """
    if not base_url:
        return "/v0/event"
    parsed = urlparse(base_url)
    path = urljoin(parsed.path, "v0/event")
    if not path.startswith("/"):
        path = "/%s" % path
    if parsed.query:
        path = "?".join([path, parsed.query])
    return path
def nodeSatisfiesNumericFacet ( cntxt : Context , n : Node , nc : ShExJ . NodeConstraint , _c : DebugContext ) -> bool :
"""` 5.4.5 XML Schema Numeric Facet Constraints < http : / / shex . io / shex - semantics / # xs - numeric > ` _
Numeric facet constraints apply to the numeric value of RDF Literals with dataty... | if nc . mininclusive is not None or nc . minexclusive is not None or nc . maxinclusive is not None or nc . maxexclusive is not None or nc . totaldigits is not None or nc . fractiondigits is not None :
if is_numeric ( n ) :
v = n . value
if isinstance ( v , numbers . Number ) :
if ( nc . ... |
def normalize_input_value(value):
    """Return an input value normalized for RightScale API 2.0.

    Bare keywords (``blank``, ``ignore``, ``inherit``) pass through
    unchanged, as do values already carrying a known *input type*
    prefix (``text:``, ``env:``, ``cred:``, ``key:``, ``array:``).
    Anything else is assumed to be text and gets a ``text:`` prefix.
    """
    keywords = ('blank', 'ignore', 'inherit')
    known_prefixes = ('text', 'env', 'cred', 'key', 'array')
    if value in keywords:
        return value
    prefix, sep, _rest = value.partition(':')
    if sep and prefix in known_prefixes:
        return value
    # assume any unspecified or unknown types are text
    return 'text:%s' % value
def bake(self):
    """Bake a `rubocop` command so it's ready to execute and returns None.

    :return: None
    """
    # Output/err are routed through the module logger rather than stdio.
    self._rubocop_command = sh.rubocop.bake(
        self.options,
        self._tests,
        _env=self.env,
        _out=LOG.out,
        _err=LOG.error,
    )
def lookup(self, istring):
    """Return the unique id for `istring` (an ilwd:char string),
    generating and caching a fresh database-unique blob on first use."""
    if istring in self.uqids:
        return self.uqids[istring]
    curs = self.curs
    curs.execute('VALUES BLOB(GENERATE_UNIQUE())')
    uqid = curs.fetchone()[0]
    self.uqids[istring] = uqid
    return uqid
def h(self):
    r"""Return the numerical-differentiation step size as a vector of
    length ``n_modelparams``, broadcasting a scalar ``_h`` if needed so
    each model parameter can be weighted independently."""
    step = self._h
    if np.size(step) > 1:
        assert np.size(step) == self.n_modelparams
        return step
    return step * np.ones(self.n_modelparams)
def validate_api_call ( schema , raw_request , raw_response ) :
"""Validate the request / response cycle of an api call against a swagger
schema . Request / Response objects from the ` requests ` and ` urllib ` library
are supported .""" | request = normalize_request ( raw_request )
with ErrorDict ( ) as errors :
try :
validate_request ( request = request , schema = schema , )
except ValidationError as err :
errors [ 'request' ] . add_error ( err . messages or getattr ( err , 'detail' ) )
return
response = normalize_re... |
def _wait_ready ( self , timeout_sec = 1 ) :
"""Wait until the PN532 is ready to receive commands . At most wait
timeout _ sec seconds for the PN532 to be ready . If the PN532 is ready
before the timeout is exceeded then True will be returned , otherwise
False is returned when the timeout is exceeded .""" | start = time . time ( )
# Send a SPI status read command and read response .
self . _gpio . set_low ( self . _cs )
self . _busy_wait_ms ( 2 )
response = self . _spi . transfer ( [ PN532_SPI_STATREAD , 0x00 ] )
self . _gpio . set_high ( self . _cs )
# Loop until a ready response is received .
while response [ 1 ] != PN5... |
def IncrementCounter(self, metric_name, delta=1, fields=None):
    """Increment the named counter metric by `delta` (must be >= 0)."""
    if delta < 0:
        raise ValueError("Invalid increment for counter: %d." % delta)
    metric = self._counter_metrics[metric_name]
    metric.Increment(delta, fields)
def intersect_range_array ( bed1 , beds2 , payload = None , is_sorted = False ) :
"""Does not do a merge if the payload has been set
: param bed1:
: param bed2:
: param payload : payload = 1 return the payload of bed1 on each of the intersect set , payload = 2 return the payload of bed2 on each of the union s... | if not is_sorted :
beds2 = sort_ranges ( beds2 )
output = [ ]
for bed2 in beds2 :
cval = bed2 . cmp ( bed1 )
# print str ( cval ) + " " + bed1 . get _ range _ string ( ) + " " + bed2 . get _ range _ string ( )
if cval == - 1 :
continue
elif cval == 0 :
output . append ( bed1 . inters... |
def main ( ) :
'''Main routine .''' | # validate command line arguments
arg_parser = argparse . ArgumentParser ( )
arg_parser . add_argument ( '--uri' , '-u' , required = True , action = 'store' , help = 'Template URI' )
arg_parser . add_argument ( '--params' , '-p' , required = True , action = 'store' , help = 'Parameters json file' )
arg_parser . add_arg... |
def offset(self, offset):
    """Return a clone configured to fetch results after `offset` value.

    Non-int offsets are silently ignored (clone left unmodified).
    """
    copy = self._clone()
    if isinstance(offset, int):
        copy._offset = offset
    return copy
def get_comparable_values(self):
    """Return a tuple of values representing the unicity of the object."""
    return (
        not self.generic,
        int(self.code),
        str(self.message),
        str(self.description),
    )
def InstallNanny ( self ) :
"""Install the nanny program .""" | # We need to copy the nanny sections to the registry to ensure the
# service is correctly configured .
new_config = config . CONFIG . MakeNewConfig ( )
new_config . SetWriteBack ( config . CONFIG [ "Config.writeback" ] )
for option in self . nanny_options :
new_config . Set ( option , config . CONFIG . Get ( option... |
def add_new_target ( self , address , target_type , target_base = None , dependencies = None , derived_from = None , ** kwargs ) :
"""Creates a new target , adds it to the context and returns it .
This method ensures the target resolves files against the given target _ base , creating the
directory if needed an... | rel_target_base = target_base or address . spec_path
abs_target_base = os . path . join ( get_buildroot ( ) , rel_target_base )
if not os . path . exists ( abs_target_base ) :
os . makedirs ( abs_target_base )
# TODO : Adding source roots on the fly like this is yucky , but hopefully this
# method will go a... |
def deal_with_changeset_stack_policy(self, fqn, stack_policy):
    """Set a stack policy when using changesets.

    ChangeSets don't allow setting a stack policy in the same update
    call, so the policy is applied before executing the changeset.

    Args:
        fqn: fully-qualified stack name.
        stack_policy: policy to apply; no-op when falsy.
    """
    if not stack_policy:
        return
    kwargs = generate_stack_policy_args(stack_policy)
    kwargs["StackName"] = fqn
    logger.debug("Setting stack policy on %s.", fqn)
    self.cloudformation.set_stack_policy(**kwargs)
def Print ( self , output_writer ) :
"""Prints a human readable version of the filter .
Args :
output _ writer ( CLIOutputWriter ) : output writer .""" | if self . _date_time_ranges :
for date_time_range in self . _date_time_ranges :
if date_time_range . start_date_time is None :
end_time_string = date_time_range . end_date_time . CopyToDateTimeString ( )
output_writer . Write ( '\t{0:s} after {1:s}\n' . format ( date_time_range . tim... |
def __create_log_props ( cls , log_props , _getdict , _setdict ) : # @ NoSelf
"""Creates all the logical property .
The list of names of properties to be created is passed
with frozenset log _ props . The getter / setter information is
taken from _ { get , set } dict .
This method resolves also wildcards in... | real_log_props = set ( )
resolved_getdict = { }
resolved_setdict = { }
for _dict_name , _dict , _resolved_dict in ( ( "getter" , _getdict , resolved_getdict ) , ( "setter" , _setdict , resolved_setdict ) ) : # first resolve all wildcards
for pat , ai in ( ( pat , ai ) for pat , ai in _dict . items ( ) if frozenset ... |
def get_filtered_keys ( self , suffix , * args , ** kwargs ) :
"""Returns the index key for the given args " value " ( ` args ` )
Parameters
kwargs : dict
use _ lua : bool
Default to ` ` True ` ` , if scripting is supported .
If ` ` True ` ` , the process of reading from the sorted - set , extracting
th... | accepted_key_types = kwargs . get ( 'accepted_key_types' , None )
if accepted_key_types and 'set' not in accepted_key_types and 'zset' not in accepted_key_types :
raise ImplementationError ( '%s can only return keys of type "set" or "zset"' % self . __class__ . __name__ )
key_type = 'set' if not accepted_key_types ... |
def load_config ( self , config_file_name ) :
"""Load configuration file from prt or str .
Configuration file type is extracted from the file suffix - prt or str .
: param config _ file _ name : full path to the configuration file .
IxTclServer must have access to the file location . either :
The config fil... | config_file_name = config_file_name . replace ( '\\' , '/' )
ext = path . splitext ( config_file_name ) [ - 1 ] . lower ( )
if ext == '.prt' :
self . api . call_rc ( 'port import "{}" {}' . format ( config_file_name , self . uri ) )
elif ext == '.str' :
self . reset ( )
self . api . call_rc ( 'stream import... |
def main():
    """Main function for the deprecated 'sl' command.

    Prints a pointer to the replacement 'slcli' invocation on stderr and
    exits with status -1.
    """
    print("ERROR: Use the 'slcli' command instead.", file=sys.stderr)
    print("> slcli %s" % ' '.join(sys.argv[1:]), file=sys.stderr)
    # sys.exit instead of the bare `exit` builtin: `exit` is injected by
    # the `site` module and is not guaranteed to exist in every setup.
    sys.exit(-1)
def split_file_urls_by_size(self, size):
    """Partition self.file_urls into (large, small) lists by a byte threshold.

    :param size: int: size (in bytes); files with size >= this are "large".
    :return: ([ProjectFileUrl], [ProjectFileUrl]): (large file urls, small file urls)
    """
    large, small = [], []
    for file_url in self.file_urls:
        bucket = large if file_url.size >= size else small
        bucket.append(file_url)
    return large, small
def _getphoto_location ( self , pid ) :
"""Asks fb for photo location information
returns tuple with lat , lon , accuracy""" | logger . debug ( '%s - Getting location from fb' % ( pid ) )
lat = None
lon = None
accuracy = None
resp = self . fb . photos_geo_getLocation ( photo_id = pid )
if resp . attrib [ 'stat' ] != 'ok' :
logger . error ( "%s - fb: photos_geo_getLocation failed with status: %s" , resp . attrib [ 'stat' ] ) ;
return ( ... |
def preprocessFastqs ( fastqFNs , seqFNPrefix , offsetFN , abtFN , areUniform , logger ) :
'''This function does the grunt work behind string extraction for fastq files
@ param fastqFNs - a list of . fq filenames for parsing
@ param seqFNPrefix - this is always of the form ' < DIR > / seqs . npy '
@ param off... | # create a seqArray
seqArray = [ ]
tempFileId = 0
seqsPerFile = 1000000
maxSeqLen = - 1
numSeqs = 0
subSortFNs = [ ]
for fnID , fn in enumerate ( fastqFNs ) : # open the file and read in starting form the second , every 4th line
logger . info ( 'Loading \'' + fn + '\'...' )
if fn . endswith ( '.gz' ) :
... |
def destroy(self):
    """Destroy and close the App.

    :return: None.
    :note: Once destroyed an App can no longer be used.
    """
    # If this is the tracked main app, clear the class-level reference
    # before tearing down the Tk root.
    if self == App._main_app:
        App._main_app = None
    self.tk.destroy()
def is_valid_github_uri ( uri : URI , expected_path_terms : Tuple [ str , ... ] ) -> bool :
"""Return a bool indicating whether or not the URI fulfills the following specs
Valid Github URIs * must * :
- Have ' https ' scheme
- Have ' api . github . com ' authority
- Have a path that contains all " expected ... | if not is_text ( uri ) :
return False
parsed = parse . urlparse ( uri )
path , scheme , authority = parsed . path , parsed . scheme , parsed . netloc
if not all ( ( path , scheme , authority ) ) :
return False
if any ( term for term in expected_path_terms if term not in path ) :
return False
if scheme != "h... |
def listdir(dir_pathname, recursive=True, topdown=True, followlinks=False):
    """Yield absolute paths of all items in a directory, optionally
    recursively.

    :param dir_pathname: the directory to traverse.
    :param recursive: ``True`` for walking recursively through the tree.
    :param topdown: passed through to ``walk``.
    :param followlinks: whether to follow symlinks while walking.
    """
    for root, dirnames, filenames in walk(
            dir_pathname, recursive, topdown, followlinks):
        # Directories first, then files, matching walk's per-root order.
        for entry in dirnames + filenames:
            yield absolute_path(os.path.join(root, entry))
def run ( user , port = 4242 ) :
"""Build a temporary directory with a visualization and serve it over HTTP .
Examples
> > > bandicoot . visualization . run ( U )
Successfully exported the visualization to / tmp / tmpsIyncS
Serving bandicoot visualization at http : / / 0.0.0.0:4242""" | owd = os . getcwd ( )
dir = export ( user )
os . chdir ( dir )
Handler = SimpleHTTPServer . SimpleHTTPRequestHandler
try :
httpd = SocketServer . TCPServer ( ( "" , port ) , Handler )
print ( "Serving bandicoot visualization at http://0.0.0.0:%i" % port )
httpd . serve_forever ( )
except KeyboardInterrupt :... |
def shorten_aead(aead):
    """Produce pretty-printable version of long AEAD.

    Shows the first and last 4 bytes as hex, e.g. ``"01020304...06070809"``.
    """
    import binascii
    # binascii.hexlify works on Python 2 and 3; the previous
    # str.encode('hex') codec only exists on Python 2.
    head = binascii.hexlify(aead.data[:4]).decode('ascii')
    tail = binascii.hexlify(aead.data[-4:]).decode('ascii')
    return "%s...%s" % (head, tail)
def _set_interface_detail ( self , v , load = False ) :
"""Setter method for interface _ detail , mapped from YANG variable / isis _ state / interface _ detail ( container )
If this variable is read - only ( config : false ) in the
source YANG file , then _ set _ interface _ detail is considered as a private
... | if hasattr ( v , "_utype" ) :
v = v . _utype ( v )
try :
t = YANGDynClass ( v , base = interface_detail . interface_detail , is_container = 'container' , presence = False , yang_name = "interface-detail" , rest_name = "interface-detail" , parent = self , path_helper = self . _path_helper , extmethods = self . _... |
def show_firmware_version_output_show_firmware_version_os_version ( self , ** kwargs ) :
"""Auto Generated Code""" | config = ET . Element ( "config" )
show_firmware_version = ET . Element ( "show_firmware_version" )
config = show_firmware_version
output = ET . SubElement ( show_firmware_version , "output" )
show_firmware_version = ET . SubElement ( output , "show-firmware-version" )
os_version = ET . SubElement ( show_firmware_versi... |
def normalize_variable_name ( node , reachability_tester ) : # type : ( Dict [ str , Any ] , ReferenceReachabilityTester ) - > Optional [ str ]
"""Returns normalized variable name .
Normalizing means that variable names get explicit visibility by
visibility prefix such as : " g : " , " s : " , . . .
Returns N... | node_type = NodeType ( node [ 'type' ] )
if not is_analyzable_identifier ( node ) :
return None
if node_type is NodeType . IDENTIFIER :
return _normalize_identifier_value ( node , reachability_tester )
# Nodes identifier - like without identifier is always normalized because
# the nodes can not have a visibilit... |
def _partialParseQUnits ( self , s , sourceTime ) :
"""test if giving C { s } matched CRE _ QUNITS , used by L { parse ( ) }
@ type s : string
@ param s : date / time text to evaluate
@ type sourceTime : struct _ time
@ param sourceTime : C { struct _ time } value to use as the base
@ rtype : tuple
@ re... | parseStr = None
chunk1 = chunk2 = ''
# Quantity + Units
m = self . ptc . CRE_QUNITS . search ( s )
if m is not None :
debug and log . debug ( 'CRE_QUNITS matched' )
if self . _UnitsTrapped ( s , m , 'qunits' ) :
debug and log . debug ( 'day suffix trapped by qunit match' )
else :
if ( m . gr... |
def to_object ( self , data ) :
"""Deserialize input data
: param data : serialized input Data object
: return : Deserialized object""" | if not isinstance ( data , Data ) :
return data
if is_null_data ( data ) :
return None
inp = self . _create_data_input ( data )
try :
type_id = data . get_type ( )
serializer = self . _registry . serializer_by_type_id ( type_id )
if serializer is None :
if self . _active :
raise ... |
def parse_navigation_html_to_tree ( html , id ) :
"""Parse the given ` ` html ` ` ( an etree object ) to a tree .
The ` ` id ` ` is required in order to assign the top - level tree id value .""" | def xpath ( x ) :
return html . xpath ( x , namespaces = HTML_DOCUMENT_NAMESPACES )
try :
value = xpath ( '//*[@data-type="binding"]/@data-value' ) [ 0 ]
is_translucent = value == 'translucent'
except IndexError :
is_translucent = False
if is_translucent :
id = TRANSLUCENT_BINDER_ID
tree = { 'id' : ... |
def create ( cls , request_inquiries , total_amount_inquired , monetary_account_id = None , status = None , event_id = None , custom_headers = None ) :
"""Create a request batch by sending an array of single request objects ,
that will become part of the batch .
: type user _ id : int
: type monetary _ accoun... | if custom_headers is None :
custom_headers = { }
request_map = { cls . FIELD_REQUEST_INQUIRIES : request_inquiries , cls . FIELD_STATUS : status , cls . FIELD_TOTAL_AMOUNT_INQUIRED : total_amount_inquired , cls . FIELD_EVENT_ID : event_id }
request_map_string = converter . class_to_json ( request_map )
request_map_... |
def templateParametersStringAsRestList ( self , nodeByRefid ) :
'''. . todo : :
document this , create another method for creating this without the need for
generating links , to be used in making the node titles and labels''' | if not self . template_params :
return None
else :
param_stream = StringIO ( )
for param_t , decl_n , def_n in self . template_params :
refid , typeid = param_t
# Say you wanted a custom link text ' custom ' , and somewhere
# else you had an internal link ' . . _ some _ link : ' . Th... |
def markdown_search_user ( request ) :
"""Json usernames of the users registered & actived .
url ( method = get ) :
/ martor / search - user / ? username = { username }
Response :
error :
- ` status ` is status code ( 204)
- ` error ` is error message .
success :
- ` status ` is status code ( 204)
... | data = { }
username = request . GET . get ( 'username' )
if username is not None and username != '' and ' ' not in username :
users = User . objects . filter ( Q ( username__icontains = username ) ) . filter ( is_active = True )
if users . exists ( ) :
data . update ( { 'status' : 200 , 'data' : [ { 'us... |
def is_all_field_none ( self ) :
""": rtype : bool""" | if self . _id_ is not None :
return False
if self . _time_responded is not None :
return False
if self . _time_expiry is not None :
return False
if self . _monetary_account_id is not None :
return False
if self . _amount_inquired is not None :
return False
if self . _amount_responded is not None :
... |
def getStickXY ( TableName ) :
"""Get X and Y for fine plotting of a stick spectrum .
Usage : X , Y = getStickXY ( TableName ) .""" | cent , intens = getColumns ( TableName , ( 'nu' , 'sw' ) )
n = len ( cent )
cent_ = zeros ( n * 3 )
intens_ = zeros ( n * 3 )
for i in range ( n ) :
intens_ [ 3 * i ] = 0
intens_ [ 3 * i + 1 ] = intens [ i ]
intens_ [ 3 * i + 2 ] = 0
cent_ [ ( 3 * i ) : ( 3 * i + 3 ) ] = cent [ i ]
return cent_ , intens... |
def parse_and_normalize_url_date ( date_str ) :
"""Parse a ISO 8601 date - time with optional timezone .
- Return as datetime with timezone adjusted to UTC .
- Return naive date - time set to UTC .""" | if date_str is None :
return None
try :
return d1_common . date_time . dt_from_iso8601_str ( date_str )
except d1_common . date_time . iso8601 . ParseError as e :
raise d1_common . types . exceptions . InvalidRequest ( 0 , 'Invalid date format for URL parameter. date="{}" error="{}"' . format ( date_str , s... |
def list_themes(dark=True):
    """List all installed theme files for the chosen variant.

    :param dark: select the "dark" colorscheme directory when True,
        otherwise "light".
    """
    variant = "dark" if dark else "light"
    theme_dir = os.path.join(MODULE_DIR, "colorschemes", variant)
    return [entry for entry in os.scandir(theme_dir)
            if os.path.isfile(entry.path)]
def subjects_download ( self , subject_id ) :
"""Get data file for subject with given identifier .
Parameters
subject _ id : string
Unique subject identifier
Returns
FileInfo
Information about subject ' s data file on disk or None if identifier
is unknown""" | # Retrieve subject to ensure that it exist
subject = self . subjects_get ( subject_id )
if subject is None : # Return None if subject is unknown
return None
else : # Reference and information for original uploaded file
return FileInfo ( subject . data_file , subject . properties [ datastore . PROPERTY_MIMETYPE ... |
def create_device_role ( role , color ) :
'''. . versionadded : : 2019.2.0
Create a device role
role
String of device role , e . g . , ` ` router ` `
CLI Example :
. . code - block : : bash
salt myminion netbox . create _ device _ role router''' | nb_role = get_ ( 'dcim' , 'device-roles' , name = role )
if nb_role :
return False
else :
payload = { 'name' : role , 'slug' : slugify ( role ) , 'color' : color }
role = _add ( 'dcim' , 'device-roles' , payload )
if role :
return { 'dcim' : { 'device-roles' : payload } }
else :
retu... |
def _execute_level ( self , level , audio_file_mfcc , text_files , sync_roots , force_aba_auto = False ) :
"""Compute the alignment for all the nodes in the given level .
Return a pair ( next _ level _ text _ files , next _ level _ sync _ roots ) ,
containing two lists of text file subtrees and sync map subtree... | self . _set_synthesizer ( )
next_level_text_files = [ ]
next_level_sync_roots = [ ]
for text_file_index , text_file in enumerate ( text_files ) :
self . log ( [ u"Text level %d, fragment %d" , level , text_file_index ] )
self . log ( [ u" Len: %d" , len ( text_file ) ] )
sync_root = sync_roots [ text_fil... |
def _mk_adjacency_matrix ( self , section , proportion , flats , elev , mag , dX , dY ) :
"""Calculates the adjacency of connectivity matrix . This matrix tells
which pixels drain to which .
For example , the pixel i , will recieve area from np . nonzero ( A [ i , : ] )
at the proportions given in A [ i , : ]... | shp = section . shape
mat_data = np . row_stack ( ( proportion , 1 - proportion ) )
NN = np . prod ( shp )
i12 = np . arange ( NN ) . reshape ( shp )
j1 = - np . ones_like ( i12 )
j2 = - np . ones_like ( i12 )
# make the connectivity for the non - flats / pits
j1 , j2 = self . _mk_connectivity ( section , i12 , j1 , j2... |
def float_constructor(loader, node):
    """Construct Decimal from YAML float encoding."""
    text = loader.construct_scalar(node)
    # YAML's special float spellings map to Decimal's special values.
    special = {
        '.inf': Decimal('Infinity'),
        '-.inf': -Decimal('Infinity'),
        '.nan': Decimal('NaN'),
    }
    if text in special:
        return special[text]
    return Decimal(text)
def overlap1d(l1, l2, PAx, PBx, gamma):
    """The one-dimensional component of the overlap integral. Taken from THO eq. 2.12

    >>> isclose(overlap1d(0,0,0,0,1), 1.0)
    True
    """
    # Sum runs over i = 0 .. floor((l1 + l2) / 2), inclusive.
    upper = int(floor(0.5 * (l1 + l2)))
    return sum(
        binomial_prefactor(2 * i, l1, l2, PAx, PBx)
        * fact2(2 * i - 1)
        / pow(2 * gamma, i)
        for i in range(upper + 1)
    )
def analyze(self, text):
    """Analyze text and return pretty format.

    Args:
        text: string, the input text.

    Returns:
        res: dict with the per-word tags and probabilities produced by
        the model for ``text``.
    """
    # Call order: predict, derive tags and probabilities, then format.
    probabilities = self.predict_proba(text)
    response = self._build_response(
        text,
        self._get_tags(probabilities),
        self._get_prob(probabilities),
    )
    return response
def modify_content ( request , page_id , content_type , language_id ) :
"""Modify the content of a page .""" | page = get_object_or_404 ( Page , pk = page_id )
perm = request . user . has_perm ( 'pages.change_page' )
if perm and request . method == 'POST' :
content = request . POST . get ( 'content' , False )
if not content :
raise Http404
page = Page . objects . get ( pk = page_id )
if settings . PAGE_C... |
def Start(self, Minimized=False, Nosplash=False):
    """Starts Skype application.

    :Parameters:
      Minimized : bool
        If True, Skype is started minimized in system tray.
      Nosplash : bool
        If True, no splash screen is displayed upon startup.
    """
    # Delegate directly to the low-level API wrapper.
    api = self._Skype._Api
    api.startup(Minimized, Nosplash)
def get_slice ( self , slice_type , slice_number , time_point = 0 ) :
"""Returns a slice of the dataset .
slice . data contains the window / levelled values , in uint8
slice . original _ data contains the original data for this slice
: param time _ point : in case of 4d nifti the 4th dimension
: param slice... | slice_ = Slice ( )
slice_ . slice_number = slice_number
# assert that slice _ number is withing the range
assert slice_number >= 0
assert slice_number < self . _get_number_of_slices ( slice_type )
slice_data = None
if slice_type == SliceType . AXIAL :
slice_data = self . __get_raw_slice__ ( slice_number , self . ax... |
def user_create ( name , passwd , database = None , user = None , password = None , host = None , port = None ) :
'''Create a cluster admin or a database user .
If a database is specified : it will create database user .
If a database is not specified : it will create a cluster admin .
name
User name for th... | if user_exists ( name , database , user , password , host , port ) :
if database :
log . info ( 'User \'%s\' already exists for DB \'%s\'' , name , database )
else :
log . info ( 'Cluster admin \'%s\' already exists' , name )
return False
client = _client ( user = user , password = password ... |
def main ( ) :
'''Main function''' | # We should only steal the root logger if we ' re the application , not the module
logging . basicConfig ( level = logging . DEBUG )
args = get_args ( )
if args . password :
password = args . password
else :
password = getpass ( prompt = 'Enter password for {}@{}: ' . format ( args . user , args . host ) )
opsv... |
def normalize_allele_name ( raw_allele , omit_dra1 = False , infer_class2_pair = True ) :
"""MHC alleles are named with a frustratingly loose system . It ' s not uncommon
to see dozens of different forms for the same allele .
Note : this function works with both class I and class II allele names ( including
a... | cache_key = ( raw_allele , omit_dra1 , infer_class2_pair )
if cache_key in _normalized_allele_cache :
return _normalized_allele_cache [ cache_key ]
parsed_alleles = parse_classi_or_classii_allele_name ( raw_allele , infer_pair = infer_class2_pair )
species = parsed_alleles [ 0 ] . species
normalized_list = [ specie... |
async def get_value(self):
    """Get the value from the API. Make sure to use a lock in order not to
    fetch the value twice at the same time."""
    cache = self.request.custom_content
    async with self.lock:
        # Only the first caller pays for the API round-trip; later
        # callers read the cached value.
        if self.content_key not in cache:
            cache[self.content_key] = await self.call_api()
        return cache[self.content_key]
def _safe_sendBreak_v2_7 ( self ) : # pylint : disable = invalid - name
"""! pyserial 2.7 API implementation of sendBreak / setBreak
@ details
Below API is deprecated for pyserial 3 . x versions !
http : / / pyserial . readthedocs . org / en / latest / pyserial _ api . html # serial . Serial . sendBreak
htt... | result = True
try :
self . sendBreak ( )
except : # pylint : disable = bare - except
# In Linux a termios . error is raised in sendBreak and in setBreak .
# The following setBreak ( ) is needed to release the reset signal on the target mcu .
try :
self . setBreak ( False )
except : # pylint : disabl... |
def change_max_svc_check_attempts ( self , service , check_attempts ) :
"""Modify max service check attempt
Format of the line that triggers function call : :
CHANGE _ MAX _ SVC _ CHECK _ ATTEMPTS ; < host _ name > ; < service _ description > ; < check _ attempts >
: param service : service to edit
: type s... | service . modified_attributes |= DICT_MODATTR [ "MODATTR_MAX_CHECK_ATTEMPTS" ] . value
service . max_check_attempts = check_attempts
if service . state_type == u'HARD' and service . state == u'OK' and service . attempt > 1 :
service . attempt = service . max_check_attempts
self . send_an_element ( service . get_upd... |
def matched_interpreters(interpreters, constraints):
    """Given some filters, yield any interpreter that matches at least one of them.

    :param interpreters: a list of PythonInterpreter objects for filtering
    :param constraints: A sequence of strings that constrain the interpreter
        compatibility for this yield.
    """
    for candidate in interpreters:
        # Skip interpreters that satisfy none of the constraints.
        if not any(candidate.identity.matches(filt) for filt in constraints):
            continue
        TRACER.log("Constraints on interpreters: %s, Matching Interpreter: %s" % (constraints, candidate.binary), V=3)
        yield candidate
def crypto_aead_chacha20poly1305_encrypt ( message , aad , nonce , key ) :
"""Encrypt the given ` ` message ` ` using the " legacy " construction
described in draft - agl - tls - chacha20poly1305.
: param message :
: type message : bytes
: param aad :
: type aad : bytes
: param nonce :
: type nonce : ... | ensure ( isinstance ( message , bytes ) , 'Input message type must be bytes' , raising = exc . TypeError )
mlen = len ( message )
ensure ( mlen <= crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX , 'Message must be at most {0} bytes long' . format ( crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX ) , raising = exc . ValueEr... |
def p_param_definition(p):
    """param_definition : param_def"""
    # NOTE: the docstring above is a ply grammar rule and must stay intact.
    result = p[1]
    p[0] = result
    if result is not None:
        result.byref = OPTIONS.byref.value
def loadCats(self, ids=[]):
    """Load cats with the specified ids.

    :param ids (int array): integer ids specifying cats
    :return: cats (object array): loaded cat objects
    """
    # NOTE: the mutable default is never mutated here, so it is safe.
    if _isArrayLike(ids):
        return [self.cats[cat_id] for cat_id in ids]
    # Exact type check (not isinstance) deliberately excludes bool etc.
    if type(ids) == int:
        return [self.cats[ids]]
def start(self):
    """Starts the coordinator thread and all related worker threads."""
    assert not self.interrupted
    # Bring up every worker first, then the coordinator itself.
    for worker in self.worker_threads:
        worker.start()
    WorkerThread.start(self)
def Space(self, n=1, dl=0):
    """Tap the space key ``n`` times, after an optional delay of ``dl``."""
    self.Delay(dl)
    self.keyboard.tap_key(" ", n)
def augpath ( path , augsuf = '' , augext = '' , augpref = '' , augdir = None , newext = None , newfname = None , ensure = False , prefix = None , suffix = None ) :
"""augments end of path before the extension .
augpath
Args :
path ( str ) :
augsuf ( str ) : augment filename before extension
Returns :
s... | if prefix is not None :
augpref = prefix
if suffix is not None :
augsuf = suffix
# Breakup path
dpath , fname = split ( path )
fname_noext , ext = splitext ( fname )
if newfname is not None :
fname_noext = newfname
# Augment ext
if newext is None :
newext = ext
# Augment fname
new_fname = '' . join ( ( ... |
def deploy ( self , initial_instance_count , instance_type , accelerator_type = None , endpoint_name = None , use_compiled_model = False , update_endpoint = False , ** kwargs ) :
"""Deploy the trained model to an Amazon SageMaker endpoint and return a ` ` sagemaker . RealTimePredictor ` ` object .
More informatio... | self . _ensure_latest_training_job ( )
endpoint_name = endpoint_name or self . latest_training_job . name
self . deploy_instance_type = instance_type
if use_compiled_model :
family = '_' . join ( instance_type . split ( '.' ) [ : - 1 ] )
if family not in self . _compiled_models :
raise ValueError ( "No ... |
def has_node_with_value(self, value):
    """Whether any node in ``self.node_list`` has the value ``value``.

    Args:
        value (Any): The value to find in ``self.node_list``

    Returns:
        bool: True if any node's ``value`` equals ``value``, else False
        (including when ``self.node_list`` is empty).
    """
    # The original loop paired ``else: return False`` with the ``if``
    # (or the ``for``), so a non-matching first node — or an empty
    # list — could short-circuit incorrectly or return None instead
    # of a bool. ``any`` scans the whole list and always returns bool.
    return any(node.value == value for node in self.node_list)
def getRemote(self, name: str = None, ha: HA = None):
    """Find the remote by name or ha.

    :param name: the name of the remote to find
    :param ha: host address pair the remote to find
    :raises: RemoteNotFound
    """
    # A truthy name wins; otherwise fall back to the host-address lookup.
    if name:
        return self.findInRemotesByName(name)
    return self.findInRemotesByHA(ha)
def get_annotations(self, atype=None, label=None):
    """Retrieve the annotations for this item from the server.

    :type atype: String
    :param atype: return only results with a matching Type field
    :type label: String
    :param label: return only results with a matching Label field
    :rtype: String
    """
    item_url = self.url()
    return self.client.get_item_annotations(item_url, atype, label)
def plot_quadpole_evolution ( dataobj , quadpole , cols , threshold = 5 , rolling = False , ax = None ) :
"""Visualize time - lapse evolution of a single quadropole .
Parameters
dataobj : : py : class : ` pandas . DataFrame `
DataFrame containing the data . Please refer to the documentation for
required col... | if isinstance ( dataobj , pd . DataFrame ) :
df = dataobj
else :
df = dataobj . data
subquery = df . query ( 'a == {0} and b == {1} and m == {2} and n == {3}' . format ( * quadpole ) )
# rhoa = subquery [ ' rho _ a ' ] . values
# rhoa [ 30 ] = 300
# subquery [ ' rho _ a ' ] = rhoa
if ax is not None :
fig = ... |
def comment_lines(lines, prefix):
    """Return commented lines"""
    # An empty prefix means "no commenting": hand back the input as-is.
    if not prefix:
        return lines
    commented = []
    for line in lines:
        # Blank lines get the bare prefix with no trailing space.
        commented.append(prefix + ' ' + line if line else prefix)
    return commented
def _parse_publisher(details):
    """Parse publisher of the book.

    Args:
        details (obj): HTMLElement containing slice of the page with details.

    Returns:
        str/None: Publisher's name as string or None if not found.
    """
    publisher = _get_td_or_none(details, "ctl00_ContentPlaceHolder1_tblRowNakladatel")
    if not publisher:
        # publisher is not specified on the page
        return None
    cleaned = dhtmlparser.removeTags(publisher).strip()
    # return None instead of a blank string
    return cleaned or None
def bin_stream(stream, content_type, status='200 OK', headers=None):
    """Utility method for constructing a binary response.

    :param Any stream: The response body stream
    :param str content_type: The content-type of the response
    :param str status: The HTTP status line
    :param list[tuple] headers: Additional response headers
    """
    # Content-Type always comes first; caller-supplied headers follow.
    all_headers = [('Content-Type', content_type)] + (headers or [])
    status_headers = StatusAndHeaders(status, all_headers)
    return WbResponse(status_headers, value=stream)
def move(self, path, raise_if_exists=False):
    """Delegate to MockFileSystem's move command for this target's path."""
    filesystem = self.fs
    filesystem.move(self.path, path, raise_if_exists)
def PushItem ( self , item , block = True ) :
"""Push an item on to the queue .
If no ZeroMQ socket has been created , one will be created the first time
this method is called .
Args :
item ( object ) : item to push on the queue .
block ( Optional [ bool ] ) : whether the push should be performed in block... | if not self . _closed_event :
raise RuntimeError ( 'Missing closed event.' )
if self . _closed_event . is_set ( ) :
raise errors . QueueAlreadyClosed ( )
if not self . _zmq_socket :
self . _CreateZMQSocket ( )
try :
if block :
self . _queue . put ( item , timeout = self . timeout_seconds )
e... |
def _deconstruct_url(self, url: str) -> List[str]:
    """Split a regular URL into parts

    :param url: A normalized URL
    :return: Parts of the URL
    :raises kua.routes.RouteError: If the depth of the URL exceeds
        the max depth of the deepest registered pattern
    :private:
    """
    max_depth = self._max_depth
    # Splitting at most max_depth + 1 times keeps any overflow in the
    # final element, which depth_of then detects.
    parts = url.split('/', max_depth + 1)
    if depth_of(parts) <= max_depth:
        return parts
    raise RouteError('No match')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.