signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def better_exec_command ( ssh , command , msg ) :
"""Uses paramiko to execute a command but handles failure by raising a ParamikoError if the command fails .
Note that unlike paramiko . SSHClient . exec _ command this is not asynchronous because we wait until the exit status is known
: Parameter ssh : a paramik... | chan = ssh . get_transport ( ) . open_session ( )
chan . exec_command ( command )
exit_status = chan . recv_exit_status ( )
if exit_status != 0 :
msg_str = chan . recv_stderr ( 1024 )
err_msgs = [ ]
while len ( msg_str ) > 0 :
err_msgs . append ( msg_str )
msg_str = chan . recv_stderr ( 1024... |
def from_element ( self , element , defaults = { } ) :
"""Populate object variables from SVD element""" | if isinstance ( defaults , SvdElement ) :
defaults = vars ( defaults )
for key in self . props :
try :
value = element . find ( key ) . text
except AttributeError : # Maybe it ' s attribute ?
default = defaults [ key ] if key in defaults else None
value = element . get ( key , defaul... |
def _refresh_editor_and_scrollbars(self):
    """Refresh editor content and scrollbars.

    A synthetic one-pixel-wider resize event is posted to force the
    scroll bars to update (same problem and solution as described at
    http://www.qtcentre.org/threads/44803); nothing visibly resizes.
    """
    TextHelper(self.editor).mark_whole_doc_dirty()
    self.editor.repaint()
    wider = self.editor.size()
    wider.setWidth(wider.width() + 1)
    self.editor.resizeEvent(QResizeEvent(self.editor.size(), wider))
def update_wrapper ( wrapper , wrapped , assigned = functools . WRAPPER_ASSIGNMENTS , updated = functools . WRAPPER_UPDATES ) :
"""Patch two bugs in functools . update _ wrapper .""" | # workaround for http : / / bugs . python . org / issue3445
assigned = tuple ( attr for attr in assigned if hasattr ( wrapped , attr ) )
wrapper = functools . update_wrapper ( wrapper , wrapped , assigned , updated )
# workaround for https : / / bugs . python . org / issue17482
wrapper . __wrapped__ = wrapped
return wr... |
def to_unicode(x, unaccent=False):
    """Convert *x* to a unicode string, optionally stripping accents.

    :param x: any object; converted with ``str``.
    :param unaccent: when True, drop combining marks (Unicode category
        'Mn') after NFD normalization, e.g. 'é' -> 'e'.
    :return: the (possibly unaccented) string.
    """
    text = str(x)
    if not unaccent:
        return text
    # NFD splits each accented character into base + combining mark(s);
    # filtering category 'Mn' removes the marks, leaving the base char.
    return ''.join(ch for ch in unicodedata.normalize('NFD', text)
                   if unicodedata.category(ch) != 'Mn')
def _get_areas(self):
    """Return surface element area values as a numpy array.

    Computed lazily from ``self.surfaces`` on first access and cached
    in ``self.areas`` for subsequent calls.
    """
    if self.areas is None:
        # Build once, then cache as a numpy array.
        self.areas = numpy.array([surf.get_area() for surf in self.surfaces])
    return self.areas
def from_json_dict(cls, json_dict  # type: Dict[str, Any]
                   ):  # type: (...) -> EnumSpec
    """Make an EnumSpec object from a dictionary of its properties.

    :param dict json_dict: must contain a 'format' key whose 'values'
        entry lists the permitted enum values.
    :return: the populated EnumSpec instance.
    """
    # noinspection PyCompatibility
    # Let the parent class populate the common fields, then narrow the
    # static type for Mypy's benefit.
    spec = cast(EnumSpec, super().from_json_dict(json_dict))
    spec.values = set(json_dict['format']['values'])
    return spec
def ld_prune ( df , ld_beds , snvs = None ) :
"""Prune set of GWAS based on LD and significance . A graph of all SNVs is
constructed with edges for LD > = 0.8 and the most significant SNV per
connected component is kept .
Parameters
df : pandas . DataFrame
Pandas dataframe with unique SNVs . The index is ... | import networkx as nx
import tabix
if snvs :
df = df . ix [ set ( df . index ) & set ( snvs ) ]
keep = set ( )
for chrom in ld_beds . keys ( ) :
tdf = df [ df [ 'chrom' ] . astype ( str ) == chrom ]
if tdf . shape [ 0 ] > 0 :
f = tabix . open ( ld_beds [ chrom ] )
# Make a dict where each ke... |
def set_meta(self, name, format, *args):
    """Set certificate metadata from a formatted string.

    *args* are the printf-style arguments for *format*; the call is
    delegated to the underlying zcert C binding.
    """
    handle = self._as_parameter_
    return lib.zcert_set_meta(handle, name, format, *args)
def _get_config_dir():
    """Return the sawtooth configuration directory.

    Resolution order:
    1. ``$SAWTOOTH_HOME/etc`` when SAWTOOTH_HOME is set;
    2. ``<install root>/conf`` on Windows (derived from argv[0]);
    3. ``/etc/sawtooth`` otherwise.
    """
    sawtooth_home = os.environ.get('SAWTOOTH_HOME')
    if sawtooth_home is not None:
        return os.path.join(sawtooth_home, 'etc')
    if os.name == 'nt':
        # Two directory levels up from the executable, then 'conf'.
        exe_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
        return os.path.join(os.path.dirname(exe_dir), 'conf')
    return '/etc/sawtooth'
def pause_knocks(obj):
    """Context manager to suspend sending knocks for the given model.

    :param obj: model instance; knocks for its class are disabled while
        the ``with`` body runs and re-enabled afterwards.
    """
    if not hasattr(_thread_locals, 'knock_enabled'):
        _thread_locals.knock_enabled = {}
    obj.__class__._disconnect()
    _thread_locals.knock_enabled[obj.__class__] = False
    try:
        yield
    finally:
        # Re-enable knocks even if the managed block raised; otherwise
        # signals would stay disconnected for the rest of the process.
        _thread_locals.knock_enabled[obj.__class__] = True
        obj.__class__._connect()
def apply_encoding_options ( self , min_token_count = 1 , limit_top_tokens = None ) :
"""Applies the given settings for subsequent calls to ` encode _ texts ` and ` decode _ texts ` . This allows you to
play with different settings without having to re - run tokenization on the entire corpus .
Args :
min _ to... | if not self . has_vocab :
raise ValueError ( "You need to build the vocabulary using `build_vocab` " "before using `apply_encoding_options`" )
if min_token_count < 1 :
raise ValueError ( "`min_token_count` should atleast be 1" )
# Remove tokens with freq < min _ token _ count
token_counts = list ( self . _token... |
def _unkown_type(self, uridecodebin, decodebin, caps):
    """Callback for decodebin's "unknown-type" signal.

    Fires *before* the stream becomes ready when the file cannot be
    read. Non-audio streams (e.g. video) are ignored.
    """
    stream_desc = caps.to_string()
    if not stream_desc.startswith('audio/'):
        # Only audio decode failures are reported to the reader.
        return
    self.read_exc = UnknownTypeError(stream_desc)
    self.ready_sem.release()
def version_cmd(argv=sys.argv[1:]):  # pragma: no cover
    """Print the version number of Palladium.

    Usage:
      pld-version [options]

    Options:
      -h --help  Show this screen.
    """
    # docopt parses argv against the usage section of this docstring.
    docopt(version_cmd.__doc__, argv=argv)
    print(__version__)
def noaa_to_lpd ( files ) :
"""Convert NOAA format to LiPD format
: param dict files : Files metadata
: return None :""" | logger_noaa . info ( "enter process_noaa" )
# only continue if the user selected a mode correctly
logger_noaa . info ( "Found {} NOAA txt file(s)" . format ( str ( len ( files [ ".txt" ] ) ) ) )
print ( "Found {} NOAA txt file(s)" . format ( str ( len ( files [ ".txt" ] ) ) ) )
# Process each available file of the spec... |
def to_point(self, timestamp):
    """Get a Point conversion of this aggregation.

    :type timestamp: :class:`datetime.datetime`
    :param timestamp: the time to report the point as having been recorded.
    :rtype: :class:`opencensus.metrics.export.point.Point`
    :return: a Point carrying this aggregation's count as a long value.
    """
    count_value = value.ValueLong(self.count_data)
    return point.Point(count_value, timestamp)
def get_kernel_spec(self, kernel_name):
    """Return a :class:`KernelSpec` instance for the given kernel_name.

    Raises :exc:`NoSuchKernel` if the given kernel name is not found.
    The special CURRENT_ENV_KERNEL_NAME resolves to the running
    environment's ipykernel instead of a registered spec.
    """
    if kernel_name != CURRENT_ENV_KERNEL_NAME:
        return super(NbvalKernelspecManager, self).get_kernel_spec(kernel_name)
    return self.kernel_spec_class(
        resource_dir=ipykernel.kernelspec.RESOURCES,
        **ipykernel.kernelspec.get_kernel_dict())
def plot_data ( orig_data , data ) :
'''plot data in 3D''' | import numpy as np
from mpl_toolkits . mplot3d import Axes3D
import matplotlib . pyplot as plt
for dd , c in [ ( orig_data , 'r' ) , ( data , 'b' ) ] :
fig = plt . figure ( )
ax = fig . add_subplot ( 111 , projection = '3d' )
xs = [ d . x for d in dd ]
ys = [ d . y for d in dd ]
zs = [ d . z for d i... |
def rooms(self, sid, namespace=None):
    """Return the rooms a client is in.

    :param sid: Session ID of the client.
    :param namespace: The Socket.IO namespace for the event. If this
        argument is omitted the default namespace ``'/'`` is used.
    """
    # Any falsy namespace (None or '') falls back to the root namespace.
    active_ns = namespace or '/'
    return self.manager.get_rooms(sid, active_ns)
def insert_attribute(self, att, index):
    """Insert the attribute at the specified location.

    :param att: the attribute to insert
    :type att: Attribute
    :param index: the index to insert the attribute at
    :type index: int
    """
    # Delegate to the wrapped weka Instances object via JNI.
    javabridge.call(
        self.jobject, "insertAttributeAt", "(Lweka/core/Attribute;I)V",
        att.jobject, index)
def project_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """Invoke the /project-xxxx/describe API method.

    :param object_id: project ID, e.g. 'project-xxxx'.
    :param input_params: JSON-serializable request body (defaults to {}).
    :param always_retry: whether the request may be safely retried.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fdescribe
    """
    # Use a None sentinel instead of a shared mutable default dict.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/describe' % object_id, input_params,
                         always_retry=always_retry, **kwargs)
def update_widget_channels(self):
    """Push the currently selected x/y channel names to the gate manager.

    Reads the selections from the two channel list boxes and does
    nothing unless both axes have a selection.
    """
    x_sel = self.x_axis_list.GetSelection()
    y_sel = self.y_axis_list.GetSelection()
    if x_sel < 0 or y_sel < 0:
        # At least one axis has no selection yet.
        return
    channels = (self.x_axis_list.GetString(x_sel),
                self.y_axis_list.GetString(y_sel))
    self.fcgatemanager.set_axes(channels, self.ax)
def reread(self):
    """Read the configuration file and substitute references into checks conf.

    :return: True when either the parsed checks or the credentials changed.
    """
    logger.debug("Loading settings from %s", os.path.abspath(self.filename))
    conf = self.read_conf()
    creds_changed = self.creds.reread()
    new_checks = self.parser.parse_checks(conf)
    if self.checks == new_checks:
        # Checks unchanged: report whether the credentials changed.
        return creds_changed
    self.checks = new_checks
    return True
def get_workflow_status_of(brain_or_object, state_var="review_state"):
    """Get the current workflow status of the given brain or context.

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :param state_var: name of the workflow state variable to look up
    :return: the value of *state_var* reported by portal_workflow
    """
    wf_tool = get_tool("portal_workflow")
    return wf_tool.getInfoFor(ob=get_object(brain_or_object), name=state_var)
def attention_lm_moe_base_memeff ( ) :
"""Base model with attention expert .""" | hparams = attention_lm_moe_base_long_seq ( )
hparams . use_sepconv = False
hparams . diet_experts = True
hparams . layer_preprocess_sequence = "n"
hparams . layer_postprocess_sequence = "da"
hparams . layer_prepostprocess_dropout = 0.0
hparams . memory_efficient_ffn = True
hparams . attention_type = AttentionType . MEM... |
def set_headers ( self ) -> None :
"""Sets the content and caching headers on the response .
. . versionadded : : 3.1""" | self . set_header ( "Accept-Ranges" , "bytes" )
self . set_etag_header ( )
if self . modified is not None :
self . set_header ( "Last-Modified" , self . modified )
content_type = self . get_content_type ( )
if content_type :
self . set_header ( "Content-Type" , content_type )
cache_time = self . get_cache_time ... |
def prefixes(self):
    """List all prefixes used by the nodes of this graph."""
    # A set collapses duplicates; None means "no prefix" and is dropped.
    found = {self.prefix(n) for n in self.nodes()}
    found.discard(None)
    return list(found)
def _prepare_uri ( self , path , query_params = { } ) :
"""Prepares a full URI with the selected information .
` ` path ` ` :
Path can be in one of two formats :
- If : attr : ` server ` was defined , the ` ` path ` ` will be appended
to the existing host , or
- an absolute URL
` ` query _ params ` ` : ... | query_str = urllib . urlencode ( query_params )
# If we have a relative path ( as opposed to a full URL ) , build it of
# the connection info
if path . startswith ( '/' ) and self . server :
protocol = self . protocol
server = self . server
else :
protocol , server , path , _ , _ , _ = urlparse . urlparse (... |
def CopyFromStringTuple ( self , time_elements_tuple ) :
"""Copies time elements from string - based time elements tuple .
Args :
time _ elements _ tuple ( Optional [ tuple [ str , str , str , str , str , str ] ] ) :
time elements , contains year , month , day of month , hours , minutes and
seconds .
Rais... | if len ( time_elements_tuple ) < 6 :
raise ValueError ( ( 'Invalid time elements tuple at least 6 elements required,' 'got: {0:d}' ) . format ( len ( time_elements_tuple ) ) )
try :
year = int ( time_elements_tuple [ 0 ] , 10 )
except ( TypeError , ValueError ) :
raise ValueError ( 'Invalid year value: {0!s... |
def plt_goids(gosubdag, fout_img, goids, **kws_plt):
    """Plot GO IDs in a DAG (Directed Acyclic Graph).

    :param gosubdag: source GoSubDag providing go2obj and rcntobj.
    :param fout_img: output image filename.
    :param goids: GO IDs to include in the plotted sub-DAG.
    :return: the GoSubDagPlot used to render the image.
    """
    sub_dag = GoSubDag(goids, gosubdag.go2obj, rcntobj=gosubdag.rcntobj, **kws_plt)
    dag_plot = GoSubDagPlot(sub_dag, **kws_plt)
    dag_plot.plt_dag(fout_img)
    return dag_plot
def execute ( self , sql , args = None ) :
"""It is used for update , delete records .
: param sql string : the sql stamtement like ' select * from % s '
: param args list : Wen set None , will use dbi execute ( sql ) , else
dbi execute ( sql , args ) , the args keep the original rules , it shuld be tuple or ... | con = self . pool . pop ( )
c = None
try :
c = con . cursor ( )
LOGGER . debug ( "Execute sql: " + sql + " args:" + str ( args ) )
if type ( args ) is tuple :
c . execute ( sql , args )
elif type ( args ) is list :
if len ( args ) > 1 and type ( args [ 0 ] ) in ( list , tuple ) :
... |
def get_lab_text ( lab_slug , language ) :
"""Gets text description in English or Italian from a single lab from makeinitaly . foundation .""" | if language == "English" or language == "english" or language == "EN" or language == "En" :
language = "en"
elif language == "Italian" or language == "italian" or language == "IT" or language == "It" or language == "it" :
language = "it"
else :
language = "en"
wiki = MediaWiki ( makeinitaly__foundation_api_... |
def pypi_search ( self ) :
"""Search PyPI by metadata keyword
e . g . yolk - S name = yolk AND license = GPL
@ param spec : Cheese Shop search spec
@ type spec : list of strings
spec examples :
[ " name = yolk " ]
[ " license = GPL " ]
[ " name = yolk " , " AND " , " license = GPL " ]
@ returns : 0 ... | spec = self . pkg_spec
# Add remainging cli arguments to options . pypi _ search
search_arg = self . options . pypi_search
spec . insert ( 0 , search_arg . strip ( ) )
( spec , operator ) = self . parse_search_spec ( spec )
if not spec :
return 1
for pkg in self . pypi . search ( spec , operator ) :
if pkg [ 's... |
def secure ( new_user = env . user ) :
"""Minimal security steps for brand new servers .
Installs system updates , creates new user ( with sudo privileges ) for future
usage , and disables root login via SSH .""" | run ( "apt-get update -q" )
run ( "apt-get upgrade -y -q" )
run ( "adduser --gecos '' %s" % new_user )
run ( "usermod -G sudo %s" % new_user )
run ( "sed -i 's:RootLogin yes:RootLogin no:' /etc/ssh/sshd_config" )
run ( "service ssh restart" )
print ( green ( "Security steps completed. Log in to the server as '%s' from ... |
def rectangle(self, edge_style, pat, x1, y1, x2, y2, shadow=None):
    """Draw a rectangle with EDGE_STYLE, filled with PAT, over the
    bounding box (X1, Y1, X2, Y2).

    SHADOW is either None or a tuple (XDELTA, YDELTA, fillstyle); if
    non-null, a shadow of FILLSTYLE is drawn beneath the polygon.
    """
    # A rectangle is just a four-vertex polygon.
    corners = [(x1, y1), (x1, y2), (x2, y2), (x2, y1)]
    self.polygon(edge_style, pat, corners, shadow)
def get_symmetry_operations ( self , cartesian = False ) :
"""Return symmetry operations as a list of SymmOp objects .
By default returns fractional coord symmops .
But cartesian can be returned too .
Returns :
( [ SymmOp ] ) : List of symmetry operations .""" | rotation , translation = self . _get_symmetry ( )
symmops = [ ]
mat = self . _structure . lattice . matrix . T
invmat = np . linalg . inv ( mat )
for rot , trans in zip ( rotation , translation ) :
if cartesian :
rot = np . dot ( mat , np . dot ( rot , invmat ) )
trans = np . dot ( trans , self . _s... |
def newchild(self, chld=False):
    """Like givebirth(), but also append the new child to ``self.children``.

    :param chld: an existing child (or list of children) to adopt; any
        falsy value means "create a new child via givebirth()".
    :return: the adopted child (or list of children).
    """
    if not chld:
        chld = self.givebirth()
    # Normalize to a list so single children and lists share one path.
    adopted = chld if isinstance(chld, list) else [chld]
    for child in adopted:
        child.parent = self
    # NOTE: matches historical behavior — a list argument is appended
    # as a single element, not extended item-by-item.
    self.children.append(chld)
    return chld
def user_can_edit_news ( user ) :
"""Check if the user has permission to edit any of the registered NewsItem
types .""" | newsitem_models = [ model . get_newsitem_model ( ) for model in NEWSINDEX_MODEL_CLASSES ]
if user . is_active and user . is_superuser : # admin can edit news iff any news types exist
return bool ( newsitem_models )
for NewsItem in newsitem_models :
for perm in format_perms ( NewsItem , [ 'add' , 'change' , 'del... |
def next ( self ) :
"""Return next line without end of line marker or raise StopIteration .""" | try :
next_line = next ( self . _f )
except StopIteration :
self . _FinalCheck ( )
raise
self . _line_number += 1
m_eol = re . search ( r"[\x0a\x0d]*$" , next_line )
if m_eol . group ( ) == "\x0d\x0a" :
self . _crlf += 1
if self . _crlf <= 5 :
self . _crlf_examples . append ( self . _line_nu... |
def autodecode(b):
    """Try to decode ``bytes`` to text — default encoding first, then autodetect.

    Args:
        b (bytes): byte string
    Returns:
        str: decoded text string
    """
    try:
        return b.decode()
    except UnicodeError:
        # Deferred imports: only needed on the non-UTF-8 fallback path.
        import warnings
        import chardet
        result = chardet.detect(b)
        if result['confidence'] < 0.95:
            warnings.warn('autodecode failed with utf-8; guessing %s'
                          % result['encoding'])
        # BUG FIX: decode the original bytes — the previous code called
        # .decode() on the chardet result dict, which always raised.
        return b.decode(result['encoding'])
def probe ( path ) :
"""Probe a repository for its type .
: param str path : The path of the repository
: raises UnknownVCSType : if the repository type couldn ' t be inferred
: returns str : either ` ` git ` ` , ` ` hg ` ` , or ` ` svn ` `
This function employs some heuristics to guess the type of the repo... | import os
from . common import UnknownVCSType
if os . path . isdir ( os . path . join ( path , '.git' ) ) :
return 'git'
elif os . path . isdir ( os . path . join ( path , '.hg' ) ) :
return 'hg'
elif ( os . path . isfile ( os . path . join ( path , 'config' ) ) and os . path . isdir ( os . path . join ( path ,... |
def insert_before(self, sibling, row=None):
    """insert_before(sibling, row=None)

    :param sibling: A valid :obj:`Gtk.TreeIter`, or :obj:`None`
    :type sibling: :obj:`Gtk.TreeIter` or :obj:`None`
    :param row: a list of values to apply to the newly inserted row,
        or :obj:`None` to leave it empty.
    :return: the :obj:`Gtk.TreeIter` of the inserted row.
    """
    new_iter = Gtk.ListStore.insert_before(self, sibling)
    if row is not None:
        self.set_row(new_iter, row)
    return new_iter
def main(self, x):
    """Transposed FIR filter structure: feed sample *x*, return output.

    Accumulator chain: acc[i] = acc[i-1] + x * TAPS[-1 - i]; the last
    accumulator drives the output.
    """
    taps = self.TAPS
    self.acc[0] = x * taps[-1]
    for i in range(1, len(self.acc)):
        self.acc[i] = self.acc[i - 1] + x * taps[len(taps) - 1 - i]
    self.out = self.acc[-1]
    return self.out
def cholesky ( A , ordering_method = 'default' , return_type = RETURN_P_L , use_long = False ) :
'''P A P ' = L L ' ''' | logger . debug ( 'Calculating cholesky decomposition for matrix {!r} with ordering method {}, return type {} and use_long {}.' . format ( A , ordering_method , return_type , use_long ) )
# # check input
return_types = ( RETURN_L , RETURN_L_D , RETURN_P_L , RETURN_P_L_D )
if ordering_method not in CHOLMOD_ORDERING_METHO... |
def add(name, device):
    '''Add new device to RAID array.

    CLI Example:

    .. code-block:: bash

        salt '*' raid.add /dev/md0 /dev/sda1
    '''
    cmd = 'mdadm --manage {0} --add {1}'.format(name, device)
    # mdadm exits 0 on success.
    return __salt__['cmd.retcode'](cmd) == 0
def _flush_batch_incr_counter(self):
    """Increment any unflushed counter values and reset them to zero."""
    # six.iteritems is unnecessary in a Python-3 codebase (the file uses
    # `async def` elsewhere); plain .items() is equivalent here.
    for key, count in self._counter_dict.items():
        if count == 0:
            continue
        # key is a tuple of counter-name components; count goes last.
        args = list(key) + [count]
        self._incr_counter(*args)
        self._counter_dict[key] = 0
def get_all_reserved_instances_offerings ( self , reserved_instances_id = None , instance_type = None , availability_zone = None , product_description = None , filters = None ) :
"""Describes Reserved Instance offerings that are available for purchase .
: type reserved _ instances _ id : str
: param reserved _ ... | params = { }
if reserved_instances_id :
params [ 'ReservedInstancesId' ] = reserved_instances_id
if instance_type :
params [ 'InstanceType' ] = instance_type
if availability_zone :
params [ 'AvailabilityZone' ] = availability_zone
if product_description :
params [ 'ProductDescription' ] = product_descri... |
def get_fine_tune_model ( symbol , arg_params , num_classes , layer_name , dtype = 'float32' ) :
"""symbol : the pre - trained network symbol
arg _ params : the argument parameters of the pre - trained model
num _ classes : the number of classes for the fine - tune datasets
layer _ name : the layer name befor... | all_layers = symbol . get_internals ( )
net = all_layers [ layer_name + '_output' ]
net = mx . symbol . FullyConnected ( data = net , num_hidden = num_classes , name = 'fc' )
if dtype == 'float16' :
net = mx . sym . Cast ( data = net , dtype = np . float32 )
net = mx . symbol . SoftmaxOutput ( data = net , name = '... |
def element_info ( cls_or_slf , node , siblings , level , value_dims ) :
"""Return the information summary for an Element . This consists
of the dotted name followed by an value dimension names .""" | info = cls_or_slf . component_type ( node )
if len ( node . kdims ) >= 1 :
info += cls_or_slf . tab + '[%s]' % ',' . join ( d . name for d in node . kdims )
if value_dims and len ( node . vdims ) >= 1 :
info += cls_or_slf . tab + '(%s)' % ',' . join ( d . name for d in node . vdims )
return level , [ ( level , ... |
def get_autoflow(cls, obj, name):
    """Extract an object's autoflow tensor store for *name*.

    The store is looked up as an attribute named with the autoflow
    prefix; a new empty dict is created and attached when missing.

    :param obj: target GPflow object.
    :param name: unique part of the autoflow attribute name.
    :raises ValueError: if *name* is not a string.
    """
    if not isinstance(name, str):
        raise ValueError('Name must be string.')
    attr_name = cls.__autoflow_prefix__ + name
    store = misc.get_attribute(obj, attr_name, allow_fail=True, default={})
    if not store:
        # First access: persist the new (empty) store on the object.
        setattr(obj, attr_name, store)
    return store
def pack_value(self, val):
    """Pack *val* (bytes or a list of ints) as big-endian 16-bit words.

    :return: tuple (packed_bytes, length, None); when ``self.pad`` is
        set, odd-length values get two NUL pad bytes appended.
    """
    if isinstance(val, bytes):
        val = list(iterbytes(val))
    slen = len(val)
    # Pad odd-length payloads to an even word count when requested.
    pad = b'\0\0' * (slen % 2) if self.pad else b''
    return struct.pack('>' + 'H' * slen, *val) + pad, slen, None
def getChecked(self):
    """Get the checked attributes.

    :returns: list<str> -- checked attribute names
    """
    layout = self.layout()
    # Walk every widget in the layout, keeping the labels of checked ones.
    widgets = (layout.itemAt(i).widget() for i in range(layout.count()))
    return [str(w.text()) for w in widgets if w.isChecked()]
def update_notification_settings(self, api_token, event, service,
                                 should_notify):
    """Update a user's notification settings.

    :param api_token: The user's login api_token.
    :type api_token: str
    :param event: Update the notification settings of this event.
    :type event: str
    :param service: the service whose notifications are configured.
    :param should_notify: forwarded as the 'dont_notify' flag.
    """
    # NOTE(review): 'dont_notify' receives should_notify unchanged —
    # this looks inverted; confirm against the API before relying on it.
    return self._post('update_notification_setting', {
        'token': api_token,
        'notification_type': event,
        'service': service,
        'dont_notify': should_notify,
    })
def request_update(self, context):
    """Request a sink info update (sink_info_cb is called)."""
    op = pa_context_get_sink_info_by_name(
        context, self.current_sink.encode(), self._sink_info_cb, None)
    # The operation handle is not needed afterwards; release it.
    pa_operation_unref(op)
def plot_graph_folium ( G , graph_map = None , popup_attribute = None , tiles = 'cartodbpositron' , zoom = 1 , fit_bounds = True , edge_color = '#333333' , edge_width = 5 , edge_opacity = 1 ) :
"""Plot a graph on an interactive folium web map .
Note that anything larger than a small city can take a long time to p... | # check if we were able to import folium successfully
if not folium :
raise ImportError ( 'The folium package must be installed to use this optional feature.' )
# create gdf of the graph edges
gdf_edges = graph_to_gdfs ( G , nodes = False , fill_edge_geometry = True )
# get graph centroid
x , y = gdf_edges . unary_... |
def disassemble ( nex ) :
"""Given a NumExpr object , return a list which is the program disassembled .""" | rev_opcodes = { }
for op in interpreter . opcodes :
rev_opcodes [ interpreter . opcodes [ op ] ] = op
r_constants = 1 + len ( nex . signature )
r_temps = r_constants + len ( nex . constants )
def getArg ( pc , offset ) :
if sys . version_info [ 0 ] < 3 :
arg = ord ( nex . program [ pc + offset ] )
... |
def guess_path_encoding(file_path, default=DEFAULT_ENCODING):
    """Open *file_path* in binary mode and guess its text encoding."""
    with io.open(file_path, 'rb') as handle:
        return guess_file_encoding(handle, default=default)
def dynamicmap_memoization(callable_obj, streams):
    """Context manager toggling memoization on a Callable.

    Memoization is disabled while any of the supplied streams is
    transient and currently in a triggered state (typically during a
    DynamicMap event); the previous state is restored on exit.
    """
    memoization_state = bool(callable_obj._stream_memoization)
    callable_obj._stream_memoization &= not any(
        s.transient and s._triggering for s in streams)
    try:
        yield
    finally:
        # Restore regardless of exceptions in the managed block; the
        # original 'except: raise' clause was a no-op and is removed.
        callable_obj._stream_memoization = memoization_state
def schema(self):
    """The DQL syntax for creating this item."""
    parts = ["%s %s %s %s('%s'" % (self.name, self.data_type,
                                   self.index_type, self.key_type,
                                   self.index_name)]
    if self.includes is not None:
        # Projected attributes are rendered as a quoted list.
        quoted = ", ".join("'%s'" % inc for inc in self.includes)
        parts.append(", [%s]" % quoted)
    parts.append(")")
    return "".join(parts)
def getChangeSets(self):
    """Get all the ChangeSets of this workitem.

    :return: a :class:`list` containing all the
        :class:`rtcclient.models.ChangeSet` objects
    :rtype: list
    """
    changeset_tag = ("rtc_cm:com.ibm.team.filesystem.workitems."
                     "change_set.com.ibm.team.scm.ChangeSet")
    return self.rtc_obj._get_paged_resources(
        "ChangeSet",
        workitem_id=self.identifier,
        customized_attr=changeset_tag,
        page_size="10")
def _print_message(self, text, fd=None):
    '''Note: this overrides an existing method in ArgumentParser.'''
    # Because of the async -> sync -> async flow, output is queued up
    # here and printed at exit instead of being written immediately.
    self.root.mesgs.extend(text.split('\n'))
def signature_validate_single(signature, error=None):
    "is signature a single valid type."
    error, my_error = _get_error(error)
    status = dbus.dbus_signature_validate_single(
        signature.encode(), error._dbobj)
    # Surface any libdbus error before interpreting the result.
    my_error.raise_if_set()
    return status != 0
async def read(self, n=None):
    """Read and return all body content.

    Returns ``b''`` immediately when the body has already been
    streamed; otherwise concatenates every chunk from async iteration.
    """
    if self._streamed:
        return b''
    chunks = []
    async for chunk in self:
        chunks.append(chunk)
    return b''.join(chunks)
def to_dlpack_for_read(data):
    """Return a DLPack reference view of an NDArray for reading.

    Waits until all previous write operations on the array are
    finished, then wraps its DLManagedTensor pointer in a PyCapsule.

    Parameters
    ----------
    data : NDArray
        input data.

    Returns
    -------
    PyCapsule (the pointer of DLManagedTensor)
        a reference view of the NDArray.
    """
    data.wait_to_read()
    handle = DLPackHandle()
    check_call(_LIB.MXNDArrayToDLPack(data.handle, ctypes.byref(handle)))
    return ctypes.pythonapi.PyCapsule_New(
        handle, _c_str_dltensor, _c_dlpack_deleter)
def list_entries ( self , projects , filter_ = None , order_by = None , page_size = None , page_token = None ) :
"""Return a page of log entry resources .
See
https : / / cloud . google . com / logging / docs / reference / v2 / rest / v2 / entries / list
: type projects : list of strings
: param projects : ... | extra_params = { "projectIds" : projects }
if filter_ is not None :
extra_params [ "filter" ] = filter_
if order_by is not None :
extra_params [ "orderBy" ] = order_by
if page_size is not None :
extra_params [ "pageSize" ] = page_size
path = "/entries:list"
# We attach a mutable loggers dictionary so that a... |
def orbit ( self , x1_px , y1_px , x2_px , y2_px ) :
"""Causes the camera to " orbit " around the target point .
This is also called " tumbling " in some software packages .""" | px_per_deg = self . vport_radius_px / float ( self . orbit_speed )
radians_per_px = 1.0 / px_per_deg * np . pi / 180.0
t2p = self . position - self . target
M = Matrix4x4 . rotation_around_origin ( ( x1_px - x2_px ) * radians_per_px , self . ground )
t2p = M * t2p
self . up = M * self . up
right = ( self . up ^ t2p ) .... |
def add_match_rules(self, match_rules):
    """Add the given match rules to the ruleset.

    Handles a single rule or a list of rules.

    :param match_rules: object(s) representing a YAML section from the
        config file, e.g. [{'filename-starts-with': 'abc'}, ...]
    """
    # isinstance instead of type(...) == list: idiomatic and also
    # accepts list subclasses.
    if isinstance(match_rules, list):
        for rule in match_rules:
            self.add_match_rule(rule)
    else:
        # A single rule was passed in that's not wrapped in a list.
        self.add_match_rule(match_rules)
def get_next_tag(cls, el):
    """Get the next sibling that is a tag, or None when there is none."""
    sibling = el.next_sibling
    # Check for None *before* calling is_tag — the original evaluated
    # is_tag(sibling) first, handing None to the helper at chain end.
    while sibling is not None and not cls.is_tag(sibling):
        sibling = sibling.next_sibling
    return sibling
def angle_between_vectors(x, y):
    """Compute the angle between vectors *x* and *y*, in degrees.

    Returns 0 when the dot product is zero (orthogonal vectors),
    matching the original behavior.
    """
    dp = dot_product(x, y)
    if dp == 0:
        return 0
    cos_angle = dp / (magnitude(x) * magnitude(y))
    # Clamp against floating-point drift that can push the ratio just
    # outside [-1, 1] and make acos raise ValueError.
    cos_angle = max(-1.0, min(1.0, cos_angle))
    return math.degrees(math.acos(cos_angle))
def load_csv_data ( fname , tag ) :
"""Load data from a comma separated SuperMAG file
Parameters
fname : ( str )
CSV SuperMAG file name
tag : ( str )
Denotes type of file to load . Accepted types are ' indices ' , ' all ' ,
' stations ' , and ' ' ( for just magnetometer measurements ) .
Returns
data... | import re
if tag == "stations" : # Because there may be multiple operators , the default pandas reader
# cannot be used .
ddict = dict ( )
dkeys = list ( )
date_list = list ( )
# Open and read the file
with open ( fname , "r" ) as fopen :
dtime = pds . datetime . strptime ( fname . split ( "... |
async def running ( self ) :
"""Start websocket connection .""" | url = 'http://{}:{}' . format ( self . host , self . port )
try :
async with self . session . ws_connect ( url ) as ws :
self . state = STATE_RUNNING
async for msg in ws :
if self . state == STATE_STOPPED :
break
elif msg . type == aiohttp . WSMsgType . TEXT :... |
def errors_handler(self, *custom_filters, exception=None, run_task=None,
                   **kwargs):
    """Decorator for errors handler.

    :param exception: you can make handler for specific errors type
    :param run_task: run callback in task (no wait results)
    :return: decorator that registers and returns the callback unchanged
    """
    def decorator(callback):
        wrapped = self._wrap_async_task(callback, run_task)
        self.register_errors_handler(wrapped, *custom_filters,
                                     exception=exception, **kwargs)
        return callback
    return decorator
def after_model_change ( self , form , model , is_created ) :
"""Save model .""" | super ( KnowledgeAdmin , self ) . after_model_change ( form , model , is_created )
from invenio_collections . models import Collection
if form . kbtype . data == KnwKB . KNWKB_TYPES [ 'dynamic' ] :
id_collection = form . id_collection . data or None
collection = Collection . query . filter_by ( id = id_collecti... |
def interpolate_data(self, data, limit, method):
    """Interpolate dataframe.

    Parameters
    ----------
    data : pd.DataFrame
        Dataframe to interpolate.
    limit : int
        Interpolation limit.
    method : str
        Interpolation method.

    Returns
    -------
    pd.DataFrame
        Dataframe containing interpolated data.
    """
    return data.interpolate(how="index", limit=limit, method=method)
def get_sequence ( self , chrom , start , end , strand = '+' , indexing = ( - 1 , 0 ) ) :
"""chromosome is entered relative to the file it was built with , so it can be ' chr11 ' or ' 11 ' ,
start / end are coordinates , which default to python style [ 0,1 ) internally . So positions should be
entered with ( 1,... | try :
divisor = int ( self . sequence_index [ chrom ] [ 2 ] )
except KeyError :
self . open_fasta_index ( )
try :
divisor = int ( self . sequence_index [ chrom ] [ 2 ] )
except KeyError :
sys . stderr . write ( "%s cannot be found within the fasta index file.\n" % chrom )
return ... |
def scipy_solve_symm_block_tridiag(H_diag, H_upper_diag, v, ab=None):
    """use scipy.linalg.solve_banded to solve a symmetric block tridiagonal system

    see https://docs.scipy.org/doc/scipy/reference/generated/scipy.linalg.solveh_banded.html
    """
    from scipy.linalg import solveh_banded
    # Build the banded representation only when the caller did not supply one.
    if ab is None:
        ab = convert_block_tridiag_to_banded(H_diag, H_upper_diag)
    # Solve on the flattened right-hand side, then restore its shape.
    solution = solveh_banded(ab, v.ravel(), lower=True)
    return solution.reshape(v.shape)
def merge_obs ( self ) :
"""Match forecasts and observations .""" | for model_type in self . model_types :
self . matched_forecasts [ model_type ] = { }
for model_name in self . model_names [ model_type ] :
self . matched_forecasts [ model_type ] [ model_name ] = pd . merge ( self . forecasts [ model_type ] [ model_name ] , self . obs , right_on = "Step_ID" , how = "lef... |
def create_instance(self, **kwargs):
    """Creates a new virtual server instance.

    .. warning::
        This will add charges to your account.

    Any ``tags`` keyword is applied to the new guest after creation; all
    remaining keyword arguments are converted into the create template via
    ``_generate_create_dict``.

    :return: dict -- the created guest object
    """
    # Pull tags out before building the template; they are applied separately.
    tags = kwargs.pop('tags', None)
    create_template = self._generate_create_dict(**kwargs)
    new_guest = self.guest.createObject(create_template)
    if tags is not None:
        self.set_tags(tags, guest_id=new_guest['id'])
    return new_guest
def _register_template ( cls , template_bytes ) :
'''Registers the template for the widget and hooks init _ template''' | # This implementation won ' t work if there are nested templates , but
# we can ' t do that anyways due to PyGObject limitations so it ' s ok
if not hasattr ( cls , 'set_template' ) :
raise TypeError ( "Requires PyGObject 3.13.2 or greater" )
cls . set_template ( template_bytes )
bound_methods = set ( )
bound_widge... |
def recvall ( self , timeout = 0.5 ) :
"""Receive the RCON command response
: param timeout : The timeout between consequent data receive
: return str : The RCON command response with header stripped out""" | response = ''
self . socket . setblocking ( False )
start = time . time ( )
while True :
if response and time . time ( ) - start > timeout :
break
elif time . time ( ) - start > timeout * 2 :
break
try :
data = self . socket . recv ( 4096 )
if data :
response += d... |
def inheritanceTree(self):
    """Returns the inheritance tree for this schema, traversing up the
    hierarchy for the inherited schema instances.

    :return: <generator>
    """
    ancestor_name = self.inherits()
    while ancestor_name:
        # Resolve the named ancestor; a dangling reference is a hard error.
        ancestor = orb.system.schema(ancestor_name)
        if not ancestor:
            raise orb.errors.ModelNotFound(schema=ancestor_name)
        yield ancestor
        ancestor_name = ancestor.inherits()
def _get_neighbor_conf(neigh_ip_address):
    """Returns neighbor configuration for given neighbor ip address.

    Raises exception if no neighbor with `neigh_ip_address` exists.
    """
    conf = CORE_MANAGER.neighbors_conf.get_neighbor_conf(neigh_ip_address)
    if not conf:
        raise RuntimeConfigError(desc='No Neighbor configuration with IP'
                                      ' address %s' % neigh_ip_address)
    # Sanity check: the manager must hand back a NeighborConf instance.
    assert isinstance(conf, NeighborConf)
    return conf
def get_distance(F, x):
    """Helper function for margin-based loss. Return a distance matrix given a matrix."""
    num_rows = x.shape[0]
    # ||a - b||^2 = ||a||^2 + ||b||^2 - 2 a.b, computed for all pairs at once.
    sq_norms = F.sum(x ** 2.0, axis=1, keepdims=True)
    gram = F.dot(x, x.transpose())
    dist_sq = sq_norms + sq_norms.transpose() - 2.0 * gram
    # Adding identity to make sqrt work (keeps the diagonal away from
    # sqrt(0), at the cost of a diagonal of 1 instead of 0).
    return F.sqrt(dist_sq + F.array(np.identity(num_rows)))
def IsAllSpent(self):
    """Flag indicating if all balance is spend.

    Returns:
        bool:
    """
    # All spent unless at least one item is still in the Confirmed state.
    return not any(item == CoinState.Confirmed for item in self.Items)
def is_identity(self):
    """If `self` is I, returns True, otherwise False."""
    # No terms at all is treated as the identity.
    if not self.terms:
        return True
    if len(self.terms) != 1:
        return False
    only_term = self.terms[0]
    # Identity == a single term with no operators and coefficient 1.
    return not only_term.ops and only_term.coeff == 1.0
def evaluate(self, values):
    """Evaluate the "OR" expression

    Check if the left "or" right expression
    evaluate to True.
    """
    # Preserve short-circuit semantics: a truthy left result is returned
    # as-is and the right side is never evaluated.
    left_result = self.left.evaluate(values)
    if left_result:
        return left_result
    return self.right.evaluate(values)
def _LookUpSeasonDirectory ( self , showID , showDir , seasonNum ) :
"""Look up season directory . First attempt to find match from database ,
otherwise search TV show directory . If no match is found in the database
the user can choose to accept a match from the TV show directory , enter
a new directory name... | goodlogging . Log . Info ( "RENAMER" , "Looking up season directory for show {0}" . format ( showID ) )
goodlogging . Log . IncreaseIndent ( )
# Look up existing season folder from database
seasonDirName = self . _db . SearchSeasonDirTable ( showID , seasonNum )
if seasonDirName is not None :
goodlogging . Log . In... |
def get_info(self, security_symbols, info_field_codes):
    """Queries data from a /<security_type>/info endpoint.

    Args:
        security_symbols (list): List of string symbols
        info_field_codes (list): List of string info field codes

    Returns:
        dict of the decoded json from server response.
    """
    # Normalize both arguments so single strings and lists are accepted.
    symbols = self._str_or_list(security_symbols)
    field_codes = self._str_or_list(info_field_codes)
    path = self._build_url_path(symbols, 'info', field_codes)
    return self._get_data(path, None)
def find_usage ( self ) :
"""Determine the current usage for each limit of this service ,
and update corresponding Limit via
: py : meth : ` ~ . AwsLimit . _ add _ current _ usage ` .""" | logger . debug ( "Checking usage for service %s" , self . service_name )
self . connect ( )
self . connect_resource ( )
for lim in self . limits . values ( ) :
lim . _reset_usage ( )
self . _find_usage_instances ( )
self . _find_usage_networking_sgs ( )
self . _find_usage_networking_eips ( )
self . _find_usage_netw... |
def pandas_dataframe_to_unit_arrays ( df , column_units = None ) :
"""Attach units to data in pandas dataframes and return united arrays .
Parameters
df : ` pandas . DataFrame `
Data in pandas dataframe .
column _ units : dict
Dictionary of units to attach to columns of the dataframe . Overrides
the uni... | if not column_units :
try :
column_units = df . units
except AttributeError :
raise ValueError ( 'No units attribute attached to pandas ' 'dataframe and col_units not given.' )
# Iterate through columns attaching units if we have them , if not , don ' t touch it
res = { }
for column in df :
... |
def sanity_check_ir_blocks_from_frontend ( ir_blocks , query_metadata_table ) :
"""Assert that IR blocks originating from the frontend do not have nonsensical structure .
Args :
ir _ blocks : list of BasicBlocks representing the IR to sanity - check
Raises :
AssertionError , if the IR has unexpected structu... | if not ir_blocks :
raise AssertionError ( u'Received no ir_blocks: {}' . format ( ir_blocks ) )
_sanity_check_fold_scope_locations_are_unique ( ir_blocks )
_sanity_check_no_nested_folds ( ir_blocks )
_sanity_check_query_root_block ( ir_blocks )
_sanity_check_output_source_follower_blocks ( ir_blocks )
_sanity_check... |
def yield_sequences_in_list ( paths ) :
"""Yield the discrete sequences within paths . This does not try to
determine if the files actually exist on disk , it assumes you already
know that .
Args :
paths ( list [ str ] ) : a list of paths
Yields :
: obj : ` FileSequence ` :""" | seqs = { }
_check = DISK_RE . match
for match in ifilter ( None , imap ( _check , imap ( utils . asString , paths ) ) ) :
dirname , basename , frame , ext = match . groups ( )
if not basename and not ext :
continue
key = ( dirname , basename , ext )
seqs . setdefault ( key , set ( ) )
if fra... |
def register_deliver_command ( self , deliver_func ) :
"""Add ' deliver ' command for transferring a project to another user . ,
: param deliver _ func : function to run when user choses this option""" | description = "Initiate delivery of a project to another user. Removes other user's current permissions. " "Send message to D4S2 service to send email and allow access to the project once user " "acknowledges receiving the data."
deliver_parser = self . subparsers . add_parser ( 'deliver' , description = description )
... |
def _get_gecos ( name ) :
'''Retrieve GECOS field info and return it in dictionary form''' | gecos_field = pwd . getpwnam ( name ) . pw_gecos . split ( ',' , 3 )
if not gecos_field :
return { }
else : # Assign empty strings for any unspecified trailing GECOS fields
while len ( gecos_field ) < 4 :
gecos_field . append ( '' )
return { 'fullname' : six . text_type ( gecos_field [ 0 ] ) , 'room... |
def is_convex(self):
    """Check if a mesh is convex or not.

    Returns
    -------
    is_convex : bool
        Is mesh convex or not
    """
    # An empty mesh is never reported as convex.
    if self.is_empty:
        return False
    return bool(convex.is_convex(self))
def get_zone(self, zone_id, records=True):
    """Get a zone and its records.

    :param zone_id: the zone id
    :param records: when True (default), include the zone's resource records
    :returns: A dictionary containing a large amount of information about
              the specified zone.
    """
    # Only request the resourceRecords relation when records are wanted.
    record_mask = 'resourceRecords' if records else None
    return self.service.getObject(id=zone_id, mask=record_mask)
def step_command_output_should_contain_text ( context , text ) :
'''EXAMPLE :
Then the command output should contain " TEXT "''' | expected_text = text
if "{__WORKDIR__}" in expected_text or "{__CWD__}" in expected_text :
expected_text = textutil . template_substitute ( text , __WORKDIR__ = posixpath_normpath ( context . workdir ) , __CWD__ = posixpath_normpath ( os . getcwd ( ) ) )
actual_output = context . command_result . output
with on_ass... |
def image_augmentation ( images , do_colors = False , crop_size = None ) :
"""Image augmentation : cropping , flipping , and color transforms .""" | if crop_size is None :
crop_size = [ 299 , 299 ]
images = tf . random_crop ( images , crop_size + [ 3 ] )
images = tf . image . random_flip_left_right ( images )
if do_colors : # More augmentation , but might be slow .
images = tf . image . random_brightness ( images , max_delta = 32. / 255. )
images = tf .... |
def postbuild_arch(self, arch):
    '''Run any post-build tasks for the Recipe. By default, this checks if
    any postbuild_archname methods exist for the archname of the
    current architecture, and runs them if so.
    '''
    # Sentinel-based lookup so a genuinely missing hook is a silent no-op,
    # exactly like the hasattr/getattr pair it replaces.
    _missing = object()
    hook = getattr(self, "postbuild_{}".format(arch.arch), _missing)
    if hook is not _missing:
        hook()
def OSIncludes ( self ) :
"""Microsoft Windows SDK Include""" | include = os . path . join ( self . si . WindowsSdkDir , 'include' )
if self . vc_ver <= 10.0 :
return [ include , os . path . join ( include , 'gl' ) ]
else :
if self . vc_ver >= 14.0 :
sdkver = self . _sdk_subdir
else :
sdkver = ''
return [ os . path . join ( include , '%sshared' % sdk... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.