cls.dprint("%s: layout=%s" % (url, layout))
cls.dprint("%s: layout=%s" % (unicode(url), unicode(layout)))
def getLayoutSubsequent(cls, major_mode_keyword, url):
    #dprint("getLayoutSubsequent")
    ukey = unicode(url).encode("utf-8")
    if major_mode_keyword in cls.layout:
        try:
            key = str(url)
            # Convert old style string keyword to unicode keyword
            if ukey != key and key in cls.layout[major_mode_keyword]:
                cls.layout[major_mode_keyword][ukey] = cls.layout[major_mode_keyword][key]
                del cls.layout[major_mode_keyword][key]
        except UnicodeEncodeError:
            pass
    try:
        layout = cls.layout[major_mode_keyword][ukey]
    except KeyError:
        layout = {}
    cls.dprint("%s: layout=%s" % (url, layout))
    return layout
if action.keyboard is None:
    keyboard = ""
else:
    keyboard = str(action.keyboard)
keyboard = action.keyboard
if keyboard is None:
    keyboard = ""
else:
    keyboard = str(keyboard)
def __cmp__(self, other):
    return cmp(self.name, other.name)
self.pid = wx.Execute(self.cmd, wx.EXEC_ASYNC, self.process)
if wx.Platform != '__WXMSW__':
    flag = wx.EXEC_ASYNC
else:
    flag = wx.EXEC_NOHIDE
self.pid = wx.Execute(self.cmd, flag, self.process)
def run(self, text=""): assert self.dprint("Running %s in %s" % (self.cmd, self.working_dir)) savecwd = os.getcwd() try: os.chdir(self.working_dir) self.process = wx.Process(self.handler) self.process.Redirect(); self.pid = wx.Execute(self.cmd, wx.EXEC_ASYNC, self.process) finally: os.chdir(savecwd) if self.pid==0: assert self.dprint("startup failed") self.process = None wx.CallAfter(self.jobout.startupFailureCallback, self) else: wx.CallAfter(self.jobout.startupCallback, self) size = len(text) fh = self.process.GetOutputStream() assert self.dprint("sending text size=%d to %s" % (size,fh)) # sending large chunks of text to a process's stdin would sometimes # freeze, but breaking up into < 1024 byte pieces seemed to work # on all platforms if size > 1000: for i in range(0,size,1000): last = i+1000 if last>size: last=size assert self.dprint("sending text[%d:%d] to %s" % (i,last,fh)) fh.write(text[i:last]) assert self.dprint("last write = %s" % str(fh.LastWrite())) elif len(text) > 0: fh.write(text) self.process.CloseOutput() self.stdout = self.process.GetInputStream() self.stderr = self.process.GetErrorStream()
return attrs.st_mtime

@classmethod
def get_mtime(cls, ref):
    attrs = cls._stat(ref)
    return attrs.st_mtime
return datetime.fromtimestamp(attrs.st_mtime)

get_ctime = get_mtime
def get_mtime(cls, ref):
    attrs = cls._stat(ref)
    return attrs.st_mtime
return attrs.st_atime
return datetime.fromtimestamp(attrs.st_atime)
def get_atime(cls, ref):
    attrs = cls._stat(ref)
    return attrs.st_atime
self._spelling_debug = True
self._spelling_debug = False
def __init__(self, stc, *args, **kwargs):
    """Mixin must be initialized using this constructor.

    Keyword arguments are also available instead of calling the
    convenience functions.  For L{setIndicator}, use C{indicator},
    C{indicator_color}, and C{indicator_style}; for L{setLanguage},
    use C{language}; and for L{setMinimumWordSize}, use
    C{min_word_size}.  See the descriptions of those methods for more
    info.

    @kwarg language: default language string recognized by enchant
    (e.g. "en_US", "kr_KR", etc.).  If a default language isn't
    explicitly specified here, the default language is taken from the
    class method L{setDefaultLanguage}.

    @kwarg check_region: optional function to specify if the region
    should be spell checked.  The function should return True if the
    position should be spell-checked, and False if it doesn't make
    sense to spell check that part of the document.  The function
    should be a bound method of the STC.

    @kwarg idle_count: number of idle events that have to occur before
    an idle event is actually processed.  This reduces processor usage
    by only processing one out of every idle_count events.
    """
    self.stc = stc
    self.setIndicator(kwargs.get('indicator', 2),
                      kwargs.get('indicator_color', "#FF0000"),
                      kwargs.get('indicator_style', wx.stc.STC_INDIC_SQUIGGLE))
    self.setMinimumWordSize(kwargs.get('min_word_size', 3))
    if 'language' in kwargs:
        self.setDefaultLanguage(kwargs['language'])
    if 'check_region' in kwargs:
        self._spell_check_region = kwargs['check_region']
    else:
        self._spell_check_region = lambda s: True
    if 'idle_count' in kwargs:
        self._num_idle_ticks = kwargs['idle_count']
    else:
        self._num_idle_ticks = 10
    self._idle_ticks = 0
    self._spelling_debug = True
    self._spelling_last_idle_line = -1
    self.dirty_range_count_per_idle = 5
    self._no_update = False
    self._last_block = -1
    self.clearDirtyRanges()
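A hypothetical subclass wiring this mixin into a StyledTextCtrl, using the keyword forms the docstring describes (the mixin and subclass names here are assumptions, not names from this codebase):

import wx
import wx.stc

class SpellingSTC(SpellingMixin, wx.stc.StyledTextCtrl):  # SpellingMixin is an assumed name
    def __init__(self, parent):
        wx.stc.StyledTextCtrl.__init__(self, parent)
        # self is both the host control and the 'stc' argument
        SpellingMixin.__init__(self, self, language="en_US",
                               min_word_size=4, idle_count=5)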
list.InsertSizedColumn(0, "URL", min=100, greedy=False)
list.InsertSizedColumn(0, "File", min=100, greedy=False)
def createColumns(self, list):
    list.InsertSizedColumn(0, "URL", min=100, greedy=False)
    list.InsertSizedColumn(1, "Line", min=10, greedy=False)
    list.InsertSizedColumn(2, "Match", min=300, greedy=True)
return (unicode(item.url), item.line, unicode(item.text))
return (unicode(item.short), item.line, unicode(item.text), item.url)
def getItemRawValues(self, index, item):
    return (unicode(item.url), item.line, unicode(item.text))
self.frame.open(values[0], options={'line':values[1] - 1})
self.frame.open(values[3], options={'line':values[1] - 1})
def OnItemActivated(self, evt):
    index = evt.GetIndex()
    orig_index = self.list.GetItemData(index)
    values = self.list.itemDataMap[orig_index]
    dprint(values)
    self.frame.open(values[0], options={'line': values[1] - 1})
dprint("no selection; cursor at %s" % start)
self.dprint("no selection; cursor at %s" % start)
def action(self, index=-1, multiplier=1):
    s = self.mode
    # FIXME: Because the autoindenter depends on the styling information,
    # need to make sure the document is up to date.  But, is this call to
    # style the entire document fast enough in practice, or will it have
    # to be optimized?
    s.Colourise(0, s.GetTextLength())
dprint("selection: %s - %s" % (start, end))
self.dprint("selection: %s - %s" % (start, end))
except re.error:
self.error = "" except re.error, errmsg:
def __init__(self, string, match_case):
    try:
        if not match_case:
            flags = re.IGNORECASE
        else:
            flags = 0
        self.cre = re.compile(string, flags)
    except re.error:
        self.cre = None
    self.last_match = None
return bool(self.cre)
return bool(self.string) and bool(self.cre)

def getErrorString(self):
    if len(self.string) == 0:
        return "Search error: search string is blank"
    return "Regular expression error: %s" % self.error
def isValid(self):
    return bool(self.cre)
self.setStatusText("Invalid search string.")
if hasattr(matcher, "getErrorString"): error = matcher.getErrorString() else: error = "Invalid search string." self.setStatusText(error) self.showSearchButton(False)
def OnStartSearch(self, evt):
    if not self.isSearchRunning():
        self.showSearchButton(True)
        method = self.buffer.stc.search_method.option
        if method.isValid():
            status = SearchStatus(self)
            matcher = self.buffer.stc.search_type.option.getStringMatcher(self.search_text.GetValue())
            ignorer = WildcardListIgnorer(self.ignore_filenames.GetValue())
            if matcher.isValid():
                self.buffer.stc.clearSearchResults()
                self.buffer.stc.setPrefix(method.getPrefix())
                self.resetList()
                self.status_info.startProgress("Searching...")
                self.thread = SearchThread(self.buffer.stc, matcher, ignorer, status)
                self.thread.start()
            else:
                self.setStatusText("Invalid search string.")
        else:
            self.setStatusText(method.getErrorString())
if not prefix.endswith("/"): prefix += "/"
if not prefix.endswith(os.sep): prefix += os.sep
def getPrefix(self):
    prefix = unicode(self.pathname)
    if not prefix.endswith("/"):
        prefix += "/"
    return prefix
self.wrapper.spring.clearRadio()
self.frame.spring.clearRadio()
try:
    self.wrapper.spring.clearRadio()
    self.frame.spring.clearRadio()
except wx.PyDeadObjectError:
    pass
def OnFocus(self, evt):
    """Callback used to pop down any springtabs.

    When the major mode loses keyboard focus, the springtabs should be
    cleared to allow the new focus receiver to display itself.  This
    fails when the major mode never takes keyboard focus at all, in
    which case a focus-lost event is never generated and this method
    never gets called.
    """
    self.wrapper.spring.clearRadio()
    self.frame.spring.clearRadio()
    evt.Skip()
options = {'byte_order': self.endian}
options['byte_order'] = self.endian
def action(self, index=-1, multiplier=1):
    filename = self.frame.showSaveAs("Save Image as ENVI",
                                     wildcard="BIL (*.bil)|*.bil|BIP (*.bip)|*.bip|BSQ (*.bsq)|*.bsq")
    if filename:
        root, ext = os.path.splitext(filename)
        ext = ext.lower()
        if ext in ['.bil', '.bip', '.bsq']:
            handler = HyperspectralFileFormat.getHandlerByName("ENVI")
            if handler:
                try:
                    self.mode.showBusy(True)
                    self.mode.status_info.startProgress("Exporting to %s" % filename)
                    wx.GetApp().cooperativeYield()
                    if self.endian:
                        options = {'byte_order': self.endian}
                    handler.export(filename, self.mode.cube, options=options,
                                   progress=self.updateProgress)
                    self.mode.status_info.stopProgress("Saved %s" % filename)
                    wx.GetApp().cooperativeYield()
                finally:
                    self.mode.showBusy(False)
            else:
                self.mode.setStatusText("Can't find ENVI handler")
        else:
            self.frame.showErrorDialog("Unrecognized file format %s\n\nThe filename extension determines the\ninterleave format.  Use a filename extension of\n.bip, .bil, or .bsq" % filename)
list.InsertSizedColumn(0, "File", min=100, greedy=False)
list.InsertSizedColumn(0, "File", min=100, max=250, greedy=False)
def createColumns(self, list):
    list.InsertSizedColumn(0, "File", min=100, greedy=False)
    list.InsertSizedColumn(1, "Line", min=10, greedy=False)
    list.InsertSizedColumn(2, "Match", min=300, greedy=True)
list.InsertSizedColumn(2, "Match", min=300, greedy=True)
list.InsertSizedColumn(2, "Match", min=300, greedy=True, ok_offscreen=True)
self.raw.tofile(str(url.path))
filename = unicode(url.path)
try:
    self.raw.tofile(filename)
except ValueError:
    fd = open(filename, "wb")
    flat = self.raw.ravel()
    size = flat.size
    start = 0
    while start < size:
        last = start + 10000
        if last > size:
            last = size
        fd.write(flat[start:last].tostring())
        start = last
def save(self, url):
    if self.mmap:
        self.mmap.flush()
        self.mmap.sync()
    else:
        self.raw.tofile(str(url.path))
ukey = unicode(url)
ukey = unicode(url).encode("utf-8")
def getLayoutSubsequent(cls, major_mode_keyword, url):
    #dprint("getLayoutSubsequent")
    ukey = unicode(url)
    if major_mode_keyword in cls.layout:
        try:
            key = str(url)
            # Convert old style string keyword to unicode keyword
            if key in cls.layout[major_mode_keyword]:
                cls.layout[major_mode_keyword][ukey] = cls.layout[major_mode_keyword][key]
                del cls.layout[major_mode_keyword][key]
        except UnicodeEncodeError:
            pass
    try:
        return cls.layout[major_mode_keyword][ukey]
    except KeyError:
        return {}
if key in cls.layout[major_mode_keyword]:
if ukey != key and key in cls.layout[major_mode_keyword]:
return cls.layout[major_mode_keyword][ukey]
layout = cls.layout[major_mode_keyword][ukey]
return {}
layout = {}
cls.dprint("%s: layout=%s" % (url, layout))
return layout
if self.frame == frame:
if not self.frame:
    dprint("Frame has been deleted!!!  Message was:")
    dprint(message)
    dlg = wx.MessageDialog(wx.GetApp().GetTopWindow(), message,
                           "Error message for deleted frame!!!",
                           wx.OK | wx.ICON_EXCLAMATION)
    retval = dlg.ShowModal()
    dlg.Destroy()
elif self.frame == frame:
def showError(self, message=None):
    data = message.data
    if isinstance(data, tuple) or isinstance(data, list):
        frame = data[0]
        text = data[1]
    else:
        frame = wx.GetApp().GetTopWindow()
        text = data
    if self.frame == frame:
        paneinfo = frame._mgr.GetPane(self)
        if self.classprefs.unhide_on_message:
            if not paneinfo.IsShown():
                paneinfo.Show(True)
                frame._mgr.Update()
        if message.topic[-1] == 'wrap':
            columns = 72
            import textwrap
            text = textwrap.fill(text, columns)
        self.addMessage(text)
    StrParam('minor_modes', 'GraphvizView'),
    )

class GraphvizViewMinorMode(MinorMode, JobOutputMixin, wx.Panel, debugmixin):
    """Display the graphical view of the DOT file.

    This displays the graphic image that is represented by the .dot
    file.  It calls the external graphviz program and displays a bitmap
    version of the graph.
    """
    keyword = "GraphvizView"
    default_classprefs = (
        IntParam('best_width', 300),
        IntParam('best_height', 300),
        IntParam('min_width', 300),
        IntParam('min_height', 300),
StrParam('graphic_format', 'png'),
StrParam('layout', 'dot'),
SupersededParam('output_log')
def action(self, index=-1, multiplier=1): self.frame.open("about:sample.dot")
dotprogs = ['dot', 'neato', 'twopi', 'circo', 'fdp']
def getInterpreterArgs(self):
    self.dot_output = vfs.reference_with_new_extension(self.buffer.url, self.classprefs.graphic_format)
    args = "%s -v -T%s -K%s -o%s" % (self.classprefs.interpreter_args,
                                     self.classprefs.graphic_format,
                                     self.classprefs.layout,
                                     str(self.dot_output.path))
    return args
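For reference, with the default classprefs shown earlier (graphic_format='png', layout='dot') and assuming interpreter_args names the dot executable, the string built above comes out roughly as:

# dot -v -Tpng -Kdot -o/path/to/buffer.png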
@classmethod
def worksWithMajorMode(cls, mode):
    if mode.__class__ == GraphvizMode:
        return True
    return False

def __init__(self, parent, **kwargs):
    MinorMode.__init__(self, parent, **kwargs)
    wx.Panel.__init__(self, parent)
    self.sizer = wx.BoxSizer(wx.VERTICAL)
    self.SetSizer(self.sizer)
    buttons = wx.BoxSizer(wx.HORIZONTAL)
    self.prog = wx.Choice(self, -1, (100, 50), choices=self.dotprogs)
    self.prog.SetSelection(0)
    buttons.Add(self.prog, 1, wx.EXPAND)
    self.regen = wx.Button(self, -1, "Regenerate")
    self.regen.Bind(wx.EVT_BUTTON, self.OnRegenerate)
    buttons.Add(self.regen, 1, wx.EXPAND)
    self.sizer.Add(buttons)
    self.preview = None
    self.drawing = BitmapScroller(self)
    self.sizer.Add(self.drawing, 1, wx.EXPAND)
    self.process = None
    self.Bind(wx.EVT_SIZE, self.OnSize)
    self.Layout()

def deletePreHook(self):
    if self.process is not None:
        self.process.kill()

def busy(self, busy):
    if busy:
        cursor = wx.StockCursor(wx.CURSOR_WATCH)
    else:
        cursor = wx.StockCursor(wx.CURSOR_DEFAULT)
    self.SetCursor(cursor)
    self.drawing.SetCursor(cursor)
    self.regen.SetCursor(cursor)
    self.regen.Enable(not busy)
    self.prog.SetCursor(cursor)
    self.prog.Enable(not busy)

def OnRegenerate(self, event):
    prog = os.path.normpath(os.path.join(self.mode.classprefs.path, self.prog.GetStringSelection()))
    assert self.dprint("using %s to run graphviz" % repr(prog))
    cmd = "%s -Tpng" % prog
    ProcessManager().run(cmd, self.mode.buffer.cwd(), self, self.mode.buffer.stc.GetText())
def getJobOutput(self):
    return self
self.busy(True)
def startupCallback(self, job):
    self.process = job
    self.busy(True)
    self.preview = StringIO()
self.process = None
self.busy(False)
self.createImage()

def createImage(self):
    assert self.dprint("using image, size=%s" % len(self.preview.getvalue()))
    if len(self.preview.getvalue()) == 0:
        self.mode.setStatusText("Error running graphviz!")
        return
    fh = StringIO(self.preview.getvalue())
    img = wx.EmptyImage()
    if img.LoadStream(fh):
        self.bmp = wx.BitmapFromImage(img)
        self.mode.setStatusText("Graphviz completed.")
    else:
        self.bmp = None
        self.mode.setStatusText("Invalid image")
    self.drawing.setBitmap(self.bmp)

def OnSize(self, evt):
    self.Refresh()
    evt.Skip()
del self.process
self.frame.findTabOrOpen(self.dot_output)
def finishedCallback(self, job):
    assert self.dprint()
    self.process = None
    self.busy(False)
    self.createImage()
    # Don't call evt.Skip() here because it causes a crash
def getMinorModes(self):
    yield GraphvizViewMinorMode
def getMajorModes(self):
    yield GraphvizMode
def download(self, url, path):
def download(url, path):
def download(self, url, path):
    """Download url and store it at path."""
    f = None
    g = None
    try:
        f = urllib.urlopen(url)
        g = open(path, 'wb')
        copyobj(f, g)
    finally:
        if f:
            f.close()
        if g:
            g.close()
"""
""")
def usage():
    sys.stderr.write(
        "Usage: %s COMMAND\n" % os.path.basename(sys.argv[0]))
    sys.stderr.write(
        """
except DBLockDeadlockError, e:
except bdb.DBLockDeadlockError, e:
def with_transaction(self, *args, **kwargs):
    deadlocks = 0
    while True:
        txn = self.env.txn_begin()
        try:
            result = method(self, txn, *args, **kwargs)
            txn.commit()
            return result
        except DBLockDeadlockError, e:
            txn.abort()
            deadlocks += 1
            if deadlocks < retries:
                self.log.info("Deadlock detected, retrying")
                continue
            else:
                self.log.error("Deadlock detected, aborting")
                raise
        except:
            txn.abort()
            raise
def list(self):
def list(self, txn):
def list(self):
    cur = self.db.cursor(txn)
    try:
        result = []
        current = cur.first()
        while current is not None:
            rec = pickle.loads(current[1])
            rec['lfn'] = current[0]
            result.append(rec)
            current = cur.next()
        return result
    finally:
        cur.close()
def with_transaction(method):
def with_transaction(method, retries=3):
def with_transaction(method):
    def with_transaction(self, *args, **kwargs):
        if len(args) > 0 and isinstance(args[0], Database):
            return method(self, *args, **kwargs)
        else:
            txn = self.env.txn_begin()
            try:
                result = method(self, txn, *args, **kwargs)
                txn.commit()
                return result
            except:
                txn.abort()
                raise
    return with_transaction
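Reading the hunks around this decorator together, the deadlock-retrying variant plausibly assembles as follows (a sketch, not the verbatim final code; it assumes the Berkeley DB module is imported as bdb, as elsewhere in this file):

def with_transaction(method, retries=3):
    """Wrap a Database method in a BDB transaction, retrying deadlocks."""
    def wrapper(self, *args, **kwargs):
        deadlocks = 0
        while True:
            txn = self.env.txn_begin()
            try:
                result = method(self, txn, *args, **kwargs)
                txn.commit()
                return result
            except bdb.DBLockDeadlockError:
                # Abort and retry a bounded number of times on deadlock
                txn.abort()
                deadlocks += 1
                if deadlocks < retries:
                    self.log.info("Deadlock detected, retrying")
                    continue
                self.log.error("Deadlock detected, aborting")
                raise
            except:
                txn.abort()
                raise
    return wrapper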
if len(args) > 0 and isinstance(args[0], Database):
    return method(self, *args, **kwargs)
else:
deadlocks = 0
while True:
def with_transaction(self, *args, **kwargs):
    if len(args) > 0 and isinstance(args[0], Database):
        return method(self, *args, **kwargs)
    else:
        txn = self.env.txn_begin()
        try:
            result = method(self, txn, *args, **kwargs)
            txn.commit()
            return result
        except:
            txn.abort()
            raise
class CheckpointThread(Thread):
class DatabaseManagerThread(Thread):
self.log = log.get_log("ckpt_thread")
self.log = log.get_log("bdb manager")
def __init__(self, env, interval=300):
    Thread.__init__(self)
    self.setDaemon(True)
    self.log = log.get_log("ckpt_thread")
    self.env = env
    self.interval = interval
if bdb.version() > (4,7):
if hasattr(self.env, "log_set_config"):
def __init__(self, path, name, duplicates=False):
    self.path = path
    self.dbpath = os.path.join(self.path, name)
    if not os.path.isdir(self.path):
        os.makedirs(self.path)
    self.env = bdb.DBEnv()
    self.env.set_tx_max(self.max_txns)
    self.env.set_lk_max_lockers(self.max_txns * 2)
    self.env.set_lk_max_locks(self.max_txns * 2)
    self.env.set_lk_max_objects(self.max_txns * 2)
    self.env.set_flags(bdb.DB_TXN_NOSYNC, True)
    if bdb.version() > (4, 7):
        self.env.log_set_config(bdb.DB_LOG_AUTO_REMOVE, True)
    self.env.open(self.path,
                  bdb.DB_CREATE | bdb.DB_INIT_LOCK | bdb.DB_INIT_LOG |
                  bdb.DB_INIT_MPOOL | bdb.DB_INIT_TXN | bdb.DB_RECOVER |
                  bdb.DB_THREAD)
    self.db = bdb.DB(self.env)
    if duplicates:
        self.db.set_flags(bdb.DB_DUPSORT)
    if bdb.version() > (4, 1):
        txn = self.env.txn_begin()
        self.db.open(self.dbpath, name, flags=bdb.DB_CREATE | bdb.DB_THREAD,
                     dbtype=bdb.DB_BTREE, txn=txn)
        txn.commit()
    else:
        self.db.open(self.dbpath, name, flags=bdb.DB_CREATE | bdb.DB_THREAD,
                     dbtype=bdb.DB_BTREE)
def lookup(self, lfn):
    cur = self.db.cursor()
@with_transaction
def lookup(self, txn, lfn):
    cur = self.db.cursor(txn)
def delete(self, txn, lfn, pfn=None):
    cur = self.db.cursor(txn)
    try:
        if pfn is None:
            current = cur.set(lfn)
            while current is not None:
                cur.delete()
                current = cur.next_dup()
        else:
            current = cur.set_both(lfn, pfn)
            if current is not None:
                cur.delete()
    finally:
        cur.close()
def get(self, lfn):
    current = self.db.get(lfn)
@with_transaction
def get(self, txn, lfn):
    current = self.db.get(lfn, txn)
def get(self, lfn):
    current = self.db.get(lfn)
    if current is not None:
        return pickle.loads(current)
    else:
        return None
@with_transaction
def remove(self, txn, lfn):
    self.db.delete(lfn, txn)
cur = self.db.cursor()
cur = self.db.cursor(txn)
def list(self):
    cur = self.db.cursor()
    try:
        result = []
        current = cur.first()
        while current is not None:
            rec = pickle.loads(current[1])
            rec['lfn'] = current[0]
            result.append(rec)
            current = cur.next()
        return result
    finally:
        cur.close()
def lookup(lfn):
def lookup(self, conn, lfn):
def lookup(lfn):
    cur = conn.cursor()
    cur.execute("select pfn from map where lfn=?", (lfn,))
    pfns = []
    for row in cur.fetchall():
        pfns.append(row['pfn'])
    cur.close()
    return pfns
current = self.db.get(lfn, txn)
current = self.db.get(lfn, None, txn)
def get(self, txn, lfn):
    current = self.db.get(lfn, txn)
    if current is not None:
        return pickle.loads(current)
    else:
        return None
@mockdata.check_http_method
def _http_request(self, path, data=None, method=None, **kwargs):
def _http_request(self, path, data=None, method=None):
def __repr__(self):
    return '<MailmanRESTClient: %s>' % self.host
:param data: POST, PUT or PATCH data to send
:param data: POST oder PUT data to send
def _http_request(self, path, data=None, method=None, **kwargs):
    """Send an HTTP request.

    :param path: the path to send the request to
    :type path: string
    :param data: POST, PUT or PATCH data to send
    :type data: dict
    :param method: the HTTP method; defaults to GET or POST (if data
        is not None)
    :type method: string
    :return: the request content or a status code, depending on the
        method and if the request was successful
    :rtype: int, list or dict
    """
    url = self.host + path
    # Include general header information
    headers = {
        'User-Agent': 'MailmanRESTClient',
        'Accept': 'text/plain',
    }
    if data is not None:
        data = urlencode(data)
    if method is None:
        if data is None:
            method = 'GET'
        else:
            method = 'POST'
    method = method.upper()
    if method == 'POST':
        headers['Content-type'] = "application/x-www-form-urlencoded"
    response, content = Http().request(url, method, data, headers)
    if method == 'GET':
        if response.status // 100 != 2:
            return response.status
        else:
            return json.loads(content)
    else:
        return response.status
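A hypothetical round trip through this helper (host, port, and payload are made up; per the code above, GET returns the decoded JSON on 2xx and the status code otherwise, while other methods return the status code):

client = MailmanRESTClient('http://localhost:8001')
lists = client._http_request('/3.0/lists')  # GET
status = client._http_request('/3.0/lists', data={'fqdn_listname': 'test@example.com'})  # POST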
data = urlencode(data)
data = urlencode(data, doseq=True)
headers['Content-type'] = "application/x-www-form-urlencoded"
if method == 'POST':
    headers['Content-type'] = "application/x-www-form-urlencoded"
def get_member(self, email_address, fqdn_listname):
    """Return a member object.

    :param email_address: the email address used
    :type email_address: string
    :param fqdn_listname: the mailing list
    :type fqdn_listname: string
    :return: a member object
    :rtype: _Member
    """
    return _Member(self.host, email_address, fqdn_listname)

def get_user(self, email_address):
    """Find and return a user object.

    :param email_address: one of the user's email addresses
    :type email_address: string
    :returns: a user object
    :rtype: _User
    """
    return _User(self.host, email_address)
def get_member(self, email_address, fqdn_listname):
    """Return a member object.

    :param email_address: the email address used
    :type email_address: string
    :param fqdn_listname: the mailing list
    :type fqdn_listname: string
    :return: a member object
    :rtype: _Member
    """
    return _Member(self.host, email_address, fqdn_listname)
@mockdata.add_list_mock_data
def delete_list(self, list_name):
    fqdn_listname = list_name + '@' + self.info['email_host']
    return self._http_request('/3.0/lists/' + fqdn_listname, None, 'DELETE')
def get_member(self, email_address): """Return a member object. :param email_adresses: the email address used :type email_address: string :param fqdn_listname: the mailing list :type fqdn_listname: string :return: a member object :rtype: _Member """ return _Member(self.host, email_address, self.info['fqdn_listname']) def update_list(self, data): """Update the settings for a list. """ return self._http_request('/3.0/lists/' + self.info['fqdn_listname'], data, method='PATCH') def __str__(self): """A string representation of a list. """ return "A list object for the list '%s'." % self.info['fqdn_listname'] @mockdata.add_user_mock_data class _User(MailmanRESTClient): """A user wrapper for the MailmanRESTClient.""" def __init__(self, host, email_address): """Connect to host and get user information. :param host: the host name of the REST API :type host: string :param email_address: email address :type email_address: string :return: a user object :rtype: _User """ super(_User, self).__init__(host) self.email_address = email_address self.info = {} def get_email_addresses(self): """Return a list of all email adresses used by this user. :return: a list of email addresses :rtype: list """ response = self._http_request('/3.0/users/' + self.email_address + '/email_adresses') if type(response) is int: return response elif 'entries' not in response: return [] else: return sorted(response['entries']) def get_lists(self): """Return a list of all mailing list connected to a user. :return: a list of dicts with all mailing lists :rtype: list """ path = '/3.0/users/%s/lists' % self.email_address response = self._http_request(path) if type(response) is int: return response elif 'entries' not in response: return [] else: return sorted(response['entries'], key=itemgetter('fqdn_listname')) def update(self, data=None): """Update user settings.""" if data is None: data = self.info path = '/3.0/users/%s' % (self.email_address) return self._http_request(path, data, method='PATCH') def __str__(self): """A string representation of a member.""" return "A user object for the user '%s'." % self.info['real_name'] @mockdata.add_member_mock_data class _Member(MailmanRESTClient): """A user wrapper for the MailmanRESTClient.""" def __init__(self, host, email_address, fqdn_listname): """Connect to host and get membership information. :param host: the host name of the REST API :type host: string :param email_address: email address :type email_address: string :param fqdn_listname: the mailing list :type fqdn_listname: string :return: a member object :rtype: _Member """ super(_Member, self).__init__(host) self.info = {} self.email_address = email_address self.fqdn_listname = fqdn_listname def update(self, data=None): """Update member settings.""" if data is None: data = self.info path = '/3.0/lists/%s/member/%s' % (self.fqdn_listname, self.email_address) return self._http_request(path, data, method='PATCH') def __str__(self): """A string representation of a member.""" return "A member object for '%s', subscribed to '%s'." \ % (self.email_address, self.fqdn_listname)
def update_config(self, data):
    """Update the list configuration.

    :param data: a dict with the config attributes to be updated
    :type data: dict
    :return: the return code of the http request
    :rtype: integer
    """
    url = '/3.0/lists/%s/config' % self.info['fqdn_listname']
    status = self._http_request(url, data, 'PATCH')
    if status == 200:
        for key in data:
            self.config[key] = data[key]
    return status
def get_members(self):
    """Get a list of all list members.
print("-> glob_corr=%f\n" % glob_corr)
print("-> glob_corr={}\n".format(glob_corr))
def correl_split_weighted(X, Y, segments):
    # expects segments = [(0,i1-1),(i1-1,i2-1),(i2,len-1)]
    correl = list()
    interv = list()  # regr. line coeffs and range
    glob_corr = 0
    sum_nb_val = 0
    for (start, stop) in segments:
        sum_nb_val = sum_nb_val + stop - start
        #if start==stop:
        #    return 0
        S_XY = cov(X[start:stop+1], Y[start:stop+1])
        S_X2 = variance(X[start:stop+1])
        S_Y2 = variance(Y[start:stop+1])  # to compute correlation
        if S_X2 * S_Y2 == 0:
            return (0, [])
        c = S_XY / (sqrt(S_X2) * sqrt(S_Y2))
        a = S_XY / S_X2  # regr line coeffs
        b = avg(Y[start:stop+1]) - a * avg(X[start:stop+1])
        print(" range [%d,%d] corr=%f, coeff det=%f [a=%f, b=%f]" % (X[start], X[stop], c, c**2, a, b))
        correl.append((c, stop - start))  # store correl. coef + number of values (segment length)
        interv.append((a, b, X[start], X[stop]))
    for (c, l) in correl:
        glob_corr = glob_corr + (l / sum_nb_val) * c  # weighted sum of correlations
        print('-- %f * %f' % (c, l / sum_nb_val))
    print("-> glob_corr=%f\n" % glob_corr)
    return (glob_corr, interv)
readdata.append((int(l[1]), float(l[3]) / 2))
readdata.append((int(l[1]), float(l[3])))
def correl_split(X, Y, segments):
    # expects segments = [(0,i1-1),(i1-1,i2-1),(i2,len-1)]
    correl = list()
    interv = list()  # regr. line coeffs and range
    glob_corr = 1
    for (start, stop) in segments:
        #if start==stop:
        #    return 0
        S_XY = cov(X[start:stop+1], Y[start:stop+1])
        S_X2 = variance(X[start:stop+1])
        S_Y2 = variance(Y[start:stop+1])  # to compute correlation
        if S_X2 * S_Y2 == 0:
            return (0, [])
        c = S_XY / (sqrt(S_X2) * sqrt(S_Y2))
        a = S_XY / S_X2  # regr line coeffs
        b = avg(Y[start:stop+1]) - a * avg(X[start:stop+1])
        print(" range [%d,%d] corr=%f, coeff det=%f [a=%f, b=%f]" % (X[start], X[stop], c, c**2, a, b))
        correl.append((c, stop - start))  # store correl. coef + number of values (segment length)
        interv.append((a, b, X[start], X[stop]))
    for (c, l) in correl:
        glob_corr = glob_corr * c  # product of correlation coeffs
    print("-> glob_corr=%f\n" % glob_corr)
    return (glob_corr, interv)
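A small worked call, assuming the avg/cov/variance helpers defined further down and made-up data that is perfectly linear on each segment, so each per-segment coefficient is 1.0 and the product is 1.0:

X = [0, 1, 2, 3, 4, 5]
Y = [0.0, 1.0, 2.0, 10.0, 11.0, 12.0]
glob_corr, lines = correl_split(X, Y, [(0, 2), (3, 5)])  # glob_corr == 1.0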
print("** OPT: [%d .. %d]" % (i,j)) print("** Product of correl coefs = %f" % (max_glob_corr))
print("** OPT: [%d .. %d] correl coef prod=%f slope: %f x + %f" % (i,j,max_glob_corr,a,b))
for i in xrange(n):
for i in range(n):
def cov(X, Y):
    assert len(X) == len(Y)
    n = len(X)  # n=len(X)=len(Y)
    avg_X = avg(X)
    avg_Y = avg(Y)
    S_XY = 0.0
    for i in xrange(n):
        S_XY += (X[i] - avg_X) * (Y[i] - avg_Y)
    return (S_XY / n)
for i in xrange(n):
for i in range(n):
def variance(X):
    n = len(X)
    avg_X = avg(X)
    S_X2 = 0.0
    for i in xrange(n):
        S_X2 += (X[i] - avg_X) ** 2
    return (S_X2 / n)
timings.append(float(l[3]) / links)
sizes.append(int(l[1]))
readdata.append((int(l[1]), float(l[3]) / 2))
sorteddata = sorted(readdata, key=lambda pair: pair[0])
sizes, timings = zip(*sorteddata)
def calibrate(links, latency, bandwidth, sizes, timings):
    assert len(sizes) == len(timings)
    if len(sizes) < 2:
        return None
    S_XY = cov(sizes, timings)
    S_X2 = variance(sizes)
    a = S_XY / S_X2
    b = avg(timings) - a * avg(sizes)
    return (b * 1e-6) / (latency * links), 1e6 / (a * bandwidth)
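calibrate is a plain least-squares fit of timing = a*size + b (slope a = S_XY/S_X2, intercept b = mean(timings) - a*mean(sizes)); a quick sanity check with made-up numbers:

sizes = [1000, 2000, 3000]
timings = [3.0, 5.0, 7.0]  # exactly timing = 0.002 * size + 1.0
a = cov(sizes, timings) / variance(sizes)  # -> 0.002
b = avg(timings) - a * avg(sizes)          # -> 1.0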
for i in xrange(5, len(sys.argv)):
    limits += [idx for idx in xrange(len(sizes)) if sizes[idx] == int(sys.argv[i])]
for i in range(5, len(sys.argv)):
    limits += [idx for idx in range(len(sizes)) if sizes[idx] == int(sys.argv[i])]
requires=("gtk (>=2.12.0)",),
requires=("gtk",),
def run(self): """Build tarballs and create additional files.""" if os.path.isfile("ChangeLog"): os.remove("ChangeLog") os.system("tools/generate-change-log > ChangeLog") assert os.path.isfile("ChangeLog") assert open("ChangeLog", "r").read().strip() distutils.command.sdist.sdist.run(self) basename = "nfoview-%s" % self.__version tarballs = os.listdir(self.dist_dir) os.chdir(self.dist_dir) # Compare tarball contents with working copy. temp_dir = tempfile.gettempdir() test_dir = os.path.join(temp_dir, basename) tobj = tarfile.open(tarballs[-1], "r") for member in tobj.getmembers(): tobj.extract(member, temp_dir) log.info("comparing tarball (tmp) with working copy (../..)") os.system('diff -qr -x ".*" -x "*.pyc" ../.. %s' % test_dir) response = raw_input("Are all files in the tarball [Y/n]? ") if response.lower() == "n": raise SystemExit("Must edit MANIFEST.in.") shutil.rmtree(test_dir) # Create extra distribution files. log.info("calculating md5sums") os.system("md5sum * > %s.md5sum" % basename) log.info("creating '%s.changes'" % basename) source = os.path.join("..", "..", "ChangeLog") shutil.copyfile(source, "%s.changes" % basename) log.info("creating '%s.news'" % basename) source = os.path.join("..", "..", "NEWS") shutil.copyfile(source, "%s.news" % basename) for tarball in tarballs: log.info("signing '%s'" % tarball) os.system("gpg --detach %s" % tarball)
while not lines[-1]:
while lines and not lines[-1]:
def _read_file(self, path, encoding=None):
    """Read and return the text of the NFO file.
run_command_or_warn(("update-desktop-database", directory))
run_command_or_warn('update-desktop-database "%s"' % directory)
def run(self): """Install everything and update the desktop file database.""" install.run(self) get_command_obj = self.distribution.get_command_obj root = get_command_obj("install").root data_dir = get_command_obj("install_data").install_dir # Assume we're actually installing if --root was not given. if (root is not None) or (data_dir is None): return directory = os.path.join(data_dir, "share", "applications") log.info("updating desktop database in '%s'" % directory) run_command_or_warn(("update-desktop-database", directory))
self.original.tap( x=x, y=y, z=self.z2(z), self.z2(zretract), depth, standoff, dwell_bottom, pitch, stoppos, spin_in, spin_out, tap_mode, direction)
self.original.tap( x, y, self.z2(z), self.z2(zretract), depth, standoff, dwell_bottom, pitch, stoppos, spin_in, spin_out, tap_mode, direction)
def tap(self, x=None, y=None, z=None, zretract=None, depth=None, standoff=None,
        dwell_bottom=None, pitch=None, stoppos=None, spin_in=None, spin_out=None,
        tap_mode=None, direction=None):
    self.original.tap( x=x, y=y, z=self.z2(z), self.z2(zretract), depth, standoff, dwell_bottom, pitch, stoppos, spin_in, spin_out, tap_mode, direction)
move = False
no_move = True
def Parse(self, name, oname=None):
    self.files_open(name, oname)
    #self.begin_ncblock()
    #self.begin_path(None)
    #self.add_line(z=500)
    #self.end_path()
    #self.end_ncblock()
    path_col = None
    f = None
    arc = 0
drill = True
move = False
path_col = "feed"
col = "feed"
drill = True
no_move = True
path_col = "feed"
col = "feed"
move = False
no_move = True
drill = True
move = False
drill = True
no_move = True
if (move):
if (move and not no_move):
if direction == "on": return if roll_on == None: return
if direction == "on": roll_on = None
def add_roll_on(k, roll_on_k, direction, roll_radius, offset_extra, roll_on):
    if direction == "on":
        return
    if roll_on == None:
        return
    num_spans = kurve.num_spans(k)
    if num_spans == 0:
        return
    if roll_on == 'auto':
        sp, sx, sy, ex, ey, cx, cy = kurve.get_span(k, 0)
        vx, vy = kurve.get_span_dir(k, 0, 0)  # get start direction
        if direction == 'right':
            off_vx = vy
            off_vy = -vx
        else:
            off_vx = -vy
            off_vy = vx
        rollstartx = sx + off_vx * roll_radius - vx * roll_radius
        rollstarty = sy + off_vy * roll_radius - vy * roll_radius
    else:
        rollstartx, rollstarty = roll_on
    sp, sx, sy, ex, ey, cx, cy = kurve.get_span(k, 0)
    if sx == rollstartx and sy == rollstarty:
        return
    vx, vy = kurve.get_span_dir(k, 0, 0)  # get start direction
    rcx, rcy, rdir = kurve.tangential_arc(sx, sy, -vx, -vy, rollstartx, rollstarty)
    rdir = -rdir  # because the tangential_arc was used in reverse
    # add a start roll on point
    kurve.add_point(roll_on_k, 0, rollstartx, rollstarty, 0, 0)
    # add the roll on arc
    kurve.add_point(roll_on_k, rdir, sx, sy, rcx, rcy)
    # add the start of the original kurve
    sp, sx, sy, ex, ey, cx, cy = kurve.get_span(k, 0)
    kurve.add_point(roll_on_k, sp, ex, ey, cx, cy)
if roll_on == 'auto': sp, sx, sy, ex, ey, cx, cy = kurve.get_span(k, 0)
sp, sx, sy, ex, ey, cx, cy = kurve.get_span(k, 0)
if roll_on == None:
    rollstartx = sx
    rollstarty = sy
elif roll_on == 'auto':
rollstartx, rollstarty = roll_on
sp, sx, sy, ex, ey, cx, cy = kurve.get_span(k, 0)
if sx == rollstartx and sy == rollstarty:
    return
vx, vy = kurve.get_span_dir(k, 0, 0)
rcx, rcy, rdir = kurve.tangential_arc(sx, sy, -vx, -vy, rollstartx, rollstarty)
rdir = -rdir
rollstartx, rollstarty = roll_on
if sx == rollstartx and sy == rollstarty:
    rdir = 0
    rcx = 0
    rcy = 0
else:
    vx, vy = kurve.get_span_dir(k, 0, 0)
    rcx, rcy, rdir = kurve.tangential_arc(sx, sy, -vx, -vy, rollstartx, rollstarty)
    rdir = -rdir
tool_r = tooldiameter / 2
top_r = diameter / 2
def cone(x_cen, y_cen, z_cen, tool_id, tooldiameter, spindle_speed,
         horizontal_feedrate, vertical_feedrate, depth, diameter, angle,
         z_safe, step_over, step_down):
comment('tool change')
tool_change(id=tool_id)
spindle(spindle_speed)
feedrate_hv(horizontal_feedrate, vertical_feedrate)
bottom_r = top_r - (math.tan(angle * math.pi / 180) * depth)
if top_r >= bottom_r:
    top_r = top_r - tool_r
    bottom_r = bottom_r - tool_r
if top_r < bottom_r:
    top_r = top_r + tool_r
    bottom_r = bottom_r + tool_r
if bottom_r < 0:
    bottom_r = bottom_r * -1
    depth = depth - (bottom_r / math.tan(angle * math.pi / 180))
    bottom_r = 0
# ... no better idea)
cone_feed = (step_down / math.tan(angle * math.pi / 180))
if angle < 0:
    cone_feed = cone_feed * -1
flush_nc()
rapid(x=(x_cen + bottom_r), y=y_cen)
rapid(z=z_safe)
cone_feed = (step_down / math.tan(angle * math.pi / 180))
if angle < 0:
    cone_feed = cone_feed * -1
flush_nc()
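The tan(angle * pi / 180) factor is the same one that relates depth to radius in the bottom_r formula above; a quick numeric check of the cone_feed expression, with assumed values:

import math
step_down, angle = 2.0, 45
cone_feed = step_down / math.tan(angle * math.pi / 180)  # tan(45 deg) == 1, so 2.0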
loop_feed = 0
while (loop_feed < depth):
    loop_feed = loop_feed + step_down
    if loop_feed >= depth:
        feed(z=(z_cen - depth))
    else:
        feed(z=(z_cen - loop_feed))
    arc_ccw(x=(x_cen - bottom_r), y=y_cen, i=-bottom_r, j=0)
    arc_ccw(x=(x_cen + bottom_r), y=y_cen, i=bottom_r, j=0)
feed(z=z_cen)
loop_feed = 0
while (loop_feed < depth):
    loop_feed = loop_feed + cone_feed
    if loop_feed >= depth:
        temp_depth = depth
    else:
        temp_depth = loop_feed
    temp_top_r = bottom_r + (math.tan(angle * math.pi / 180) * temp_depth)
cutcone(x_cen, y_cen, z_cen, temp_top_r, bottom_r, temp_depth, step_over)
rapid(z=z_safe)
cutcone(x_cen, y_cen, z_cen, temp_top_r, bottom_r, temp_depth, step_over)
rapid(z=z_safe)
def cut_curve(curve, need_rapid, p, rapid_down_to_height, final_depth):
def cut_curve(curve, need_rapid, p, rapid_down_to_height, current_start_depth, final_depth):
def cut_curve(curve, need_rapid, p, rapid_down_to_height, final_depth):
    prev_p = p
    first = True
    for vertex in curve.getVertices():
        if need_rapid and first:
            # rapid across
            rapid(vertex.p.x, vertex.p.y)
            # rapid down
            rapid(z=rapid_down_to_height)
            # feed down
            feed(z=final_depth)
            first = False
        else:
            dc = vertex.c - prev_p
            if vertex.type == 1:
                arc_ccw(vertex.p.x, vertex.p.y, i=dc.x, j=dc.y)
            elif vertex.type == -1:
                arc_cw(vertex.p.x, vertex.p.y, i=dc.x, j=dc.y)
            else:
                feed(vertex.p.x, vertex.p.y)
        prev_p = vertex.p
    return prev_p
rapid(z = rapid_down_to_height)
rapid(z = current_start_depth + rapid_down_to_height)
def cut_curvelist(curve_list, rapid_down_to_height, depth, clearance_height, keep_tool_down_if_poss):
def cut_curvelist(curve_list, rapid_down_to_height, current_start_depth, depth, clearance_height, keep_tool_down_if_poss):
def cut_curvelist(curve_list, rapid_down_to_height, depth, clearance_height, keep_tool_down_if_poss):
    p = area.Point(0, 0)
    first = True
    for curve in curve_list:
        need_rapid = True
        if first == False:
            s = curve.FirstVertex().p
            if keep_tool_down_if_poss == True:
                # see if we can feed across
                if feed_possible(p, s):
                    need_rapid = False
            elif s.x == p.x and s.y == p.y:
                need_rapid = False
        if need_rapid:
            rapid(z=clearance_height)
        p = cut_curve(curve, need_rapid, p, rapid_down_to_height, depth)
        first = False
    rapid(z=clearance_height)
p = cut_curve(curve, need_rapid, p, rapid_down_to_height, depth)
p = cut_curve(curve, need_rapid, p, rapid_down_to_height, current_start_depth, depth)
flunkOnFailure=False,
def tools_run_tests(self):
    self.addStep(ShellCommand(
        workdir='tools/release/signing',
        command=['python', 'tests.py'],
        name='release_signing_tests',
    ))
    self.addStep(ShellCommand(
        workdir='tools/lib/python',
        env={'PYTHONPATH': WithProperties('%(topdir)s/tools/lib/python')},
        name='run_lib_nosetests',
        command=['nosetests'],
    ))
    self.addStep(ShellCommand(
        workdir='tools/clobberer',
        flunkOnFailure=False,
        name='run_clobbberer_test',
        command=['python', 'test_clobberer.py',
                 'http://preproduction-master.build.mozilla.org/~cltbld/index.php',
                 '/home/cltbld/public_html/db/clobberer.db'],
    ))
./bin/pip install Twisted || exit 1;
./bin/pip install Twisted==10.1.0 || exit 1;
def createSummary(self, log):
    self.parent_class.createSummary(self, log)
    key = 'pylint-%s' % self.project
    if not self.build.getProperties().has_key(key):
        self.setProperty(key, {})
    props = self.getProperty(key)
    for msg, fullmsg in self.MESSAGES.items():
        props[fullmsg] = self.getProperty('pylint-%s' % fullmsg)
    props['total'] = self.getProperty('pylint-total')
self.addStep(RemovePYCs(workdir="."))
self.addStep(ShellCommand(
    name='rm_pyc',
    command=['find', '.', '-name', '*.pyc', '-exec', 'rm', '-fv', '{}', ';'],
    workdir=".",
))
def __init__(self, hgHost, **kwargs):
    self.parent_class = BuildFactory
    self.parent_class.__init__(self, **kwargs)
    self.hgHost = hgHost
    self.addStep(SetProperty(name='set_topdir',
                             command=['pwd'],
                             property='topdir',
                             workdir='.',
                             ))
    self.addStep(RemovePYCs(workdir="."))
'PYTHONPATH': WithProperties('%(topdir)s'),
'PYTHONPATH': WithProperties('%(topdir)s:%(topdir)s/tools/lib/python'),
def test_masters(self):
    self.addStep(ShellCommand(name='test_masters',
                              command=['./test-masters.sh', '-8'],
                              env={
                                  'PYTHONPATH': WithProperties('%(topdir)s'),
                                  'PATH': WithProperties('%(topdir)s/sandbox/bin:/bin:/usr/bin'),
                              },
                              workdir="buildbot-configs",
                              ))
flunkOnFailure=False,
def tools_pylint(self):
    # TODO: move pylintrc to tools
    self.addStep(PyLintExtended(
        command='../../../sandbox/bin/pylint --rcfile=../../.pylintrc *',
        workdir='tools/lib/python',
        flunkOnFailure=False,
        name='tools_lib_pylint',
        project='tools_lib',
    ))
    self.addStep(PyLintExtended(
        command='find buildbot-helpers buildfarm \
                 clobberer release stage \
                 -name \'*.py\' -type f -print0 | \
                 xargs -0 ../sandbox/bin/pylint \
                 --rcfile=.pylintrc',
        workdir="tools",
        env={'PYTHONPATH': WithProperties('%(topdir)s:%(topdir)s/tools/lib/python')},
        flunkOnFailure=False,
        name='tools_scripts_pylint',
        project='tools_scripts',
    ))
else:
    values[field] = getattr(self, field)
values['id'] = self.id
return values

def _on_change_args(self, args):
    res = {}
    values = {}
    for field, definition in self._fields.iteritems():
        if definition['type'] in ('one2many', 'many2many'):
            values[field] = [x._get_eval() for x in getattr(self, field)]
def _get_eval(self):
    values = {}
    for field, definition in self._fields.iteritems():
        if definition['type'] in ('one2many', 'many2many'):
            values[field] = [x.id for x in getattr(self, field) or []]
        else:
            values[field] = getattr(self, field)
    values['id'] = self.id
    return values
setting.EMAIL_SUBJECT_PREFIX + (_("Configuration for %s/%s") % (d['hostname'], d['ip'])),
settings.EMAIL_SUBJECT_PREFIX + (_("Configuration for %s/%s") % (d['hostname'], d['ip'])),
def generate_image(d):
    """
    Generates an image according to given configuration.
    """
    logging.debug(repr(d))
    if d['imagebuilder'] not in IMAGEBUILDERS:
        raise Exception("Invalid imagebuilder specified!")
    x = OpenWrtConfig()
    x.setUUID(d['uuid'])
    x.setOpenwrtVersion(d['openwrt_ver'])
    x.setArch(d['arch'])
    x.setPortLayout(d['port_layout'])
    x.setWifiIface(d['iface_wifi'], d['driver'], d['channel'])
    x.setWifiAnt(d['rx_ant'], d['tx_ant'])
    x.setLanIface(d['iface_lan'])
    x.setNodeType("adhoc")
    x.setPassword(d['root_pass'])
    x.setHostname(d['hostname'])
    x.setIp(d['ip'])
    x.setSSID(d['ssid'])
    # Add WAN interface and all subnets
    if d['wan_dhcp']:
        x.addInterface("wan", d['iface_wan'], init=True)
    else:
        x.addInterface("wan", d['iface_wan'], d['wan_ip'], d['wan_cidr'], d['wan_gw'], init=True)
    for subnet in d['subnets']:
        x.addSubnet(str(subnet['iface']), str(subnet['network']), subnet['cidr'], subnet['dhcp'], True)
    x.setCaptivePortal(d['captive_portal'])
    if d['vpn']:
        x.setVpn(d['vpn_username'], d['vpn_password'], d['vpn_mac'], d['vpn_limit'])
    if d['lan_wifi_bridge']:
        x.enableLanWifiBridge()
    if d['lan_wan_switch']:
        x.switchWanToLan()
    # Add optional packages
    for package in d['opt_pkg']:
        x.addPackage(package)
    # Cleanup stuff from previous builds
    os.chdir(WORKDIR)
    os.system("rm -rf build/files/*")
    os.system("rm -rf build/%s/bin/*" % d['imagebuilder'])
    os.mkdir("build/files/etc")
    x.generate("build/files/etc")
    if d['only_config']:
        # Just pack configuration and send it
        prefix = hashlib.md5(os.urandom(32)).hexdigest()[0:16]
        tempfile = os.path.join(DESTINATION, prefix + "-config.zip")
        zip = ZipFile(tempfile, 'w', ZIP_DEFLATED)
        os.chdir('build/files')
        for root, dirs, files in os.walk("etc"):
            for file in files:
                zip.write(os.path.join(root, file))
        zip.close()
        # Generate checksum
        f = open(tempfile, 'r')
        checksum = hashlib.md5(f.read())
        f.close()
        # We can take just first 22 characters as checksums are fixed size and we can reconstruct it
        filechecksum = urlsafe_b64encode(checksum.digest())[:22]
        checksum = checksum.hexdigest()
        result = "%s-%s-config-%s.zip" % (d['hostname'], d['router_name'], filechecksum)
        destination = os.path.join(DESTINATION, result)
        os.rename(tempfile, destination)
        # Send an e-mail
        t = loader.get_template('generator/email_config.txt')
        c = Context({
            'hostname': d['hostname'],
            'ip': d['ip'],
            'username': d['vpn_username'],
            'config': result,
            'checksum': checksum,
            'network': {
                'name': settings.NETWORK_NAME,
                'contact': settings.NETWORK_CONTACT,
                'description': getattr(settings, 'NETWORK_DESCRIPTION', None)
            }
        })
        send_mail(
            setting.EMAIL_SUBJECT_PREFIX + (_("Configuration for %s/%s") % (d['hostname'], d['ip'])),
            t.render(c),
            settings.EMAIL_IMAGE_GENERATOR_SENDER,
            [d['email']],
            fail_silently=False
        )
    else:
        # Generate full image
        x.build("build/%s" % d['imagebuilder'])
        # Read image version
        try:
            f = open(glob('%s/build/%s/build_dir/target-*/root-*/etc/version' % (WORKDIR, d['imagebuilder']))[0], 'r')
            version = f.read().strip().replace('.', '_')
            f.close()
        except:
            version = 'unknown'
        # Get resulting image
        files = []
        for file, type in d['imagefiles']:
            file = str(file)
            source = "%s/build/%s/bin/%s" % (WORKDIR, d['imagebuilder'], file)
            f = open(source, 'r')
            checksum = hashlib.md5(f.read())
            f.close()
            # We can take just first 22 characters as checksums are fixed size and we can reconstruct it
            filechecksum = urlsafe_b64encode(checksum.digest())[:22]
            checksum = checksum.hexdigest()
            ext = os.path.splitext(file)[1]
            result = "%s-%s-%s%s-%s%s" % (d['hostname'], d['router_name'], version,
                                          ("-%s" % type if type else ""), filechecksum, ext)
            destination = os.path.join(DESTINATION, result)
            os.rename(source, destination)
            files.append({'name': result, 'checksum': checksum})
        # Send an e-mail
        t = loader.get_template('generator/email.txt')
        c = Context({
            'hostname': d['hostname'],
            'ip': d['ip'],
            'username': d['vpn_username'],
            'files': files,
            'network': {
                'name': settings.NETWORK_NAME,
                'contact': settings.NETWORK_CONTACT,
                'description': getattr(settings, 'NETWORK_DESCRIPTION', None)
            }
        })
        send_mail(
            settings.EMAIL_SUBJECT_PREFIX + (_("Router images for %s/%s") % (d['hostname'], d['ip'])),
            t.render(c),
            settings.EMAIL_IMAGE_GENERATOR_SENDER,
            [d['email']],
            fail_silently=False
        )
snr = float(signal) / float(noise)
snr = float(signal) - float(noise)
def process_node(node_ip, ping_results, is_duped, peers, varsize_results):
  """
  Processes a single node.

  @param node_ip: Node's IP address
  @param ping_results: Results obtained from ICMP ECHO tests
  @param is_duped: True if duplicate echoes received
  @param peers: Peering info from routing daemon
  @param varsize_results: Results of ICMP ECHO tests with variable payloads
  """
  transaction.set_dirty()

  try:
    n = Node.get_exclusive(ip = node_ip)
  except Node.DoesNotExist:
    # This might happen when we were in the middle of a renumbering and
    # did not yet have access to the node. Then after the node has been
    # renumbered we gain access, but the IP has been changed. In this
    # case we must ignore processing of this node.
    return

  oldStatus = n.status

  # Determine node status
  if ping_results is not None:
    n.status = NodeStatus.Up
    n.rtt_min, n.rtt_avg, n.rtt_max, n.pkt_loss = ping_results

    # Add RTT graph
    add_graph(n, '', GraphType.RTT, RRARTT, 'Latency', 'latency', n.rtt_avg, n.rtt_min, n.rtt_max)

    # Add uptime credit
    if n.uptime_last:
      n.uptime_so_far = (n.uptime_so_far or 0) + (datetime.now() - n.uptime_last).seconds

    n.uptime_last = datetime.now()
  else:
    n.status = NodeStatus.Visible

  # Measure packet loss with different packet sizes and generate a graph
  if ping_results is not None and varsize_results is not None:
    losses = [n.pkt_loss] + varsize_results
    add_graph(n, '', GraphType.PacketLoss, RRAPacketLoss, 'Packet Loss', 'packetloss', *losses)

  if is_duped:
    n.status = NodeStatus.Duped
    NodeWarning.create(n, WarningCode.DupedReplies, EventSource.Monitor)

  # Generate status change events
  if oldStatus in (NodeStatus.Down, NodeStatus.Pending, NodeStatus.New) and n.status in (NodeStatus.Up, NodeStatus.Visible):
    if oldStatus in (NodeStatus.New, NodeStatus.Pending):
      n.first_seen = datetime.now()
      if n.node_type == NodeType.Wireless:
        generate_new_node_tweet(n)

    Event.create_event(n, EventCode.NodeUp, '', EventSource.Monitor)
  elif oldStatus != NodeStatus.Duped and n.status == NodeStatus.Duped:
    Event.create_event(n, EventCode.PacketDuplication, '', EventSource.Monitor)

  # Add olsr peer count graph
  add_graph(n, '', GraphType.OlsrPeers, RRAOlsrPeers, 'Routing Peers', 'olsrpeers', n.peers)

  # Add LQ/ILQ graphs
  if n.peers > 0:
    lq_avg = ilq_avg = 0.0
    for peer in n.get_peers():
      lq_avg += float(peer.lq)
      ilq_avg += float(peer.ilq)

    lq_graph = add_graph(n, '', GraphType.LQ, RRALinkQuality, 'Average Link Quality', 'lq', lq_avg / n.peers, ilq_avg / n.peers)

    for peer in n.get_peers():
      add_graph(n, peer.dst.ip, GraphType.LQ, RRALinkQuality, 'Link Quality to %s' % peer.dst, 'lq_peer_%s' % peer.dst.pk, peer.lq, peer.ilq, parent = lq_graph)

  n.last_seen = datetime.now()

  # Check if we have fetched nodewatcher data
  info = nodewatcher.fetch_node_info(node_ip)
  if info is not None and 'general' in info:
    try:
      oldUptime = n.uptime or 0
      oldChannel = n.channel or 0
      oldVersion = n.firmware_version
      n.firmware_version = info['general']['version']
      n.local_time = safe_date_convert(info['general']['local_time'])
      n.bssid = info['wifi']['bssid']
      n.essid = info['wifi']['essid']
      n.channel = nodewatcher.frequency_to_channel(info['wifi']['frequency'])
      n.clients = 0
      n.uptime = safe_uptime_convert(info['general']['uptime'])

      # Treat missing firmware version file as NULL version
      if n.firmware_version == "missing":
        n.firmware_version = None

      # Validate BSSID and ESSID
      if n.bssid != "02:CA:FF:EE:BA:BE":
        NodeWarning.create(n, WarningCode.BSSIDMismatch, EventSource.Monitor)

      try:
        if n.essid != n.project.ssid:
          NodeWarning.create(n, WarningCode.ESSIDMismatch, EventSource.Monitor)
      except Project.DoesNotExist:
        pass

      if 'uuid' in info['general']:
        n.reported_uuid = info['general']['uuid']
        if n.reported_uuid and n.reported_uuid != n.uuid:
          NodeWarning.create(n, WarningCode.MismatchedUuid, EventSource.Monitor)

      if oldVersion != n.firmware_version:
        Event.create_event(n, EventCode.VersionChange, '', EventSource.Monitor, data = 'Old version: %s\n New version: %s' % (oldVersion, n.firmware_version))

      if oldUptime > n.uptime:
        Event.create_event(n, EventCode.UptimeReset, '', EventSource.Monitor, data = 'Old uptime: %s\n New uptime: %s' % (oldUptime, n.uptime))

      if oldChannel != n.channel and oldChannel != 0:
        Event.create_event(n, EventCode.ChannelChanged, '', EventSource.Monitor, data = 'Old channel: %s\n New channel %s' % (oldChannel, n.channel))

      try:
        if n.channel != n.profile.channel:
          NodeWarning.create(n, WarningCode.ChannelMismatch, EventSource.Monitor)
      except Profile.DoesNotExist:
        pass

      if n.has_time_sync_problems():
        NodeWarning.create(n, WarningCode.TimeOutOfSync, EventSource.Monitor)

      if 'errors' in info['wifi']:
        error_count = safe_int_convert(info['wifi']['errors'])
        if error_count != n.wifi_error_count and error_count > 0:
          Event.create_event(n, EventCode.WifiErrors, '', EventSource.Monitor, data = 'Old count: %s\n New count: %s' % (n.wifi_error_count, error_count))

        n.wifi_error_count = error_count

      if 'net' in info:
        loss_count = safe_int_convert(info['net']['losses'])
        if loss_count != n.loss_count and loss_count > 1:
          Event.create_event(n, EventCode.ConnectivityLoss, '', EventSource.Monitor, data = 'Old count: %s\n New count: %s' % (n.loss_count, loss_count))

        n.loss_count = loss_count

        # Check VPN configuration
        if 'vpn' in info['net']:
          n.vpn_mac = info['net']['vpn']['mac'] or None
          try:
            offset = -3
            unit = 1000
            if 'Kbit' in info['net']['vpn']['upload_limit']:
              offset = -4
              unit = 1
            upload_limit = safe_int_convert(info['net']['vpn']['upload_limit'][:offset]) // unit
          except TypeError:
            upload_limit = None

          if n.vpn_mac and n.vpn_mac != n.vpn_mac_conf:
            NodeWarning.create(n, WarningCode.VPNMacMismatch, EventSource.Monitor)

          try:
            if upload_limit != n.profile.vpn_egress_limit:
              NodeWarning.create(n, WarningCode.VPNLimitMismatch, EventSource.Monitor)
          except Profile.DoesNotExist:
            pass

      # Parse nodogsplash client information
      oldNdsStatus = n.captive_portal_status
      if 'nds' in info:
        if 'down' in info['nds'] and info['nds']['down'] == '1':
          n.captive_portal_status = False

          # Create a node warning when captive portal is down and the node has it
          # selected in its image generator profile
          try:
            if n.project.captive_portal:
              NodeWarning.create(n, WarningCode.CaptivePortalDown, EventSource.Monitor)
          except Profile.DoesNotExist:
            pass
        else:
          n.captive_portal_status = True
          for cid, client in info['nds'].iteritems():
            if not cid.startswith('client'):
              continue

            try:
              c = APClient.objects.get(node = n, ip = client['ip'])
            except APClient.DoesNotExist:
              c = APClient(node = n)
              n.clients_so_far += 1

            n.clients += 1
            c.ip = client['ip']
            c.connected_at = safe_date_convert(client['added_at'])
            c.uploaded = safe_int_convert(client['up'])
            c.downloaded = safe_int_convert(client['down'])
            c.last_update = datetime.now()
            c.save()
      else:
        n.captive_portal_status = True

      # Check for captive portal status change
      if oldNdsStatus and not n.captive_portal_status:
        Event.create_event(n, EventCode.CaptivePortalDown, '', EventSource.Monitor)
      elif not oldNdsStatus and n.captive_portal_status:
        Event.create_event(n, EventCode.CaptivePortalUp, '', EventSource.Monitor)

      # Generate a graph for number of wifi cells
      if 'cells' in info['wifi']:
        add_graph(n, '', GraphType.WifiCells, RRAWifiCells, 'Nearby Wifi Cells', 'wificells', safe_int_convert(info['wifi']['cells']) or 0)

      # Update node's MAC address on wifi iface
      if 'mac' in info['wifi']:
        n.wifi_mac = info['wifi']['mac']

      # Update node's RTS and fragmentation thresholds
      if 'rts' in info['wifi'] and 'frag' in info['wifi']:
        n.thresh_rts = safe_int_convert(info['wifi']['rts']) or 2347
        n.thresh_frag = safe_int_convert(info['wifi']['frag']) or 2347

      # Check node's multicast rate
      if 'mcast_rate' in info['wifi']:
        rate = safe_int_convert(info['wifi']['mcast_rate'])
        if rate != 5500:
          NodeWarning.create(n, WarningCode.McastRateMismatch, EventSource.Monitor)

      # Check node's wifi bitrate, level and noise
      if 'signal' in info['wifi']:
        bitrate = safe_int_convert(info['wifi']['bitrate'])
        signal = safe_dbm_convert(info['wifi']['signal'])
        noise = safe_dbm_convert(info['wifi']['noise'])
        snr = float(signal) / float(noise)

        add_graph(n, '', GraphType.WifiBitrate, RRAWifiBitrate, 'Wifi Bitrate', 'wifibitrate', bitrate)
        add_graph(n, '', GraphType.WifiSignalNoise, RRAWifiSignalNoise, 'Wifi Signal/Noise', 'wifisignalnoise', signal, noise)
        add_graph(n, '', GraphType.WifiSNR, RRAWifiSNR, 'Wifi Signal/Noise Ratio', 'wifisnr', snr)

      # Generate a graph for number of clients
      if 'nds' in info:
        add_graph(n, '', GraphType.Clients, RRAClients, 'Connected Clients', 'clients', n.clients)

      # Check for IP shortage
      wifiSubnet = n.subnet_set.filter(gen_iface_type = IfaceType.WiFi, allocated = True)
      if len(wifiSubnet) and n.clients > max(0, ipcalc.Network(wifiSubnet[0].subnet, wifiSubnet[0].cidr).size() - 4):
        Event.create_event(n, EventCode.IPShortage, '', EventSource.Monitor, data = 'Subnet: %s\n Clients: %s' % (wifiSubnet[0], n.clients))

      # Record interface traffic statistics for all interfaces
      for iid, iface in info['iface'].iteritems():
        if iid not in ('wifi0', 'wmaster0'):
          # Check mappings for known wifi interfaces so we can handle hardware changes while
          # the node is up and not generate useless intermediate graphs
          try:
            if n.profile:
              iface_wifi = n.profile.template.iface_wifi
              if Template.objects.filter(iface_wifi = iid).count() >= 1:
                iid = iface_wifi
          except Profile.DoesNotExist:
            pass

          add_graph(n, iid, GraphType.Traffic, RRAIface, 'Traffic - %s' % iid, 'traffic_%s' % iid, iface['up'], iface['down'])

      # Generate load average statistics
      if 'loadavg' in info['general']:
        n.loadavg_1min, n.loadavg_5min, n.loadavg_15min, n.numproc = safe_loadavg_convert(info['general']['loadavg'])
        add_graph(n, '', GraphType.LoadAverage, RRALoadAverage, 'Load Average', 'loadavg', n.loadavg_1min, n.loadavg_5min, n.loadavg_15min)
        add_graph(n, '', GraphType.NumProc, RRANumProc, 'Number of Processes', 'numproc', n.numproc)

      # Generate free memory statistics
      if 'memfree' in info['general']:
        n.memfree = safe_int_convert(info['general']['memfree'])
        buffers = safe_int_convert(info['general'].get('buffers', 0))
        cached = safe_int_convert(info['general'].get('cached', 0))
        add_graph(n, '', GraphType.MemUsage, RRAMemUsage, 'Memory Usage', 'memusage', n.memfree, buffers, cached)

      # Generate solar statistics when available
      if 'solar' in info and all([x in info['solar'] for x in ('batvoltage', 'solvoltage', 'charge', 'state', 'load')]):
        states = {
          'boost' : 1,
          'equalize' : 2,
          'absorption' : 3,
          'float' : 4
        }

        for key, value in info['solar'].iteritems():
          if not value.strip():
            info['solar'][key] = None

        add_graph(n, '', GraphType.Solar, RRASolar, 'Solar Monitor', 'solar',
          info['solar']['batvoltage'],
          info['solar']['solvoltage'],
          info['solar']['charge'],
          states.get(info['solar']['state']),
          info['solar']['load']
        )

      # Check for installed package versions (every hour)
      try:
        last_pkg_update = n.installedpackage_set.all()[0].last_update
      except:
        last_pkg_update = None

      if not last_pkg_update or last_pkg_update < datetime.now() - timedelta(hours = 1):
        packages = nodewatcher.fetch_installed_packages(n.ip) or {}

        # Remove removed packages and update existing package versions
        for package in n.installedpackage_set.all():
          if package.name not in packages:
            package.delete()
          else:
            package.version = packages[package.name]
            package.last_update = datetime.now()
            package.save()
            del packages[package.name]

        # Add added packages
        for packageName, version in packages.iteritems():
          package = InstalledPackage(node = n)
          package.name = packageName
          package.version = version
          package.last_update = datetime.now()
          package.save()

      # Check if all selected optional packages are present in package listing
      try:
        missing_packages = []
        for package in n.profile.optional_packages.all():
          if n.installedpackage_set.filter(name = package.name).count() == 0:
            missing_packages.append(package.name)

        if missing_packages:
          NodeWarning.create(n, WarningCode.OptPackageNotFound, EventSource.Monitor, details = ("Packages missing: %s" % ", ".join(missing_packages)))
      except Profile.DoesNotExist:
        pass

      # Check if DNS works
      if 'dns' in info:
        old_dns_works = n.dns_works
        n.dns_works = info['dns']['local'] == '0' and info['dns']['remote'] == '0'
        if not n.dns_works:
          NodeWarning.create(n, WarningCode.DnsDown, EventSource.Monitor)

        if old_dns_works != n.dns_works:
          # Generate a proper event when the state changes
          if n.dns_works:
            Event.create_event(n, EventCode.DnsResolverRestored, '', EventSource.Monitor)
          else:
            Event.create_event(n, EventCode.DnsResolverFailed, '', EventSource.Monitor)
    except:
      logging.warning("Failed to interpret nodewatcher data for node '%s (%s)'!" % (n.name, n.ip))
      logging.warning(format_exc())
      NodeWarning.create(n, WarningCode.NodewatcherInterpretFailed, EventSource.Monitor)

  n.save()

  # When GC debugging is enabled perform some more work
  if getattr(settings, 'MONITOR_ENABLE_GC_DEBUG', None):
    gc.collect()
    return os.getpid(), len(gc.get_objects())

  return None, None
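The `snr` change above works because `signal` and `noise` are dBm readings, which are logarithmic: a power ratio in the linear domain becomes a difference in the dB domain. Dividing two negative dBm values instead yields a dimensionless quotient that even moves the wrong way (a stronger signal makes it smaller). A short worked check with illustrative values:

import math

signal_dbm = -60.0  # illustrative reading
noise_dbm = -92.0   # illustrative reading

snr_db = signal_dbm - noise_dbm  # 32.0 dB, the corrected computation
bogus = signal_dbm / noise_dbm   # ~0.65, dimensionless and misleading

# Cross-check in the linear domain: P(mW) = 10 ** (dBm / 10)
p_signal = 10 ** (signal_dbm / 10)
p_noise = 10 ** (noise_dbm / 10)
assert abs(10 * math.log10(p_signal / p_noise) - snr_db) < 1e-9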
self.addService('S35', 'misc')
self.addService('K35', 'misc')
self.addService('S46', 'misc')
self.addService('K46', 'misc')
def __init__(self): """ Class constructor. """ NodeConfig.__init__(self) # Add some basic services self.addService('S35', 'misc') self.addService('K35', 'misc')
f.write('START=35')
f.write('\n')
f.write('STOP=35')
f.write('\n')
f.write('START=46\n')
f.write('STOP=46\n')
def __generateMiscScript(self, f):
  f.write('#!/bin/sh /etc/rc.common\n')
  f.write('START=35')
  f.write('\n')
  f.write('STOP=35')
  f.write('\n')

  f.write('start() {\n')

  # Prevent the time from resetting too far into the past
  t = datetime.today()
  f.write('\tif [ ! -f /etc/datetime.save ]; then\n')
  f.write('\t echo -n "%02d%02d%02d%02d%04d" > /etc/datetime.save\n' % (t.month, t.day, t.hour, t.minute, t.year))
  f.write('\tfi\n')
  f.write('\tDT=`cat /etc/datetime.save`\n')
  f.write('\tdate $DT\n')
  f.write('\n')

  # Allow txtinfo access when selected
  if 'olsrd-mod-txtinfo' in self.packages:
    f.write('\tiptables -A INPUT -p tcp --dport 2006 -j ACCEPT\n')
    f.write('\n')

  # Set boot_wait to on if it is not set
  f.write('\tif [ -x /usr/sbin/nvram ]; then\n')
  f.write('\t\tBOOT_WAIT=`nvram get boot_wait`\n')
  f.write('\t\t[ "$BOOT_WAIT" != "on" ] && {\n')
  f.write('\t\t nvram set boot_wait=on\n')
  f.write('\t\t nvram commit\n')
  f.write('\t\t}\n')

  # Set boardflags on WHR-HP-G54
  if self.portLayout == 'whr-hp-g54':
    f.write('\tBOARDFLAGS=`nvram get boardflags`\n')
    f.write('\t\t[ "$BOARDFLAGS" != "0x3758" ] && {\n')
    f.write('\t\t nvram set boardflags=0x3758\n')
    f.write('\t\t nvram commit\n')
    f.write('\t\t}\n')
  f.write('\tfi\n')
  f.write('}\n')

  f.write('stop() {\n')
  f.write('\tDT=`date +%m%d%H%M%Y`\n')
  f.write('\techo $DT > /etc/datetime.save\n')
  f.write('}\n')
  f.close()

  if self.openwrtVersion == "old":
    # Copy timezone template
    self.__copyTemplate("general/timezone", os.path.join(self.base, 'TZ'))
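This is the companion to the previous entry: OpenWrt's `rc.common` reads the `START` and `STOP` variables when enabling a script, so the numbers written into the header must agree with the `S46`/`K46` links registered in `__init__`. The new version also folds the stray `f.write('\n')` calls into the string literals. Deriving both sites from one constant would make the invariant hard to break; a sketch with a hypothetical helper:

MISC_ORDER = 46  # single source of truth for the service ordering

def register_misc(config, f):
  # Hypothetical helper keeping the rc.d links and the script header in sync
  config.addService('S%d' % MISC_ORDER, 'misc')
  config.addService('K%d' % MISC_ORDER, 'misc')
  f.write('START=%d\n' % MISC_ORDER)
  f.write('STOP=%d\n' % MISC_ORDER)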
fresh_subnet = pool.allocate_subnet()
fresh_subnet = pool.allocate_subnet(prefix_len)
def save(self, user): """ Completes node registration. """ ip = self.cleaned_data.get('ip') project = self.cleaned_data.get('project') pool = self.cleaned_data.get('pool') subnet = None
return _("If this is not intentional, it is a bug. Please report it. If it is intentional, please get into a contact with network administrators to arrange new project entry with you own ESSID for you.")
return _("If this is not intentional, it is a bug. Please report it. If it is intentional, please get in contact with network administrators to arrange a new project entry with your own ESSID for you.")
def to_help_string(code): """ A helper method for transforming a warning code to a human readable help string.
f.write('LinkQualityDijkstraLimit 0 9.0\n')
def __generateOlsrdConfig(self, f):
  # Subnet configuration
  if self.subnets:
    f.write('Hna4\n')
    f.write('{\n')

    for subnet in self.subnets:
      if subnet['olsr'] and subnet['cidr'] < 29:
        f.write(' %(subnet)s %(mask)s\n' % subnet)

    f.write('}\n\n')

  # General configuration (static)
  f.write('AllowNoInt yes\n')
  f.write('UseHysteresis no\n')
  f.write('LinkQualityFishEye 0\n')
  f.write('Willingness 3\n')
  f.write('LinkQualityLevel 2\n')
  f.write('LinkQualityAging 0.1\n')
  f.write('LinkQualityAlgorithm "etx_ff"\n')
  f.write('LinkQualityDijkstraLimit 0 9.0\n')
  f.write('FIBMetric "flat"\n')
  f.write('Pollrate 0.025\n')
  f.write('TcRedundancy 2\n')
  f.write('MprCoverage 3\n')
  f.write('NatThreshold 0.75\n')
  f.write('SmartGateway no\n')
  f.write('MainIp {0}\n'.format(self.ip))
  f.write('SrcIpRoutes yes\n')
  f.write('\n')

  # Setup txtinfo plugin when selected
  if 'olsrd-mod-txtinfo' in self.packages:
    f.write('LoadPlugin "olsrd_txtinfo.so.0.1"\n')
    f.write('{\n')
    f.write(' PlParam "accept" "0.0.0.0"\n')
    f.write('}\n')
    f.write('\n')

  # Setup actions plugin to trigger a nodewatcher script when the default
  # route is added or removed from the routing table
  if self.hasClientSubnet:
    f.write('LoadPlugin "olsrd_actions.so.0.1"\n')
    f.write('{\n')
    f.write(' PlParam "trigger" "0.0.0.0>/etc/actions.d/olsr_gateway_action"\n')
    for dns in self.dns:
      f.write(' PlParam "trigger" "%s>/etc/actions.d/olsr_dns_action"\n' % dns)
    f.write('}\n')
    f.write('\n')

    # Add the olsrd-mod-actions package
    self.addPackage('olsrd-mod-actions')

  # General interface configuration (static)
  def interfaceConfiguration(name, ip):
    f.write('Interface "{0}"\n'.format(name))
    f.write('{\n')
    f.write(' IPv4Multicast 255.255.255.255\n')
    if ip is not None:
      f.write(' IPv4Src {0}\n'.format(ip))
    f.write(' HelloInterval 5.0\n')
    f.write(' HelloValidityTime 40.0\n')
    f.write(' TcInterval 7.0\n')
    f.write(' TcValidityTime 161.0\n')
    f.write(' MidInterval 18.0\n')
    f.write(' MidValidityTime 324.0\n')
    f.write(' HnaInterval 18.0\n')
    f.write(' HnaValidityTime 324.0\n')
    f.write('}\n')
    f.write('\n')

  # Additional interface configuration
  for interface in self.interfaces:
    if interface['olsr']:
      interfaceConfiguration(interface['name'], interface['ip'])

  f.close()
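This entry appears to be a pure removal (no replacement line): `LinkQualityDijkstraLimit` is an option that later olsrd releases seem to have dropped, and an unrecognized keyword makes olsrd reject the whole configuration at parse time, so the generator must stop emitting it. If both olsrd generations had to be supported, gating legacy-only keywords would be one way out; the flag and option list below are assumptions, not the generator's actual API:

LEGACY_ONLY_OPTIONS = ['LinkQualityDijkstraLimit 0 9.0']

def write_static_options(f, legacy_olsrd = False):
  # Emit options common to all olsrd versions, then legacy-only ones
  f.write('LinkQualityAlgorithm "etx_ff"\n')
  if legacy_olsrd:
    for option in LEGACY_ONLY_OPTIONS:
      f.write(option + '\n')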