Dataset schema (field, type, range):

  query             stringlengths    9 .. 3.4k
  document          stringlengths    9 .. 87.4k
  metadata          dict
  negatives         sequencelengths  4 .. 101
  negative_scores   sequencelengths  4 .. 101
  document_score    stringlengths    3 .. 10
  document_rank     stringclasses    102 values
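A minimal sketch of loading and inspecting rows with this schema, assuming the dump comes from a Hugging Face dataset; the repository id below is a hypothetical placeholder, since the actual dataset name is not given in this dump.

from datasets import load_dataset

# Hypothetical dataset path -- the real repository id is not shown in this dump.
ds = load_dataset("org/code-retrieval-triplets", split="train")

row = ds[0]
print(row["query"])                 # natural-language query (9 .. 3.4k chars)
print(row["document"][:80])         # positive code document (9 .. 87.4k chars)
print(len(row["negatives"]))        # 4 .. 101 hard-negative documents
print(row["document_score"], row["document_rank"])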
draw a generic stretched rectangle
def rectStreatch(self,(x,y,xs,ys),(u,v,us,vs)):
    # do clipping now:
    color = Vec4(1,1,1,1)
    w = self.w
    h = self.h
    u,v,us,vs = u/w,1-v/h,(u+us)/w,1-(v+vs)/h
    self.drawer.rectangle(
        x,y,xs,ys,
        u,v,us-u,vs-v,
        #u/self.w,v/self.h,us/self.w,vs/self.h,
        color)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rectDraw(rect):\n ax = plt.gca()\n w = max(0, rect[1] - rect[0])\n h = max(0, rect[3] - rect[2])\n ax.add_patch(patches.Rectangle((rect[0], rect[2]), w, h, edgecolor='g', facecolor='none'))", "def drawRectangle(width, height, tilt, penColor, fillColor):\n Lucia.color(penColor,fillColor)\n Lucia.seth(tilt)\n Lucia.begin_fill()\n for i in range(2):\n Lucia.forward(width)\n Lucia.left(90)\n Lucia.forward(height)\n Lucia.left(90)\n Lucia.end_fill()", "def draw_rect(surface, fill_color, outline_color, rect, border=1):\n surface.fill(outline_color, rect)\n surface.fill(fill_color, rect.inflate(-border * 2, -border * 2))\n return rect", "def draw_rect(t, rect):\n t.pu()\n t.goto(rect.corner.x, rect.corner.y)\n t.pd()\n t.setheading(0)\n for i in range(2):\n t.fd(rect.width)\n t.lt(90)\n t.fd(rect.height)\n t.lt(90)", "def drawRectangle(x, y, width, height):\n pen1.up()\n pen1.goto(x, y)\n pen1.down()\n pen1.fd(width)\n pen1.right(90)\n pen1.fd(height)\n pen1.right(90)\n pen1.fd(width)\n pen1.right(90)\n pen1.fd(height)", "def draw_rect(self, center_x, center_y, radius, thickness):\n\n center_x = int(center_x)\n center_y = int(center_y)\n radius = int(radius)\n thickness = int(thickness)\n\n edge_length = int(radius * 0.3)\n\n x_ranges = list(range(center_x - radius - thickness, center_x - edge_length)) + list(range(center_x + edge_length, center_x + radius + thickness))\n y_ranges = list(range(center_y - radius - thickness, center_y - radius)) + list(range(center_y + radius, center_y + radius + thickness))\n\n for x in x_ranges:\n for y in y_ranges:\n\n if self.image_width > x >= 0 and self.image_height > y >= 0: # for the frames' limit protection.\n [b, g, r] = self.image[y, x] = numpy.array(self.image[y, x]) * numpy.array([0, 1, 0])\n\n if g <= 100:\n if g == 0:\n g = 1\n self.image[y, x] = [0, 0, 1]\n greenness_rate = (255 / g) / 0.12\n self.image[y, x] = numpy.array(self.image[y, x]) * numpy.array([0, greenness_rate, 0])\n\n y_ranges = list(range(center_y - radius - thickness, center_y - edge_length)) + list(range(center_y + edge_length, center_y + radius + thickness))\n x_ranges = list(range(center_x - radius - thickness, center_x - radius)) + list(range(center_x + radius, center_x + radius + thickness))\n\n for y in y_ranges:\n for x in x_ranges:\n\n if self.image_width > x >= 0 and self.image_height > y >= 0: # for the frames' limit protection.\n [b, g, r] = self.image[y, x] = numpy.array(self.image[y, x]) * numpy.array([0, 1, 0])\n\n if g <= 100:\n if g == 0:\n g = 1\n self.image[y, x] = [0, 0, 1]\n greenness_rate = (255 / g) / 0.12\n self.image[y, x] = numpy.array(self.image[y, x]) * numpy.array([0, greenness_rate, 0])\n\n x_ranges = list(range(int(center_x - radius * 1.5), int(center_x - edge_length))) + list(range(int(center_x + edge_length), int(center_x + radius * 1.5)))\n\n for x in x_ranges:\n if self.image_width > x >= 0: # for the frames' limit protection.\n self.image[center_y, x] = numpy.array(self.image[center_y, x]) * numpy.array([0, 2, 0])\n\n y_ranges = list(range(int(center_y - radius * 1.5), int(center_y - edge_length))) + list(range(int(center_y + edge_length), int(center_y + radius * 1.5)))\n\n for y in y_ranges:\n if self.image_height > y >= 0: # for the frames' limit protection.\n self.image[y, center_x] = numpy.array(self.image[y, center_x]) * numpy.array([0, 2, 0])", "def draw_rectangle(t, w, h):\r\n for i in range(2):\r\n t.forward(w)\r\n t.left(90)\r\n t.forward(h)\r\n t.left(90)", "def drawRectangle(img, top_left, bottom_right, color = (0,0,255), thickness 
= 3):\n\tcv2.rectangle(img, top_left, bottom_right, color, thickness)", "def draw_rect(mat, pt1, pt2, color=(0, 0, 255), thickness=1):\n cv2.rectangle(mat, pt1, pt2, color, thickness=thickness)", "def draw_rect(im, rect, linewidth=2, color=(0.0, 1.0, 0.4)):\n color = np.asarray(color)\n def clip0(x):\n return np.clip(x, 0, im.shape[0])\n def clip1(x):\n return np.clip(x, 0, im.shape[1])\n\n im[clip0(rect[0]-linewidth):clip0(rect[0]), \n clip1(rect[1]-linewidth):clip1(rect[3]+linewidth)] = color \n\n im[clip0(rect[2]):clip0(rect[2]+linewidth), \n clip1(rect[1]-linewidth):clip1(rect[3]+linewidth)] = color \n\n im[clip0(rect[0]):clip0(rect[2]), \n clip1(rect[1]-linewidth):clip1(rect[1])] = color \n\n im[clip0(rect[0]):clip0(rect[2]), \n clip1(rect[3]):clip1(rect[3]+linewidth)] = color", "def draw_rect(self, x, y, w, h, color=None):\n self._draw_fast_hline(x, y, w, color)\n self._draw_fast_hline(x, y + h - 1, w, color)\n self._draw_fast_vline(x, y, h, color)\n self._draw_fast_vline(x + w - 1, y, h, color)", "def DrawBase(screen, base_x, base_y, base_len, base_width):\n pygame.draw.rect(screen, (255,0,0),(base_x, base_y, base_len*2, base_width*2), 4)", "def draw_round_rect(self, x, y, w, h, r, color=None, aa=False):\n self._draw_fast_hline(x + r, y, w - 2 * r, color, aa) # Top\n self._draw_fast_hline(x + r, y + h - 1, w - 2 * r, color, aa) # Bottom\n self._draw_fast_vline(x, y + r, h - 2 * r, color, aa) # Left\n self._draw_fast_vline(x + w - 1, y + r, h - 2 * r, color, aa) # Right\n # draw four corners\n self._draw_circle_helper(x + r, y + r, r, 1, color)\n self._draw_circle_helper(x + w - r - 1, y + r, r, 2, color)\n self._draw_circle_helper(x + w - r - 1, y + h - r - 1, r, 4, color)\n self._draw_circle_helper(x + r, y + h - r - 1, r, 8, color)", "def drawRectangle(x,y,width,height,rounding=0,ucoords=1):\n if ucoords:\n dislin.rlrnd(x,y,width,height,rounding)\n else:\n dislin.rndrec(x,y,width,height,rounding)", "def draw_rectangle(img, st_pos=(0, 0), ed_pos=(640-1, 480-1),\n color=(1.0, 1.0, 0.0)):\n\n # convert `color` from float to int.\n max_value = np.iinfo(img.dtype).max\n color_val = np.array(color)\n color_val = np.round(color_val * max_value).astype(img.dtype)\n\n # conposite!\n st_h, st_v = st_pos\n ed_h, ed_v = ed_pos\n img[st_v, st_h:ed_h, :] = color_val\n img[ed_v, st_h:ed_h, :] = color_val\n img[st_v:ed_v, st_h, :] = color_val\n img[st_v:ed_v, ed_h, :] = color_val", "def draw_rectangle_filled(center_x, center_y, width, height, color,\n tilt_angle=0):\n\n GL.glEnable(GL.GL_BLEND)\n GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)\n GL.glEnable(GL.GL_LINE_SMOOTH)\n GL.glHint(GL.GL_LINE_SMOOTH_HINT, GL.GL_NICEST)\n GL.glHint(GL.GL_POLYGON_SMOOTH_HINT, GL.GL_NICEST)\n\n # Set color\n if len(color) == 4:\n GL.glColor4ub(color[0], color[1], color[2], color[3])\n elif len(color) == 3:\n GL.glColor4ub(color[0], color[1], color[2], 255)\n\n GL.glLoadIdentity()\n GL.glTranslatef(center_x, center_y, 0)\n if tilt_angle:\n GL.glRotatef(tilt_angle, 0, 0, 1)\n\n GL.glBegin(GL.GL_QUADS)\n GL.glVertex3f(-width // 2, -height // 2, 0.5)\n GL.glVertex3f(width // 2, -height // 2, 0.5)\n GL.glVertex3f(width // 2, height // 2, 0.5)\n GL.glVertex3f(-width // 2, height // 2, 0.5)\n GL.glEnd()", "def _generate_rectangle(figure: plotting.figure, source: plotting.ColumnDataSource):\n return figure.rect(\n x=\"x\",\n y=\"y\",\n width=\"width\",\n height=\"height\",\n line_color=\"#000000\",\n fill_color=\"color\",\n fill_alpha=0.7,\n line_width=1.5,\n source=source,\n )", "def drawRectangle(img, 
rect, color):\n \n if len(rect) is not 4:\n # TODO throw error\n return;\n rect = rect * DISPLAY_SCALE;\n x1, y1, x2, y2 = rect.astype(numpy.int32);\n cv2.rectangle(img, (x1, y1), (x2, y2), color, 2);", "def round_rect(x, y, w, h, i):\n X, Y, W, H = int(x + 10), int(y + 10), int(w - 20), int(h - 20)\n\n pygame.draw.rect(gameDisplay, i, (x, Y, w, H))\n pygame.draw.rect(gameDisplay, i, (X, y, W, h))\n\n pygame.draw.circle(gameDisplay, i, (X, Y), 10)\n pygame.draw.circle(gameDisplay, i, (X + W, Y), 10)\n pygame.draw.circle(gameDisplay, i, (X, Y + H), 10)\n pygame.draw.circle(gameDisplay, i, (X + W, Y + H), 10)\n\n pygame.draw.rect(gameDisplay, i, (X, Y, W, H))", "def round_rect(x,y,w,h, i):\n X,Y,W,H=int(x+10),int(y+10),int(w-20),int(h-20)\n\n pygame.draw.rect(gameDisplay, i, (x,Y, w, H))\n pygame.draw.rect(gameDisplay, i, (X,y, W, h))\n\n pygame.draw.circle(gameDisplay, i, (X,Y), 10)\n pygame.draw.circle(gameDisplay, i, (X+W,Y), 10)\n pygame.draw.circle(gameDisplay, i, (X,Y+H), 10)\n pygame.draw.circle(gameDisplay, i, (X+W,Y+H), 10)\n\n pygame.draw.rect(gameDisplay, i, (X,Y,W,H))", "def round_rect(self, surface, rect, color, rad=20, border=0, inside=(0,0,0,0)):\n rect = pygame.Rect(rect)\n zeroed_rect = rect.copy()\n zeroed_rect.topleft = 0,0\n image = pygame.Surface(rect.size).convert_alpha()\n image.fill((0,0,0,0))\n self.render_region(image, zeroed_rect, color, rad)\n if border:\n zeroed_rect.inflate_ip(-2*border, -2*border)\n self.render_region(image, zeroed_rect, inside, rad)\n surface.blit(image, rect)", "def drawRectWithBorder(screen, bColor, fColor, posX, posY, height, width, bWidth):\n \n #draw outline rect \n pygame.draw.rect(screen, bColor, (posX, posY, height, width))\n #draw fill rect\n pygame.draw.rect(screen, fColor, (posX + bWidth, posY + bWidth, height - bWidth * 2, width - bWidth * 2))", "def DrawRectangleRect(*args, **kwargs):\n return _gdi_.PseudoDC_DrawRectangleRect(*args, **kwargs)", "def draw_xywh_rectangle_filled(top_left_x, top_left_y, width, height, color):\n center_x = top_left_x + (width / 2)\n center_y = top_left_y + (height / 2)\n draw_rectangle_filled(center_x, center_y, width, height, color)", "def DrawRectangle(*args, **kwargs):\n return _gdi_.PseudoDC_DrawRectangle(*args, **kwargs)", "def test_rectangle_draw(self):\n rectangle = Rectangle(x_pts=10, y_pts=40, width_pts=100,\n height_pts=120, line_width_pts=5)\n self.assertEqual(rectangle.ps,\n 'newpath 10 40 moveto\\n'\n '100 0 rlineto\\n'\n '0 120 rlineto\\n'\n '-100 0 rlineto\\n'\n '0 -120 rlineto\\n'\n 'closepath\\n'\n '5 setlinewidth\\n'\n 'stroke\\n')", "def DrawRectangle(*args, **kwargs):\n return _gdi_.GraphicsContext_DrawRectangle(*args, **kwargs)", "def wdraw_rectangle(self, wx0, wy0, wx1, wy1, fill, outline):\r\n dx0, dy0 = self.w_to_d(wx0, wy0)\r\n dx1, dy1 = self.w_to_d(wx1, wy1)\r\n self.canvas.create_rectangle(dx0, dy0, dx1, dy1, fill=fill, outline=outline)", "def rect(self, x, y, w, h, color):\n self.hline(x, y, w, color)\n self.vline(x, y, h, color)\n self.vline(x + w - 1, y, h, color)\n self.hline(x, y + h - 1, w, color)", "def draw_rectangle(self, roi, color, thickness=2):\n top_left = self._format_point(Point(roi[0], roi[1]))\n bottom_right = self._format_point(Point(roi[2], roi[3]))\n opencv.rectangle(self.img, top_left.tuple(), bottom_right.tuple(), color.bgra(), thickness=thickness)", "def draw_rectangle_outline(center_x, center_y, width, height, color,\n border_width=1, tilt_angle=0):\n\n GL.glEnable(GL.GL_BLEND)\n GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)\n 
GL.glEnable(GL.GL_LINE_SMOOTH)\n GL.glHint(GL.GL_LINE_SMOOTH_HINT, GL.GL_NICEST)\n GL.glHint(GL.GL_POLYGON_SMOOTH_HINT, GL.GL_NICEST)\n\n GL.glLoadIdentity()\n GL.glTranslatef(center_x, center_y, 0)\n if tilt_angle:\n GL.glRotatef(tilt_angle, 0, 0, 1)\n\n # Set line width\n GL.glLineWidth(border_width)\n\n # Set color\n if len(color) == 4:\n GL.glColor4ub(color[0], color[1], color[2], color[3])\n elif len(color) == 3:\n GL.glColor4ub(color[0], color[1], color[2], 255)\n\n GL.glBegin(GL.GL_LINE_LOOP)\n GL.glVertex3f(-width // 2, -height // 2, 0.5)\n GL.glVertex3f(width // 2, -height // 2, 0.5)\n GL.glVertex3f(width // 2, height // 2, 0.5)\n GL.glVertex3f(-width // 2, height // 2, 0.5)\n GL.glEnd()", "def draw_rect(r):\n for i in range(2):\n turtle.fd(r.width)\n turtle.lt(90)\n turtle.fd(r.height)\n turtle.lt(90)", "def draw_rect(r):\n for i in range(2):\n turtle.fd(r.width)\n turtle.lt(90)\n turtle.fd(r.height)\n turtle.lt(90)", "def fill_rect(self, x, y, width, height, color=Color['white']):\n area = [x, y, width, height]\n pygame.draw.rect(self.display, color, area)", "def draw_square(self, x, y, color):\n return self.canvas.create_rectangle(x * self.scale, y * self.scale, \\\n (x + 1) * self.scale, (y + 1) * self.scale, fill = color)", "def square(square_x, square_y, square_width, square_height, square_color):\n arcade.draw_rectangle_filled(square_x, square_y, square_width, square_height, square_color)", "def draw_rect(x, y, width, height):\r\n global _canvas\r\n global _current_color\r\n if _canvas == None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n rect = Rectangle(width, height, Point(x+width/2, y+height/2))\r\n _set_not_filled(rect)\r\n _canvas.add(rect)", "def actually_draw_square(canvas_mode, canvas_size_in_pixels, canvas_background_color_rgb, rectangle_position, rectangle_fill, rectangle_outline):\n im = Image.new(canvas_mode, canvas_size_in_pixels, canvas_background_color_rgb)\n dr = ImageDraw.Draw(im)\n\n dr.rectangle(rectangle_position, fill=rectangle_fill, outline=rectangle_outline)\n\n im.save(\"square.png\")", "def add_rect(self, x, y, w, h, fill_style=None, outline_style=None):\n fill_style = self.__prepare_style(fill_style, self.style.char)\n outline_style = self.__prepare_style(outline_style, 'o')\n for px in range(x, x + w):\n for py in range(y, y + h):\n if self.check_coord_in_range(px, py):\n if px == x or px == x + w - 1 or py == y or py == y + h - 1:\n self.canvas[py][px] = outline_style\n else:\n self.canvas[py][px] = fill_style", "def frame_rect(surface: Surface, color, rect: Rect, thick: int = 1):\n surface.fill(color, (rect.left, rect.top, rect.width, thick))\n surface.fill(color, (rect.left, rect.bottom - thick, rect.width, thick))\n surface.fill(color, (rect.left, rect.top, thick, rect.height))\n surface.fill(color, (rect.right - thick, rect.top, thick, rect.height))", "def rect(self, x, y, width, height, color, *, fill=False):\n # pylint: disable=too-many-arguments\n if self.rotation == 1:\n x, y = y, x\n width, height = height, width\n x = self.width - x - width\n if self.rotation == 2:\n x = self.width - x - width\n y = self.height - y - height\n if self.rotation == 3:\n x, y = y, x\n width, height = height, width\n y = self.height - y - height\n\n # pylint: disable=too-many-boolean-expressions\n if (\n width < 1\n or height < 1\n or (x + width) <= 0\n or (y + height) <= 0\n or y >= self.height\n or x >= self.width\n ):\n return\n x_end = min(self.width - 1, x + width - 1)\n y_end = min(self.height - 1, y + height - 1)\n x = max(x, 
0)\n y = max(y, 0)\n if fill:\n self.format.fill_rect(self, x, y, x_end - x + 1, y_end - y + 1, color)\n else:\n self.format.fill_rect(self, x, y, x_end - x + 1, 1, color)\n self.format.fill_rect(self, x, y, 1, y_end - y + 1, color)\n self.format.fill_rect(self, x, y_end, x_end - x + 1, 1, color)\n self.format.fill_rect(self, x_end, y, 1, y_end - y + 1, color)", "def DrawRectangle(*args, **kwargs):\n return _gdi_.DC_DrawRectangle(*args, **kwargs)", "def draw_rect(img, x_init, y_init, x, y):\n # invert colors to show the selected area\n img[y_init:y, x_init:x] = 255 - orig_img[y_init:y, x_init:x]\n cv.rectangle(img, (x_init, y_init), (x, y), (0, 255, 0), 1)", "def draw_rectangle(self, can_page, x_start, y_start, width_rect, height_rect, line_width, stroke_color, fill_color,\n dash_style, stroke_mode, fill_mode, text_color):\n # ใƒญใ‚ฐ\n log.debug(self)\n try:\n can_page.setLineWidth(line_width)\n can_page.setStrokeColor(stroke_color)\n if fill_color is None:\n fill_mode = 0\n else:\n can_page.setFillColor(fill_color)\n can_page.setDash(dash_style)\n can_page.rect(x_start, y_start, width_rect, height_rect, stroke=stroke_mode, fill=fill_mode)\n can_page.setFillColor(text_color, alpha=None)\n except:\n # ไพ‹ๅค–ๅ‡ฆ็†\n # log.error(traceback.format_exc())\n constant.get_error(constant.ERROR_003)", "def rectangle(xcenter, ycenter, width, height):\n x1, x2 = xcenter - width, xcenter + width\n y1, y2 = ycenter - height, ycenter + height\n return Shape([Point(x1, y1), Point(x1, y2), Point(x2, y2), Point(x2, y1)])", "def draw_rect(self, color, position, size, border_width = 0, anchor= 'topleft'):\n # We'll try to make sure that everything is okay later\n \n color = spyral.color._determine(color)\n offset = self._calculate_offset(anchor, size)\n pygame.draw.rect(self._surf, color, (position + offset, size), border_width)", "def draw(self):\n if self.master != None :\n outline = Cell.FILLED_COLOR_BORDER if self.fill else Cell.EMPTY_COLOR_BORDER\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = self.fill, outline = outline)", "def draw_filled_rect(x, y, width, height):\r\n global _canvas\r\n global _current_color\r\n if _canvas == None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n rect = Rectangle(width, height, Point(x+width/2, y+height/2))\r\n _set_filled(rect)\r\n _canvas.add(rect)", "def draw_square(self, surface, color, position):\n rect = pygame.Rect(position, (50, 50))\n pygame.draw.rect(surface, color, rect)", "def draw_rectangle(image, rect, color=(0,255,255)):\n x,y,w,h = rect\n cv2.rectangle(image, (x,y), (x+w,y+h), color, 3)\n return image", "def draw_round_rect_filled(self, x, y, w, h, r, color=None, aa=False):\n self.draw_rect_filled(x + r, y, w - 2 * r, h, color, aa)\n self._draw_circle_filled_helper(x + w - r - 1, y + r, r,\n 1, h - 2 * r - 1, color)\n self._draw_circle_filled_helper(x + r, y + r, r, 2, h - 2 * r - 1, color)", "def draw_rect(self, i, j, col, d=0):\n pygame.draw.rect(self.screen, col, self.get_rect(i, j), d)", "def DrawRectangleRect(*args, **kwargs):\n return _gdi_.DC_DrawRectangleRect(*args, **kwargs)", "def draw_SVG_square(self, (w, h), (x, y), (rx, ry), parent):\n style = self.get_style()\n attr = {\n 'style': style,\n 'height': str(h),\n 'width': str(w),\n 'x': str(x),\n 'y': str(y),\n 'rx': str(rx),\n 'ry': str(ry)\n }\n return inkex.etree.SubElement(parent, inkex.addNS('rect', 'svg'), attr)", "def 
DrawRoundedRectangle(*args, **kwargs):\n return _gdi_.GraphicsContext_DrawRoundedRectangle(*args, **kwargs)", "def DrawVistaRectangle(self, dc, rect, hasfocus):\r\n\r\n if hasfocus:\r\n \r\n outer = _rgbSelectOuter\r\n inner = _rgbSelectInner\r\n top = _rgbSelectTop\r\n bottom = _rgbSelectBottom\r\n\r\n else:\r\n \r\n outer = _rgbNoFocusOuter\r\n inner = _rgbNoFocusInner\r\n top = _rgbNoFocusTop\r\n bottom = _rgbNoFocusBottom\r\n\r\n oldpen = dc.GetPen()\r\n oldbrush = dc.GetBrush()\r\n\r\n bdrRect = wx.Rect(*rect.Get())\r\n filRect = wx.Rect(*rect.Get())\r\n filRect.Deflate(1,1)\r\n \r\n r1, g1, b1 = int(top.Red()), int(top.Green()), int(top.Blue())\r\n r2, g2, b2 = int(bottom.Red()), int(bottom.Green()), int(bottom.Blue())\r\n\r\n flrect = float(filRect.height)\r\n if flrect < 1:\r\n flrect = self._lineHeight\r\n\r\n rstep = float((r2 - r1)) / flrect\r\n gstep = float((g2 - g1)) / flrect\r\n bstep = float((b2 - b1)) / flrect\r\n\r\n rf, gf, bf = 0, 0, 0\r\n dc.SetPen(wx.TRANSPARENT_PEN)\r\n \r\n for y in xrange(filRect.y, filRect.y + filRect.height):\r\n currCol = (r1 + rf, g1 + gf, b1 + bf)\r\n dc.SetBrush(wx.Brush(currCol, wx.SOLID))\r\n dc.DrawRectangle(filRect.x, y, filRect.width, 1)\r\n rf = rf + rstep\r\n gf = gf + gstep\r\n bf = bf + bstep\r\n \r\n dc.SetBrush(wx.TRANSPARENT_BRUSH)\r\n dc.SetPen(wx.Pen(outer))\r\n dc.DrawRoundedRectangleRect(bdrRect, 3)\r\n bdrRect.Deflate(1, 1)\r\n dc.SetPen(wx.Pen(inner))\r\n dc.DrawRoundedRectangleRect(bdrRect, 2)\r\n\r\n dc.SetPen(oldpen)\r\n dc.SetBrush(oldbrush)", "def DrawRoundedRectangle(*args, **kwargs):\n return _gdi_.PseudoDC_DrawRoundedRectangle(*args, **kwargs)", "def DrawRoundedRectangleRect(*args, **kwargs):\n return _gdi_.PseudoDC_DrawRoundedRectangleRect(*args, **kwargs)", "def rectangle(self, clear_screen=True, x1=10, y1=10, x2=80, y2=40, fill_color='black', outline_color='black'):\n\n if clear_screen:\n self.clear()\n\n return self.draw.rectangle((x1, y1, x2, y2), fill=fill_color, outline=outline_color)", "def fill_rectangle(self, x, y, w, h, color):\n if self.is_off_grid(x, y, x + w - 1, y + h - 1):\n return\n if w > h:\n self.fill_hrect(x, y, w, h, color)\n else:\n self.fill_vrect(x, y, w, h, color)", "def rectangle(size = (4,2), layer = 0):\n D = Device(name = 'rectangle')\n points = [[size[0], size[1]], [size[0], 0], [0, 0], [0, size[1]]]\n D.add_polygon(points, layer = layer)\n return D", "def draw_square(display, coord, box_size, color, bg_color):\n left, top = coord\n half = int(box_size * 0.5)\n quarter = int(box_size * 0.25)\n pygame.draw.rect(\n display, color, (left + quarter, top + quarter, half, half))\n return", "def draw(self, win):\n pygame.draw.rect(win, self.color, self.rect)", "def DrawRoundedRectangle(*args, **kwargs):\n return _gdi_.DC_DrawRoundedRectangle(*args, **kwargs)", "def rectangle(self, char, left, top, width, height, filled=False, thickness=1):\n\n if thickness != 1:\n raise NotImplementedError('The pytextcanvas module is under development and the filled, thickness, and endcap parameters are not implemented. 
You can contribute at https://github.com/asweigart/pytextcanvas')\n\n pointsIterable = pybresenham.rectangle(left, top, width, height, filled, thickness)\n self.points(char, pointsIterable)", "def draw_rectangle(overlay, key, color):\n draw = ImageDraw.Draw(overlay)\n for pttype in locations[key][2]:\n ptcent = [WORKSIZE[0]*locations[key][0], WORKSIZE[1]*locations[key][1]]\n ptsize = [WORKSIZE[0]*interface[pttype][0]*0.5, WORKSIZE[1]*interface[pttype][1]*0.5]\n coord1 = [ptcent[0]-ptsize[0], ptcent[1]-ptsize[1]]\n coord2 = [ptcent[0]+ptsize[0], ptcent[1]+ptsize[1]]\n if interface[pttype][2] == 'color':\n draw.rectangle([coord1[0], coord1[1], coord2[0], coord2[1]], fill=color)\n elif interface[pttype][2] == 'transparent':\n draw.rectangle([coord1[0], coord1[1], coord2[0], coord2[1]], fill=(0, 0, 0, 0))\n del draw", "def psychopy_rectangle(\n window,\n x=0,\n y=0,\n size_width=1,\n size_height=1,\n rotate=0,\n color=\"black\",\n outline=0,\n outline_color=\"black\",\n alpha=1,\n adjust_width=False,\n adjust_height=False,\n **kwargs,\n):\n # Try loading psychopy\n try:\n from psychopy import visual\n except ImportError:\n raise ImportError(\n \"The 'psychopy' module is required for this function to run. \",\n \"Please install it first (`pip install PsychoPy`).\",\n )\n\n # Adjust size for screen ratio\n if adjust_width is True:\n size_width = size_width * (window.size[1] / window.size[0])\n if adjust_height is True:\n size_height = size_height * (window.size[0] / window.size[1])\n\n # Get coordinates\n x1, y1, x2, y2 = _coord_rectangle(image=window, x=x, y=y, size_width=size_width,\n size_height=size_height, method=\"psychopy\")\n\n # Rectangle parameters\n rect = visual.Rect(\n win=window,\n units='pix',\n width=x2-x1,\n height=y2-y1,\n fillColor=color,\n lineWidth=outline,\n **kwargs,\n )\n x = (x1 + x2)/2\n y = (y1 + y2)/2\n rect.pos = [x-window.size[0]/2, y-window.size[1]/2]\n rect.lineColor = outline_color\n\n # Alpha\n if alpha > 0:\n rect.opacity = alpha\n\n # Orientation\n if rotate != 0:\n rect.ori = rotate\n\n # Display\n rect.draw()", "def draw_shape_rounded_rectangle(self, rect, xform, colour):\n for shape in rect.as_arcs_lines():\n getattr(self, 'draw_shape_%s' % shape.type)(shape, xform, colour)", "def DrawRoundedRectangleRect(*args, **kwargs):\n return _gdi_.DC_DrawRoundedRectangleRect(*args, **kwargs)", "def square(self, char, left, top, length, filled=False, thickness=1):\n pointsIterable = pybresenham.rectangle(left, top, length, length, filled, thickness)\n self.points(char, pointsIterable)", "def draw_rect(self, color, position, size=None,\n border_width=0, anchor='topleft'):\n if size is None:\n rect = spyral.Rect(position)\n else:\n rect = spyral.Rect(position, size)\n offset = self._calculate_offset(anchor, rect.size)\n pygame.draw.rect(self._surf, color,\n (rect.pos + offset, rect.size), border_width)\n self._version += 1\n spyral.util.scale_surface.clear(self._surf)\n return self", "def draw_rect_filled(self, x, y, w, h, color=None, aa=False):\n for i in range(x, x + w):\n self._draw_fast_vline(i, y, h, color, aa)", "def draw_box(stdscr, y, x, height, width, mode=0):\n if mode == 0:\n stdscr.addstr(y, x, \"โ”Œ\" + \"โ”€\" * (width - 1) + \"โ”\")\n stdscr.addstr(y + height, x, \"โ””\" + \"โ”€\" * (width - 1) + \"โ”˜\")\n for i in range(y + 1, y + height):\n stdscr.addstr(i, x, \"โ”‚\")\n stdscr.addstr(i, x + width, \"โ”‚\")\n if mode == 1:\n stdscr.addstr(y, x, \"โ•ญ\" + \"โ”€\" * (width - 1) + \"โ•ฎ\")\n stdscr.addstr(y + height, x, \"โ•ฐ\" + \"โ”€\" * (width - 1) + 
\"โ•ฏ\")\n for i in range(y + 1, y + height):\n stdscr.addstr(i, x, \"โ”‚\")\n stdscr.addstr(i, x + width, \"โ”‚\")\n if mode == 2:\n stdscr.addstr(y, x, \"โ•”\" + \"โ•\" * (width - 1) + \"โ•—\")\n stdscr.addstr(y + height, x, \"โ•š\" + \"โ•\" * (width - 1) + \"โ•\")\n for i in range(y + 1, y + height):\n stdscr.addstr(i, x, \"โ•‘\")\n stdscr.addstr(i, x + width, \"โ•‘\")", "def aa_round_rect(surface, rect, color, rad=20, border=0, inside=(0, 0, 0)):\n rect = pg.Rect(rect)\n _aa_render_region(surface, rect, color, rad)\n if border:\n rect.inflate_ip(-2 * border, -2 * border)\n _aa_render_region(surface, rect, inside, rad)", "def proxy_rect(**kwargs):\r\n return matplotlib.patches.Rectangle((0, 0), 1, 1, **kwargs)", "def _draw_rectangle(data, obj, draw_options):\n # Objects with labels are plot objects (from bar charts, etc). Even those without\n # labels explicitly set have a label of \"_nolegend_\". Everything else should be\n # skipped because they likely correspong to axis/legend objects which are handled by\n # PGFPlots\n label = obj.get_label()\n if label == \"\":\n return data, []\n\n # Get actual label, bar charts by default only give rectangles labels of\n # \"_nolegend_\". See <https://stackoverflow.com/q/35881290/353337>.\n handles, labels = obj.axes.get_legend_handles_labels()\n labelsFound = [\n label for h, label in zip(handles, labels) if obj in h.get_children()\n ]\n if len(labelsFound) == 1:\n label = labelsFound[0]\n\n left_lower_x = obj.get_x()\n left_lower_y = obj.get_y()\n ff = data[\"float format\"]\n do = \",\".join(draw_options)\n right_upper_x = left_lower_x + obj.get_width()\n right_upper_y = left_lower_y + obj.get_height()\n cont = (\n f\"\\\\draw[{do}] (axis cs:{left_lower_x:{ff}},{left_lower_y:{ff}}) \"\n f\"rectangle (axis cs:{right_upper_x:{ff}},{right_upper_y:{ff}});\\n\"\n )\n\n if label != \"_nolegend_\" and label not in data[\"rectangle_legends\"]:\n data[\"rectangle_legends\"].add(label)\n cont += \"\\\\addlegendimage{{ybar,ybar legend,{}}};\\n\".format(\n \",\".join(draw_options)\n )\n cont += f\"\\\\addlegendentry{{{label}}}\\n\\n\"\n return data, cont", "def rect(self, x, y, w, h, cls=None, style=None):\n x, y, w, h = self._meta.units(x, y, w, h)\n cls_str = 'class=\"%s\" ' % cls if cls else ''\n style_str = 'style=\"%s\" ' % self._meta.make_style(style) if style else ''\n self.elements.append(\"\"\"\n <rect x=\"%s\" y=\"%s\" width=\"%s\" height=\"%s\" %s%s/>\n \"\"\".strip() % (\n x, y, w, h, cls_str, style_str\n ))", "def rect(self, x, y, w, h, cls=None, style=None):\n x, y, w, h = self._meta.units(x, y, w, h)\n cls_str = 'class=\"%s\" ' % cls if cls else ''\n style_str = 'style=\"%s\" ' % self._meta.make_style(style) if style else ''\n self.elements.append(\"\"\"\n <rect x=\"%s\" y=\"%s\" width=\"%s\" height=\"%s\" %s%s/>\n \"\"\".strip() % (\n x, y, w, h, cls_str, style_str\n ))", "def snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):\n arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color)", "def decorate(self):\n\n c = self.canvas\n c.rect(20, 20, 20, 20, fill=1) # bt lf\n c.rect(self.width - 40, 20, 20, 20, fill=1) # bt rt\n c.rect(20, self.height - 40, 20, 20, fill=1) # tp lf\n c.rect(self.width/2 - 10, 20, 20, 20, fill=1) # bt md\n c.rect(20, self.height/2 - 10, 20, 20, fill=1) # md lf\n c.rect(self.width - 40, self.height - 40, 20, 20, fill=1) # tp rt\n c.rect(self.width - 40, self.height/2 - 10, 20, 20, fill=1) # md rt", "def render(self, context):\n pygame.draw.rect(context, (255, 0, 
0), self.box)", "def drawRect (self, x, y, w, h, colour):\r\n for i in range (y,y+h):\r\n row = self.image [i]\r\n\r\n for j in range (x,x+w):\r\n row [j] = colour", "def test_draw_tile_background(self):\n screen = utils.init_game()\n rect_obj = utils.draw_tile_background(screen)\n self.assertIsInstance(rect_obj, pg.Rect)", "def draw_rounded_rect(self, context, x, y, width, height, radius, lineWidth):\n from math import pi\n degrees = pi / 180\n\n context.set_line_width(lineWidth)\n context.set_source_rgba(0.5, 0.0, 0.0, 1.0) # Red\n\n # cr.new_sub_path()\n context.arc(x + width - radius, y + radius, radius, -90 * degrees, 0 * degrees)\n context.arc(x + width - radius, y + height - radius, radius, 0 * degrees, 90 * degrees)\n context.arc(x + radius, y + height - radius, radius, 90 * degrees, 180 * degrees)\n context.arc(x + radius, y + radius, radius, 180 * degrees, 270 * degrees)\n context.close_path()\n context.stroke_preserve()\n context.set_source_rgba(0.0, 0.5, 0.5, 1.0)\n # and use it to fill the path (that we had kept)\n context.fill()\n context.stroke()", "def __draw(self, screen):\n\n pygame.draw.rect(screen, (200, 255, 200), (self.x, self.y, self.width, self.height))", "def DrawRoundedRectanglePointSize(*args, **kwargs):\n return _gdi_.PseudoDC_DrawRoundedRectanglePointSize(*args, **kwargs)", "def draw(self, cr, width, height):\n cr.set_source_rgb(0, 0, 0)\n cr.rectangle(0, 0, width, height)\n cr.fill()", "def render_rectangle_filled(shape, center_x, center_y, color, tilt_angle=0):\n # Set color\n if len(color) == 4:\n GL.glColor4ub(shape.color[0], shape.color[1], shape.color[2],\n shape.color[3])\n GL.glEnable(GL.GL_BLEND)\n GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)\n elif len(color) == 3:\n GL.glDisable(GL.GL_BLEND)\n GL.glColor4ub(shape.color[0], shape.color[1], shape.color[2], 255)\n\n GL.glBindBuffer(GL.GL_ARRAY_BUFFER, shape.vbo_id)\n GL.glVertexPointer(2, GL.GL_FLOAT, 0, 0)\n\n GL.glLoadIdentity()\n GL.glTranslatef(center_x + shape.width / 2, center_y + shape.height / 2, 0)\n if tilt_angle != 0:\n GL.glRotatef(tilt_angle, 0, 0, 1)\n\n GL.glDrawArrays(GL.GL_QUADS, 0, shape.size)", "def paint(self, draw, x, y, w, h):\n\t\tpass", "def _draw_square(self, left_x, top_y, side, color, fill):\n self.pen.up()\n self.pen.color(color)\n self.pen.goto(left_x, top_y)\n self.pen.down()\n self.pen.begin_fill()\n for _ in range(4):\n self.pen.forward(side)\n self.pen.right(90)\n self.pen.end_fill()", "def addRoundRect(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads\r\n pass", "def draw(self):\n if self.master != None :\n fill = self.fill\n #fill = Cell.FILLED_COLOR_BG\n outline = Cell.EMPTY_COLOR_BORDER\n\n #if not self.fill:\n # fill = Cell.EMPTY_COLOR_BG\n # outline = Cell.EMPTY_COLOR_BORDER\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = fill, outline = outline)", "def rounded_rectangle(src: np.array, top_left: tuple, bottom_right: tuple, cornerRadius: int = cornerRadius, color: tuple = (255,255,255), thickness: int = 1, lineType: int=cv2.LINE_AA) -> Any:\r\n # corners:\r\n # p1 - p2\r\n # | |\r\n # p4 - p3\r\n\r\n p1 = Point(top_left[0], top_left[1])\r\n p2 = Point(bottom_right[0], top_left[1])\r\n p3 = Point(bottom_right[0], bottom_right[1])\r\n p4 = Point(top_left[0], bottom_right[1])\r\n\r\n # Fill\r\n if thickness < 0:\r\n main_rect = [Point(p1.x + cornerRadius, p1.y), Point(p3.x - cornerRadius, 
p3.y)]\r\n left_rect = [Point(p1.x + cornerRadius, p1.y + cornerRadius), Point(p4.x, p4.y - cornerRadius)]\r\n right_rect = [Point(p2.x - cornerRadius, p2.y + cornerRadius), Point(p3.x, p3.y - cornerRadius)]\r\n\r\n [cv2.rectangle(src, rect[0].toTuple(), rect[1].toTuple(), color, thickness) for rect in [main_rect, left_rect, right_rect]]\r\n\r\n # Outline\r\n cv2.line(src, (p1.x+cornerRadius,p1.y), (p2.x-cornerRadius,p2.y), color, abs(thickness), lineType);\r\n cv2.line(src, (p2.x,p2.y+cornerRadius), (p3.x,p3.y-cornerRadius), color, abs(thickness), lineType);\r\n cv2.line(src, (p4.x+cornerRadius,p4.y), (p3.x-cornerRadius,p3.y), color, abs(thickness), lineType);\r\n cv2.line(src, (p1.x,p1.y+cornerRadius), (p4.x,p4.y-cornerRadius), color, abs(thickness), lineType);\r\n\r\n # Arc\r\n cv2.ellipse(src, (p1+Point(cornerRadius, cornerRadius)).toTuple(), (cornerRadius, cornerRadius), 180.0, 0, 90, color, thickness, lineType);\r\n cv2.ellipse(src, (p2+Point(-cornerRadius, cornerRadius)).toTuple(), (cornerRadius, cornerRadius), 270.0, 0, 90, color, thickness, lineType);\r\n cv2.ellipse(src, (p3+Point(-cornerRadius, -cornerRadius)).toTuple(), (cornerRadius, cornerRadius), 0.0, 0, 90, color, thickness, lineType);\r\n cv2.ellipse(src, (p4+Point(cornerRadius, -cornerRadius)).toTuple(), (cornerRadius, cornerRadius), 90.0, 0, 90, color, thickness, lineType);", "def draw(self, color, x, y) -> None:\n y_pos = y * self.scale\n x_pos = x * self.scale\n pg.draw.rect(self.screen, color, [x_pos, y_pos, self.scale-self.offset, self.scale-self.offset])", "def rect(self, xi: int, yi: int, xf: int, yf: int, color: int, fill=False):\n width = xf - xi\n height = yf - yi\n\n if fill:\n for x in range(xi, xf + 1):\n for y in range(yi, yf + 1):\n self.pixel(x, y, color)\n else:\n self.hline(xi, yi, width, color)\n self.hline(xi, yf, width, color)\n self.vline(xi, yi, height, color)\n self.vline(xf, yi, height, color)", "def draw_rectangle(self, color, left_angle: Point, width, height):\n _lta = self.T.itrans(left_angle)\n pg.draw.rect(self.screen, color, (_lta()[0], _lta()[1], width, height))", "def box_to_rect(box, color, linewidth=3):\r\n box = box.asnumpy()\r\n return plt.Rectangle(\r\n (box[0], box[1]), box[2]-box[0], box[3]-box[1],\r\n fill=False, edgecolor=color, linewidth=linewidth)", "def DrawRoundedRectanglePointSize(*args, **kwargs):\n return _gdi_.DC_DrawRoundedRectanglePointSize(*args, **kwargs)", "def draw_bounds():\n\n pass", "def fill(self, x, y, width=None, height=None, char=None,\n fg=(255, 255, 255), bg=None):\n self.console.draw_rect(x, y, width, height, char, fg, bg)" ]
[ "0.7395274", "0.72937334", "0.7197906", "0.71213543", "0.70573896", "0.7036794", "0.7014493", "0.7004895", "0.6928151", "0.69037634", "0.68924004", "0.6890206", "0.68809015", "0.68503785", "0.6802015", "0.67864347", "0.6693077", "0.66908354", "0.66905236", "0.6684021", "0.6673184", "0.66610646", "0.6660451", "0.6637953", "0.66249543", "0.6619069", "0.66161364", "0.66083", "0.6605865", "0.6603613", "0.6583787", "0.6544242", "0.6544242", "0.65380996", "0.6536124", "0.652704", "0.6525573", "0.6521757", "0.6511344", "0.65018564", "0.64963436", "0.6471824", "0.64714956", "0.64654875", "0.6459462", "0.6448237", "0.64426464", "0.64419395", "0.64402837", "0.64327586", "0.64320105", "0.6428277", "0.64200026", "0.6405028", "0.6397263", "0.6392878", "0.63878876", "0.6377277", "0.63620484", "0.6359243", "0.6358957", "0.6355024", "0.63396645", "0.63150537", "0.63019156", "0.6290306", "0.6272352", "0.62695134", "0.62657595", "0.6260317", "0.62559265", "0.62332785", "0.6230599", "0.62237674", "0.6204215", "0.620305", "0.6198316", "0.6198316", "0.61735666", "0.6169771", "0.6169466", "0.61668056", "0.613526", "0.61336756", "0.6132884", "0.6121716", "0.6120631", "0.61116534", "0.61116", "0.61088645", "0.61072654", "0.6085701", "0.6081869", "0.6078226", "0.6055362", "0.60494787", "0.6040475", "0.6037788", "0.60286003", "0.6027887" ]
document_score: 0.7258826
document_rank: 2
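The stored rank appears consistent with the 0-indexed position of document_score among the negative_scores above: exactly two negatives (0.7395274 and 0.72937334) outscore 0.7258826, matching a rank of 2. A minimal sketch of that assumed derivation:

def derive_document_rank(document_score, negative_scores):
    # Count negatives that outscore the positive document (0-indexed rank).
    # This derivation is an assumption inferred from the values in this row.
    score = float(document_score)
    return sum(1 for s in negative_scores if float(s) > score)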
Gifts a bottle of cola!
async def cola(
    client,
    event,
    user: ('user', 'To who?') = None,
):
    if user is None:
        source_user = client
        target_user = event.user
    else:
        source_user = event.user
        target_user = user

    return Embed(description = f'{source_user:f} just gifted a bottle of cola to {target_user:f} !')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def climb(self):\n print(\"Inside WoodElf.climb\")", "def cool(self):\n self.t = self.t - 1", "def jump(self):\r\n if self.grounded == True:\r\n self.vel.y = -13", "def bounce(self, diff):\r\n\r\n self.direction = (180 - self.direction) % 360\r\n self.direction -= diff", "def bounce(self, diff):\n \n self.direction = (180 - self.direction) % 360\n self.direction -= diff", "def hill_climb(self, velocity, angle):\n\n velocity *= (5/18) # k/h to m/s\n power = self.mass * gravity * velocity * math.sin(angle * (math.pi)/180) # watts\n \n if power < 0:\n return 0\n\n return power # W", "def turn(self):\n pass", "def moveBolts(self, timer):\n for i in self.get_bolts():\n i.y+=i.get_velocity()", "def knock_back(self):\n FORWARD_VEL = -2\n\n self.rect.x += self.x_vel\n\n if self.name == 'player':\n if self.rect.x >= (self.origin_pos[0] + 10):\n self.x_vel = FORWARD_VEL\n elif self.rect.x <= self.origin_pos[0]:\n self.rect.x = self.origin_pos[0]\n self.state = 'battle resting'\n self.x_vel = 0\n else:\n if self.rect.x <= (self.origin_pos[0] - 10):\n self.x_vel = 2\n elif self.rect.x >= self.origin_pos[0]:\n self.rect.x = self.origin_pos[0]\n self.state = 'battle resting'\n self.x_vel = 0", "def myBarbieHome(pos):\n bulldozer(pos)\n ground(pos)\n mc.postToChat(\"Ground done !\")\n\n pos.z += 5\n makeTheHouse(pos)\n mc.postToChat(\"House done !\")\n\n theRoof(pos)\n mc.postToChat(\"Roof done !\")\n\n makeTheDeco(pos, flowers = wFlower_Cyan)\n mc.postToChat(\"ALL Work done !\")", "def move_lift_ground():\n return _move_lift(0)", "def bowling_two(balls, frame=1):\n\n if len(balls) <= 2 or frame == 10:\n return sum(balls)\n elif balls[0] == 10:\n return balls[0] + balls[1] + balls[2] + bowling_two(balls[1:], frame+1) # strike\n elif balls[0] + balls[1] == 10:\n return balls[0] + balls[1] + balls[2] + bowling_two(balls[2:], frame+1) # spare\n else:\n return balls[0] + balls[1] + bowling_two(balls[2:], frame+1) # open", "def passBall(state, tm, powerPasse, thetaPasse, coeffPushUp):\n dest = tm.position\n vitesse = tm.vitesse.copy().normalize()\n if vitesse.dot(state.attacking_vector) < 0.:\n vitesse = (-0.5) * vitesse\n dest += coeffPushUp*vitesse\n return kickAt(state, dest, passPower(state, dest, powerPasse, thetaPasse))", "def move_lift_up():\n return _move_lift(1)", "def movement(self):", "def bat_update(self, player):\n if self.bounce_count > 0: #TEMP\n self.bounce()\n return\n target = player.current_tile()\n if(target != None):\n self.moveTowards(player.current_tile())", "def make_move(B, cur_monkey_pos, cur_num_balloons, cur_num_lives, move):\n\n def check_lose(B, cur_monkey_pos):\n \"\"\"\n Args:\n B (tuple): board configuration\n cur_monkey_pos (int): current column position of the monkey\n Output:\n bool: True if a balloon will hit the monkey when the balloons shift down; False otherwise\n \"\"\"\n assert B[-1][cur_monkey_pos] == \"x\"\n if B[-2][cur_monkey_pos] != 0:\n return True\n return False\n\n def shift_down(B, cur_monkey_pos, cur_num_lives):\n \"\"\"\n Just performs the shift of all the balloons downwards.\n Args:\n B (tuple): board configuration\n cur_monkey_pos (int): current column position of the monkey\n cur_num_lives (int): current number of lives in this configuration\n Output:\n (tuple, int): tuple consisting of the board configuration after balloons have all moved\n down by 1 and the new number of lives (or None if the monkey gets hit)\n \"\"\"\n\n if check_lose(B, cur_monkey_pos):\n return None\n\n new_board = []\n new_num_lives = cur_num_lives\n\n # construct the top row: 
if the balloon hits the ground, it respawns with +1 and we lose a life\n new_num_lives -= sum(1 for b in B[-2] if b > 0)\n top_row = tuple((b + 1 if 0 < b < 3 else b) for b in B[-2])\n new_board.append(top_row)\n\n # move all the middle rows down\n new_board.extend([r for r in B[:-2]])\n\n # add the ground row: nothing changes\n new_board.append(B[-1])\n\n return (tuple(new_board), new_num_lives)\n\n def partial_move(B, cur_monkey_pos, cur_num_balloons, move):\n \"\"\"\n Just performs the move, without the shift downwards\n Args:\n B (tuple): board configuration\n cur_monkey_pos (int): current column position of the monkey\n cur_num_balloons (int): current number of balloons on the board\n move (str): the proposed move (one of 'left', 'right', 'shoot')\n Output:\n (tuple, int, int): A tuple consisting of the board configuration after the move,\n the new monkey position, and the new number of balloons on the map\n (or None if invalid move)\n \"\"\"\n\n assert B[-1][cur_monkey_pos] == \"x\"\n R = len(B)\n C = len(B[0])\n\n new_board = [r for r in B[:-1]]\n new_bottom_row = [0 for _ in range(C)]\n new_monkey_pos = cur_monkey_pos\n new_num_balloons = cur_num_balloons\n\n if move == \"left\":\n if new_monkey_pos == 0:\n return None\n new_monkey_pos -= 1\n elif move == \"right\":\n if new_monkey_pos == C - 1:\n return None\n new_monkey_pos += 1\n elif move == \"shoot\":\n # simulate the dart\n for row in range(R - 2, -1, -1):\n if B[row][new_monkey_pos] != 0:\n new_row = list(B[row])\n new_row[new_monkey_pos] -= 1\n if new_row[new_monkey_pos] == 0:\n new_num_balloons -= 1\n new_board[row] = tuple(new_row)\n break\n else:\n assert False, \"invalid move: \" + move\n\n new_bottom_row[new_monkey_pos] = \"x\"\n new_board.append(tuple(new_bottom_row))\n return (tuple(new_board), new_monkey_pos, new_num_balloons)\n\n # make the move\n move_res = partial_move(B, cur_monkey_pos, cur_num_balloons, move)\n if move_res is None: # invalid move\n return None\n move_board, new_monkey_pos, new_num_balloons = move_res # unpack\n\n # shift all the balloons down\n shift_res = shift_down(move_board, new_monkey_pos, cur_num_lives)\n if shift_res is None: # check if a balloon hit the monkey\n return None\n new_board, new_num_lives = shift_res # unpack\n return (new_board, new_monkey_pos, new_num_balloons, new_num_lives)", "def jump(distance):\r\n t.penup()\r\n t.forward(200)\r\n t.pendown()\r\n return None", "def move_lift_down():\n return _move_lift(0.2)", "def jump(self):\n if (self.falling or self.rising) and self.doubleJump:\n self.speed_y = -20 # //////Aquรญ se cambia la velocidad incial cuando se salta//////\n self.fallin = False\n self.rising = True\n self.doubleJump = False\n\n if not self.falling and not self.rising:\n self.speed_y = -20 # //////Aquรญ se cambia la velocidad incial cuando se salta//////\n self.rising = True", "def jump(self):\n\t\tself.vel = -10\n\t\tself.tick_count = 0\n\t\tself.height = self.y", "def sling_action():\n global mouse_distance\n global rope_length\n global angle\n global mouse_x_pos\n global mouse_y_pos\n\n #add code inside sling function\n # Fixing bird to the sling rope\n vec = vector((initial_x_sling, initial_y_sling), (mouse_x_pos, mouse_y_pos))\n unit_vec = unit_vector(vec)\n uv_1 = unit_vec[0]\n uv_2 = unit_vec[1]\n mouse_distance = distance(initial_x_sling, initial_y_sling, mouse_x_pos, mouse_y_pos) #point at which currrent bird id\n fix_pos = (uv_1*rope_length+initial_x_sling, uv_2*rope_length+initial_y_sling)\n highest_length = 102 #when stretched\n\n #to make bird 
stay within rope\n x_redbird = mouse_x_pos - 20\n y_redbird = mouse_y_pos - 20\n if mouse_distance > rope_length:\n pux, puy = fix_pos\n pux -= 20\n puy -= 20\n first_pos = pux, puy\n screen.blit(redbird, first_pos)\n second_pos = (uv_1*highest_length+initial_x_sling, uv_2*highest_length+initial_y_sling) #current position\n pygame.draw.line(screen, (255, 0, 0), (next_x_sling, next_y_sling), second_pos, 5) #catapult rope\n screen.blit(redbird, first_pos)\n pygame.draw.line(screen, (255, 0, 0), (initial_x_sling, initial_y_sling), second_pos, 5) #ANOTHER SIDE of catapult\n else:\n #when not fully stretched\n mouse_distance += 10\n third_pos = (uv_1*mouse_distance+initial_x_sling, uv_2*mouse_distance+initial_y_sling)\n pygame.draw.line(screen, (0, 0, 0), (next_x_sling, next_y_sling), third_pos, 5)\n screen.blit(redbird, (x_redbird, y_redbird))\n pygame.draw.line(screen, (0, 0, 0), (initial_x_sling, initial_y_sling), third_pos, 5)\n # Angle of impulse\n\n change_in_y = mouse_y_pos - initial_y_sling\n change_in_x = mouse_x_pos - initial_x_sling\n if change_in_x == 0:\n dx = 0.00000000000001\n angle = math.atan((float(change_in_y))/change_in_x)", "def Oink(times=1):\n \n print \"Oink\" + \" oink\" * (int(times) - 1) + \"!\"", "def ride(self):\n print(\"Riding\")\n self.miles += 10\n return self", "def grow_fungi(self, wall):\n if self.direction == 1:\n ledge_fungus = FirstLedge(self.rect.centery, self.room, wall, 'right')\n self.room.can_climb.add(ledge_fungus)\n else:\n ledge_fungus = FirstLedge(self.rect.centery, self.room, wall, 'left')\n self.room.can_climb.add(ledge_fungus)", "def walk(self):\n self.speed = self.speed + (0.2 * self.legs)", "def tick():\n move_balls(targets_speed)\n move_super_balls(targets_speed * 2)", "def sprinkler(l):\n t.right(90)\n t.forward(l / 2)\n t.right(-90)\n t.circle(l / 2)\n t.circle(- l / 2)\n t.left(90)\n t.forward(l / 2)\n t.right(90)\n t.forward(l)\n t.right(90)\n t.forward(l / 2)\n t.right(-90)\n t.circle(l / 2)\n t.circle(- l / 2)", "def sidebounce(self):\r\n self.dx=-self.dx", "def jump(self):\n self.vy = -9", "def tick(self):\n time.sleep(self.sleep_time)\n self.time += 1\n print(\"[Turn \" + str(self.time) + \"] Tick tock...\")\n directions = [(0, -1), (1, -1), (1, 0), (1, 1), (0, 1), (-1, 1), (-1, 0), (-1, -1)]\n for i in range(len(self.robots)):\n self.robots[i][2] = (self.robots[i][2] + self.robots[i][3]) % 8\n self.robots[i][3] = 0\n self.robots[i][0] += directions[self.robots[i][2]][0]\n self.robots[i][1] += directions[self.robots[i][2]][1]\n if self.robots[i][0] < 0 or self.robots[i][0] >= self.width or \\\n self.robots[i][1] < 0 or self.robots[i][1] >= self.height:\n self.robots = []\n raise RobotWallCrashException # A robot crashed into a wall! Simulation over!\n for j in range(len(self.robots)):\n if i != j:\n if self.robots[i][0] == self.robots[j][0] and self.robots[i][1] == self.robots[j][1]:\n self.robots = []\n raise RobotCollisionException # A robot crashed into another robot! Simulation over!\n for j in range(len(self.items)):\n if self.robots[i][0] == self.items[j][0] and self.robots[i][1] == self.items[j][1]:\n if self.items[j][2] == 1:\n self.robots = []\n raise RobotFoundTreasureException # A robot found the treasure! 
You win!\n elif self.items[j][2] == 2:\n self.robots = []\n raise RobotObjectCrashException # A robot crashed into an object!\n if random.random() > self.reliability:\n print(\"*glug-glug-glug* Oil leak detected!\")\n self.items.append([self.robots[i][0], self.robots[i][1], 2])", "def moveturtle(x,y,t):\n t.penup()\n t.goto(x,y)\n t.pendown()", "def cutwire(self, irc, msg, args, channel, cutWire):\n channel = ircutils.toLower(channel)\n try:\n if not self.bombs[channel].active:\n return\n if not ircutils.nickEqual(self.bombs[channel].victim, msg.nick):\n irc.reply('You can\\'t cut the wire on someone else\\'s bomb!')\n return\n self.bombs[channel].cutwire(irc, cutWire)\n except KeyError:\n pass\n irc.noReply()", "def brachistrone(bdist):\n myShip.ship_time_from_d(bdist)\n myShip.setvar(\"stime\", 2*myShip.stime)\n myShip.velocity_self()\n myplanet.ptime_from_ship_time(myShip.accel, myShip.stime)\n myShip.gamma_self()", "def action(self, colony):\n \n if not self.is_true_king:\n self.reduce_armor(self.armor)\n else:\n super().action(colony)\n check_place = self.place.exit\n while check_place is not None:\n dragon = check_place.dragon\n if dragon is not None and dragon not in self.buffed_dragons:\n check_place.dragon.damage *=2\n self.buffed_dragons.append(check_place.dragon)\n if dragon is not None and dragon.is_container:\n contained_dragon = dragon.contained_dragon\n if contained_dragon is not None and contained_dragon not in self.buffed_dragons:\n check_place.dragon.contained_dragon.damage *=2\n self.buffed_dragons.append(check_place.dragon.contained_dragon)\n check_place = check_place.exit", "def bonus_food(self):\n self.penup()\n self.shape(\"turtle\")\n self.color(\"red\")\n self.x_cordinates = random.randint(-210, 210)\n self.y_cordinates = random.randint(-210, 210)\n self.goto(self.x_cordinates, self.y_cordinates)\n print(f\"This Is Bonus Food {self.x_cordinates} and {self.y_cordinates}\")", "def increase_car_speed(self):\r\n self.car_speed += 5", "def make_coffee(self, drink):\n for ingredient in drink.ingredients:\n self.resources[ingredient] -= drink.ingredients[ingredient]\n self.profit += drink.cost", "def shift_balance(mp, standing_leg):\n pass", "def jump(self):\n\t\tself._is_falling = True\n\t\tself._dy = -5", "def make_coffee(self, order):\n for item in order.ingredients:\n self.resources[item] -= order.ingredients[item]\n print(f\"Here is your {order.name} โ˜•๏ธ. 
Enjoy!\")", "def u_turn(self, direction, diameter_in):\n \n# pdb.set_trace()\n # Calculate radius of turn for the inside wheel.\n r_in = diameter_in / 2\n\n # Outside radius is 20 inches from inside radius.\n r_out = r_in + MuleBot.WHEEL_BASE_LENGTH\n \n # Outside travel distance\n travel = r_out * 3.14159\n travel_revolutions = travel / MuleBot.CIRCUM_IN\n \n r_ratio = r_out / r_in\n #r_ratio_half = r_ratio / 2\n\n speed_multiplier = MuleBot.MAX_RPM / r_ratio\n\n outside_rpm = r_ratio * speed_multiplier\n inside_rpm = speed_multiplier\n \n \n # \n # minutes at outside_rpm\n minutes = travel_revolutions / outside_rpm\n seconds = minutes * MuleBot.SECONDS_PER_MINUTE\n \n # Something isn't quite perfect.\n if direction == 'left':\n if diameter_in < 25:\n seconds -= 1\n else:\n seconds -= 2\n else:\n if diameter_in < 25:\n seconds += 1\n else:\n seconds += 2\n\n if direction == 'left':\n v_l = self.rpm_to_rps(inside_rpm)\n v_r = self.rpm_to_rps(outside_rpm)\n else:\n v_r = self.rpm_to_rps(inside_rpm)\n v_l = self.rpm_to_rps(outside_rpm)\n\n #print(\"2inside: rpm: \", inside_rpm)\n #print(\"2outside: rpm: \", outside_rpm)\n \n #print(\"2.1: v_l: \", v_l)\n #print(\"2.1: v_r: \", v_r)\n\n # Set wheel drive rates.\n self.set_wheel_drive_rates(v_l, v_r)\n\n # Sleep during the turn.\n time.sleep(seconds)\n\n # Stop\n self.stop()\n \n # Move forward 24 inches.\n self.forward(24)", "def theRoof(pos, blockTypeMain = wool , mainColor=wPurple, replaceGlass = wGlass):\n \n # try again the same trick to add the roof\n # Middle part\n for i in range(0,12,1):\n iy = i\n if i >= 6:\n iy=11-i\n #print i, iy\n mc.setBlocks(pos.x-4+i, pos.y+10+iy, pos.z+4,\n pos.x-4+i, pos.y+10+iy, pos.z+29, blockTypeMain, mainColor)\n\n # RIGHT SIDE of the house\n for ii in range(0,3,1):\n mc.setBlocks(pos.x-5+ii, pos.y+9+ii, pos.z+5+ii,\n pos.x-13+ii, pos.y+9+ii, pos.z+29-ii, blockTypeMain, mainColor)\n #Remove the blocks\n\n material = air\n if ii >=2 :\n material = replaceGlass\n mc.setBlocks(pos.x-5+ii, pos.y+9+ii, pos.z+8,\n pos.x-11+ii, pos.y+9+ii, pos.z+26-ii, material)\n \n # and LEFT side of the house\n xAdjust = 21\n for ii in range(0,3,1):\n mc.setBlocks(pos.x-5-ii+xAdjust, pos.y+9+ii, pos.z+5+ii,\n pos.x-13-ii+xAdjust, pos.y+9+ii, pos.z+29-ii, blockTypeMain, mainColor)\n #Remove the blocks\n\n material = air\n if ii >=2 :\n material = replaceGlass\n mc.setBlocks(pos.x-7-ii+xAdjust, pos.y+9+ii, pos.z+8,\n pos.x-13-ii+xAdjust, pos.y+9+ii, pos.z+26-ii, material)", "def increase_radius(self, character):\n character.bombradius += 1", "def bounce(x):\r\n \r\n # Velocity lost after a bounce\r\n Cx = 0.8\r\n Cy = 0.7\r\n \r\n # Flip x and y velocity\r\n if x[3] > 0:\r\n x[2] = -x[2] * Cx\r\n x[3] = - x[3] * Cy", "def eat_sugar(self):\n self.amount = 0", "def move_boats():\n hit_manatee = False\n for i in range(len(map)-1, -1, -1):\n for j in range(len(map[i])-1, -1, -1):\n if map[i][j] == \"*\":\n # Only runs if the entity is a boat\n if i + 1 >= len(map):\n continue\n if map[i+1][j] == \" \":\n # Moves boat downward if possible\n if i + 2 < len(map) and map[i+2][j] == \"M\":\n hit_manatee = True\n map[i+2][j] = \"W\"\n map[i+1][j] = \"*\"\n map[i][j] = \" \"\n elif map[i+1][j] == \"*\":\n # Boats colliding with each other\n new_boat_pos = (i, j)\n if j + 1 < len(map[i]) and map[i][j+1] == \" \" \\\n and map[i+1][j+1] == \" \":\n new_boat_pos = (i+1, j+1)\n elif j - 1 >= 0 and map[i][j-1] == \" \" \\\n and map[i+1][j-1] == \" \":\n new_boat_pos = (i+1, j-1)\n else:\n continue\n\n # Moves boat down to new position\n 
map[i][j] = \" \"\n map[new_boat_pos[0]][new_boat_pos[1]] = \"*\"\n if new_boat_pos[0] + 1 < len(map) and \\\n map[new_boat_pos[0] + 1][new_boat_pos[1]] == \"M\":\n hit_manatee = True\n map[new_boat_pos[0] + 1][new_boat_pos[1]] = \"W\"\n return \"injured\" if hit_manatee else None", "def snowflake(turtle, length, depth):\r\n if depth == 0:\r\n turtle.move_forward(length)\r\n else:\r\n snowflake(turtle, length * 0.333, depth-1)\r\n turtle.turn_left(60)\r\n snowflake(turtle, length * 0.333, depth-1)\r\n turtle.turn_right(120)\r\n snowflake(turtle, length * 0.333, depth-1)\r\n turtle.turn_left(60)\r\n snowflake(turtle, length * 0.333, depth-1)", "def jump(self, x):\n self.change_y += x * self.speed", "def make_coffee(self, order):\n for item in order.ingredients:\n self.resources[item] -= order.ingredients[item]\n print(f\"Here is your {order.name} โ˜•๏ธ. Enjoy!\")", "def _step(self, a):\n # potential_old = self.potential\n obs, rew, done, info = super()._step(a)\n # state = self.robot.calc_state()\n # alive = float(self.robot.alive_bonus(state[0]+self.robot.initial_z, self.robot.body_rpy[1]))\n # alive *= 0.01\n\n # cost = 0.001 * -np.square(a).sum()\n\n # progress = float(self.potential - potential_old)\n # print (\"Rewarsd\", alive, progress)\n # rew = alive + progress + cost\n # if self.robot.body_xyz[0] > 5:\n # rew = 1.0\n # else:\n # rew = 0.0\n return obs, rew, done, info", "def turn():\n \n robottype = get_type()\n if robottype == RobotType.PAWN:\n pawn_turn()\n else:\n overlord_turn()\n bytecode = get_bytecode()", "def turn():\n \n robottype = get_type()\n if robottype == RobotType.PAWN:\n pawn_turn()\n else:\n overlord_turn()\n bytecode = get_bytecode()", "def step(self, action): # action is nb-cops-sized or 1-sized\n reward = 0\n done = False\n\n action = np.array(action)\n\n def old_pos(set=None):\n if set is None:\n return self.cops_pos if self.is_cops_turn else self.rob_pos\n else:\n if self.is_cops_turn:\n self.cops_pos = action\n else:\n self.rob_pos = action\n\n invalids = []\n\n if self.is_first_turn:\n self.graph.set_cr(action, self.is_cops_turn)\n else:\n edges = self.graph.get_rep()[old_pos(), action]\n invalids = edges != 1\n invalids[action == old_pos()] = False\n invalids = np.where(invalids == True)[0]\n if invalids.shape[0] != 0:\n action[invalids] = old_pos()[invalids] # correct action\n self.graph.set_cr(action, self.is_cops_turn)\n\n old_pos(action)\n if not self.is_cops_turn and self.is_first_turn:\n self.is_first_turn = False\n self.is_cops_turn = not self.is_cops_turn\n if self.rob_pos is not None and self.rob_pos[0] in self.cops_pos:\n print(\"Cops won\")\n done = True\n reward += (1 if self.is_cops_turn else -1) * REWARD_END_WL\n\n reward += (-1 if self.is_cops_turn else +1) * REWARD_STEP_WL\n reward -= len(invalids) * REWARD_INVALID\n\n observation = self.graph.get_attr()\n\n if self.is_cops_turn:\n self.cops_rew += reward\n else:\n self.rob_rew += reward\n\n if not done:\n if self.is_cops_turn and self.cops is not None:\n observation, _, done, _ = self.step(self.cops.act(observation))\n elif not self.is_cops_turn and self.robber is not None:\n observation, _, done, _ = self.step(self.robber.act(observation))\n return observation, reward, done, {}", "def bounce(self):\n self.y_dir *= -1 # Reverse vertical direction of travel", "def _step(self, a):\n # potential_old = self.potential\n obs, rew, done, info = super()._step(a)\n # state = self.robot.calc_state()\n # alive = float(self.robot.alive_bonus(state[0]+self.robot.initial_z, self.robot.body_rpy[1]))\n 
# alive *= 0.05\n # cost = 0.01 * -np.square(a).sum()\n # progress = float(self.potential - potential_old)\n # # print (\"Rewarsd\", alive, progress)\n # rew = alive + progress + cost\n # # if self.robot.body_xyz[0] > 5:\n # # rew = 1.0\n # # else:\n # # rew = 0.0\n # # print (\"ROBOT: \", self.robot.body_xyz[2] < 0.3)\n # # if done:\n # # print (\"DONE\")\n return obs, rew, done, info", "def on_loop(self):\n\n if not Syringe.craftable:\n if Syringe.can_be_crafted(self.macgyver.inventory):\n Syringe.craftable = True\n self.notification.active('craft-available').set_timer(2)\n\n # Check if MacGyver threw himself against a wall...\n if sprite.spritecollide(self.macgyver, self.walls, False):\n self.macgyver.rollback()\n\n # Macgyver will collect the item and add it to it's inventory...\n for item in sprite.spritecollide(self.macgyver, self.items, False):\n item.collect(self.macgyver.inventory)\n\n # if self.macgyver.coordinates == self.finish_point:\n # self.notification.active('win')", "def give(r):\n r.rotate(\"r_shoulder_pan_joint\", 0.5)\n time.sleep(2)\n r.rotate(\"r_shoulder_lift_joint\", -1.0)\n time.sleep(2)\n r.rotate(\"r_elbow_flex_joint\", 1.8)\n time.sleep(2)", "def __call__(self, brd):\n brdcpy = brd.copy() \n return brdcpy[self.y][self.x].makemove(self.direction)", "def sling_action():\n global mouse_distance\n global rope_lenght\n global angle\n global x_mouse\n global y_mouse\n # Fixing bird to the sling rope\n v = vector((sling_x, sling_y), (x_mouse, y_mouse))\n uv = unit_vector(v)\n uv1 = uv[0]\n uv2 = uv[1]\n # mouse_distance = distance(sling_x, sling_y, x_mouse, y_mouse)\n sling = Vec2d(sling_x, sling_y)\n mouse = Vec2d(x_mouse, y_mouse)\n mouse_distance = (sling - mouse).length\n\n pu = (uv1*rope_lenght+sling_x, uv2*rope_lenght+sling_y)\n bigger_rope = 102\n x_redbird = x_mouse - 20\n y_redbird = y_mouse - 20\n if mouse_distance > rope_lenght:\n pux, puy = pu\n pux -= 20\n puy -= 20\n pul = pux, puy\n screen.blit(redbird, pul)\n pu2 = (uv1*bigger_rope+sling_x, uv2*bigger_rope+sling_y)\n pygame.draw.line(screen, (0, 0, 0), (sling2_x, sling2_y), pu2, 5)\n screen.blit(redbird, pul)\n pygame.draw.line(screen, (0, 0, 0), (sling_x, sling_y), pu2, 5)\n else:\n mouse_distance += 10\n pu3 = (uv1*mouse_distance+sling_x, uv2*mouse_distance+sling_y)\n pygame.draw.line(screen, (0, 0, 0), (sling2_x, sling2_y), pu3, 5)\n screen.blit(redbird, (x_redbird, y_redbird))\n pygame.draw.line(screen, (0, 0, 0), (sling_x, sling_y), pu3, 5)\n # Angle of impulse\n dy = y_mouse - sling_y\n dx = x_mouse - sling_x\n if dx == 0:\n dx = 0.00000000000001\n angle = math.atan((float(dy))/dx)", "def main():\r\n\r\n movetwotimes()\r\n pick_beeper()\r\n move()\r\n turn_left()\r\n movetwotimes()\r\n put_beeper()\r\n turn_left()\r\n turn_left()\r\n movetwotimes()\r\n rotatethreetimes()\r\n movetwotimes()\r\n move()\r\n turn_left()\r\n turn_left()", "def eat(self, jungle: Jungle):", "def step(self, action):\n if action[1] > 0:\n throttle = action[1]\n brake = 0.0\n reverse = False\n elif action[1] < 0:\n throttle = -action[1]\n brake = 0.0\n reverse = True\n else:\n throttle = 0.0\n reverse = False\n brake = 1.0\n\n for i in range(5):\n try:\n self.vehicle.apply_control(carla.VehicleControl(steer=action[0], throttle=throttle, brake=brake, reverse=reverse))\n reward = self._calculate_reward()\n break\n except RuntimeError as e:\n # Trying to reset for 5 times, on 5th try, raising error\n if i == 4:\n # assuming major error, raising error and stopping process\n raise RuntimeError(e)\n # print(f'Encountered 
RuntimeError, assuming temporary connection issues, trying again...({i+2}/5 attempts)')\n\n if self.collided:\n self.done = True\n # TODO should the collision reward be subtracted from the total reward, or rather should it replace the reward entirely?\n reward = self._reward_on_collision(self.collided) \n\n if self.time_step != 0 and self.time_step == self.max_steps:\n self.done = True\n \n # trying to prevent staying in one place\n current_location = self.vehicle.get_location()\n if self.blocked[0] == current_location:\n self.blocked[1] += 1\n else:\n self.blocked[0] = current_location\n self.blocked[1] = 0\n \n if self.blocked[1] >= 15: # x/10 sec\n reward = -1\n self.done = True\n\n self.rewards.append(reward)\n self.time_step += 1\n\n return np.rollaxis(np.array(self.rgb), -1, 0), reward, self.done, None # we return None to be compatible with a gym environment", "def shift_radius_circ(mutated_genome,index):\n limit = int(max(imagewidth,imagewidth)*0.1)\n radius = random.randint(-limit,limit)\n circle = mutated_genome[index][2]\n newcircle = (circle[0],circle[1],circle[2]+radius)\n mutated_genome[index][2] = newcircle", "def drive(self, kilometres_driven):\n self.fuel -= (self.litres_per_kilometre * kilometres_driven)", "def main():\n move()\n move()\n pick_beeper()\n move()\n turn_left()\n for i in range(2):\n move()\n put_beeper()\n turn_around()\n move_to_wall()\n turn_right()\n move_to_wall()\n turn_around()", "def step(self):\n self.world.slosh_oceans()\n self.world.transfer_energy_vertically()\n self.world.transfer_energy_horizontally()\n self.world.absorb_energy_from_core()\n self.world.absorb_energy_from_sun(self.sun)", "def take_turn(self, turn, actions, world, truck, time):\n self.turn += 1\n\n chosen_upgrade = self.select_upgrade(actions, truck)\n # If there is not an active contract get one\n if(truck.active_contract is None):\n #print(\"Select\")\n chosen_contract = self.select_new_contract(actions, truck)\n actions.set_action(ActionType.select_contract, chosen_contract)\n # Buy gas if below 20% and there is enough money to fill tank to full at max gas price\n elif (truck.speed != 100):\n actions.set_action(ActionType.set_speed, 100)\n print('speed')\n elif(truck.body.current_gas < 1 and truck.money > 100*truck.active_contract.game_map.current_node.gas_price):\n print(\"Gas\")\n actions.set_action(ActionType.buy_gas)\n # If health is below max and have enough money to fully repair do so\n elif truck.health < 100 and truck.money > 1000:\n #print(\"Heal\")\n actions.set_action(ActionType.repair)\n elif chosen_upgrade is not None:\n #print(\"Upgrade\")\n actions.set_action(ActionType.upgrade, chosen_upgrade)\n elif(truck.active_contract.game_map.current_node.next_node is not None):\n # Move to next node\n # Road can be selected by passing the index or road object\n #print(\"Move\")\n road = self.select_new_route(actions, truck)\n actions.set_action(ActionType.select_route, road)\n\n if self.turn == 69 or self.turn == 420:\n print(\"Funny Number! 
\" + truck.__str__())\n \n pass", "def kick_from_accretion(self):\n\n accth_mag = -self.v * self.pair.dmdt_accr / self.pair.mass\n kick0 = -self.vth_vec * accth_mag[0] * self.dt\n kick1 = self.vth_vec * accth_mag[1] * self.dt\n\n self.pair[0].velocity += kick0\n self.pair[1].velocity += kick1", "def com_turn(self):\r\n circle = copy.deepcopy(self.circle)\r\n# Creates a turtle to use in the computer turns\r\n t = turtle.Turtle()\r\n t.hideturtle()\r\n com_take= circle%5\r\n if com_take==0:\r\n com_take= random.choice([\"1\",\"2\",\"3\",\"4\"])#Random number between 1-4 if remainder is zero.\r\n\r\n com_take=str(com_take)\r\n self.update_scoreboard(com_take)\r\n com_take = int(com_take)\r\n self.remove_circle(com_take)\r\n self.circle -= com_take\r\n return self.circle", "def move_to(self, new_pos, pass_go=True):\r\n new_pos = new_pos % 40\r\n if self.pos > new_pos and pass_go:\r\n self.money += 200\r\n self.pos = new_pos", "def _undo_naughty_movement(self, set_balls_that_moved :Set[Ball], set_bots_that_moved :Set[Robot]):\n ball :Ball\n bot :Robot\n bln_naughty = True\n lng_naughty_loop_count = 0\n lng_naughty_loop_limit = len(self.lstBalls) + len(self.lstRobots) # worst case scenario\n while bln_naughty:\n lng_naughty_loop_count += 1\n if lng_naughty_loop_count > lng_naughty_loop_limit:\n if const.GAME_MODE:\n print(\"WARNING: UNABLE TO RESOLVE ALL COLLISIONS FOR FRAME.\")\n else:\n raise Exception(\"UNABLE TO RESOLVE ALL COLLISIONS FOR FRAME\")\n set_naughty_bots = set()\n set_naughty_balls = set()\n\n \"\"\" Ball vs Ball \"\"\"\n for ball1, ball2 in TrashyPhysics.collision_pairs_self(\n self.grpBalls, fncCollided=TrashyPhysics.balls_collided):\n set_naughty_balls.add(ball1)\n set_naughty_balls.add(ball2)\n\n \"\"\" Ball vs Bot \"\"\"\n for ball, bot in TrashyPhysics.collision_pairs(\n self.grpBalls, self.grpRobots,\n fncCollided=TrashyPhysics.ball_robot_collided):\n set_naughty_balls.add(ball)\n set_naughty_bots.add(bot)\n\n \"\"\" Ball vs Wall \"\"\"\n for ball in filter(lambda x: TrashyPhysics.collided_wall(x), self.lstBalls):\n set_naughty_balls.add(ball)\n\n \"\"\" Ball vs Bumper \"\"\"\n # todo\n\n \"\"\" Undo movement. \"\"\"\n bln_naughty = len(set_naughty_bots) + len(set_naughty_balls) > 0\n for bot in set_bots_that_moved & set_naughty_bots:\n set_bots_that_moved.remove(bot)\n if not bot.undo_move():\n print(f\"UNABLE TO UNDO MOVE FOR BOT: {bot}\")\n for ball in set_balls_that_moved & set_naughty_balls:\n set_balls_that_moved.remove(ball)\n if not ball.undo_move():\n print(f\"UNABLE TO UNDO MOVE FOR BALL: {ball}\")", "def run(self):\n ball = memory.world_objects.getObjPtr(core.WO_BALL)\n if ball.seen:\n self.finish()\n \n commands.setWalkVelocity(0, 0, -0.25)", "def move_bolt_side(self, x_velocity):\n self.x += x_velocity", "def moonCalcY(x,y,Mx,My, angle, velocityX, velocityY, massRocket, rocketForce):\r\n global G # initialising global variables\r\n global massEarth\r\n global massMoon\r\n global rocketStage\r\n xMoon = x-Mx #here we calculate the position of the projectile relative to the moon. 
Ie with the centre of the moon at (0,0)\r\n yMoon = y-My #same as line above but y coordinate\r\n speed = math.sqrt(velocityX**2 + velocityY**2) #here we calculate the scalar quantity speed of the projectile\r\n if rocketStage == 2 or rocketStage == 2.5: # if the rocket is in the acceleration stage\r\n rocketY = (rocketForce*velocityY)/(speed*massRocket) #calculating the acceleration on the rocket in the y direction due to the rocket engines being fired.\r\n return (-G*massEarth*y)/((x**2+y**2)**(3/2))+(-G*massMoon*yMoon)/((xMoon**2+yMoon**2)**(3/2))+rocketY #here we return the accelaration of the projectile, with the rocket engine accelaration added on.\r\n elif rocketStage == 4 or rocketStage == 4.5 or rocketStage == 5.5: # if the rocket is in the decelaration stage\r\n rocketY = (rocketForce*velocityY)/(speed*massRocket) #again we calculate the acceleration of the rocket in the y direction \r\n return (-G*massEarth*y)/((x**2+y**2)**(3/2))+(-G*massMoon*yMoon)/((xMoon**2+yMoon**2)**(3/2))-rocketY # here we calculate the accelaration of the projectile, the force from the rocket engines is subtracted because the rockets are acting in the opposite direction.\r\n else: # if the rocket is in free flight\r\n return (-G*massEarth*y)/((x**2+y**2)**(3/2))+(-G*massMoon*yMoon)/((xMoon**2+yMoon**2)**(3/2))#return only the accelaration due to gravity by combining forces from moon and earth.\r", "def upgrade(self):\r\n if self._size == 3:\r\n return\r\n self._size += 1\r\n self._money *= 2", "def eat(self):\n if self.environment[self.y][self.x] > 10:\n self.environment[self.y][self.x] -= 10\n self.store += 10", "def double_down(self):\n global chips\n chips -= self.bet\n self.bet = self.bet * 2\n self.hit(\"player\")\n self.endgame()", "def step(self):\n self.age += 1\n self.move_agent()\n self.sugar -= self.metabolism\n\n # Eat sugar\n available_sugar = self.get_sugar(self.pos).amount\n self.sugar += available_sugar\n# self.total_sugar_in_field -= available_sugar\n # Set sugar in current cell to zero\n self.get_sugar(self.pos).eat_sugar() \n \n \n \n if self.sugar == 0:\n self.model.remove_agent(self)\n \n self.gen += 1\n x = self.model.random.randrange(self.model.grid.width)\n y = self.model.random.randrange(self.model.grid.height)\n new_pos = (x,y)\n \n self.model.add_agent(Consumer, new_pos, f\"{self.unique_id.split('-')[0]}-{self.gen}\", self.gen, self.model.vision, self.model.metabolism, self.model.starting_sugar)\n \n \n if self.reproduction_and_death:\n if self.age > self.max_age: # Agent dies\n # Tax inheritance\n self.model.inheritance_tax_agent(self)\n \n if self.model.spawn_at_random:\n self.gen += 1\n x = self.model.random.randrange(self.model.grid.width)\n y = self.model.random.randrange(self.model.grid.height)\n new_pos = (x,y)\n \n self.model.add_agent(Consumer, new_pos, f\"{self.unique_id.split('-')[0]}-{self.gen}\", self.gen, self.model.vision, self.model.metabolism, self.model.starting_sugar)\n self.model.remove_agent(self) #agent dies\n \n \n else:\n #spawn new agent\n self.gen += 1\n if self.sugar != 0:\n self.model.add_agent(Consumer, self.pos, f\"{self.unique_id.split('-')[0]}-{self.gen}\", self.gen, self.vision, self.metabolism, self.sugar)\n else:\n self.model.add_agent(Consumer, self.pos, f\"{self.unique_id.split('-')[0]}-{self.gen}\", self.gen, self.vision, self.metabolism, self.model.starting_sugar)\n \n self.model.remove_agent(self) #agent dies", "def apply_action(self, action):\n robot_state = self.get_state('turtlebot3_waffle_pi','world')\n robot_x = 
robot_state.pose.position.x\n robot_y = robot_state.pose.position.y\n # Set the distance moved in an action such that it is at least as large as the\n # minimum distance that would let a robot in the middle of the goal go to either side\n #self.move_dist = max(((C.GOAL_TOP + C.GOAL_BOTTOM) / 2) / C.NUM_POS_SENDS, 0.5)\n if action == Learn.MOVE_LEFT:\n print(\"Move left\")\n self.set_robot(robot_x, robot_y+self.move_dist)\n elif action == Learn.MOVE_RIGHT:\n print(\"Move right\")\n self.set_robot(robot_x, robot_y-self.move_dist)\n else:\n print(\"Stay put\")", "def collision(self, newWeight):\n self.setRandDirection() # Sample new direction\n self.weight = newWeight # Change neutron's weight", "def ship_took_damage(self, damage: Damage):\n pass", "def retractionPoint(ball_loc, point, direction, t, delta = 0.999 ):\n # ball radius is given in meters\n ball_radius = 33.42\n force_direction = direction\n #print \"dir\", direction\n #print ball_radius\n # where to kick the ball\n contact_point = ball_loc - (direction * ball_radius) \n (retract_distance ,output) = g(point, contact_point, force_direction, ball_loc, t)\n \n return contact_point, (1 - delta) * retract_distance + delta * (output)", "def execute(self, cast):\n ball = cast[\"ball\"][0] # there's only one\n paddle = cast[\"paddle\"][0] # there's only one\n bricks = cast[\"brick\"]\n for brick in bricks:\n if ball.get_position().equals(brick.get_position()):\n bricks.remove(brick)\n ball.set_velocity(Point.reverse_y(ball.get_velocity()))\n\n if ball.get_position().get_y() == paddle.get_position().get_y():\n if ball.get_position().get_x() >= paddle.get_position().get_x() and ball.get_position().get_x() <= (paddle.get_position().get_x() + 11):\n ball.set_velocity(Point.reverse_y(ball.get_velocity()))\n\n if ball.get_position().get_y() == 0:\n ball.set_velocity(Point.reverse_y(ball.get_velocity()))\n \n if ball.get_position().get_x() == 0 or ball.get_position().get_x() == constants.MAX_X:\n ball.set_velocity(Point.reverse_x(ball.get_velocity()))\n\n if ball.get_position().get_y() == constants.MAX_Y:\n quit()", "def test_climb(self):\n fcs = flight_control.FlightControl(**self.start)\n\n altitude = 120.0\n pitch_angle = 10.0\n alt_cmd = cmds.SetAltitudeCmd(120.0, 10.0)\n fcs.set_altitude_cmd(alt_cmd)\n\n #heading = 150.0\n #heading_cmd = cmds.SetHeadingCmd(heading)\n #fcs.set_direction_cmd(heading_cmd)\n\n t = self.tmin\n while t < self.tmax:\n fcs.tick(t, self.dt)\n t += self.dt\n\n self.assertAlmostEqual(fcs.platform.z, altitude, delta=1.0)\n self.assertAlmostEqual(fcs.platform.theta_c, 0.0, delta=1.0)", "def moveBricks(questionBricks, interactBricks, breakingBrick):\n BRICKVY, IDLE, TYPE = 4, 5, 6\n # Moving all question blocks that are hit\n for brick in questionBricks: # Going through each question block\n if brick[BRICKVY] != 3.5 and brick[IDLE] == 1: # Checking if the block is back at its original position or idle\n brick[BRICKVY] += 0.5 # Adding VY\n brick[1] += brick[BRICKVY] # Applying gravity\n # Moving all bricks that are hit and resetting them after\n for brick in interactBricks:\n if brick[BRICKVY] != 3.5 and brick[IDLE] == 1:\n brick[BRICKVY] += 0.5\n brick[1] += brick[BRICKVY]\n else: # Resetting the brick\n brick[BRICKVY] = 0\n brick[IDLE] = 0\n # Moving all brick debris\n for brick in breakingBrick: # Going through all of the debris and adding gravity/motion\n brick[1] += brick[4]\n brick[4] += 0.8\n brick[5] += 3", "def _doDryGripper(self):\n self._cmdDry(2)", "def battle_resting(self):\n pass", "def plant_food(self):\n 
self.phase.set(0)\n #self.broadcast_phase()\n self.players[self.first_player].take_turn()", "def drive(self, miles_driven):\n self.miles_driven = miles_driven\n self.gallons_of_gas -= miles_driven / 10", "def goToBall(state):\n return goTo(state, state.ball_pos)", "def burn_step(self):\n change = np.full((self.width, self.height), 0)\n for x in range(0, self.width - 1):\n for y in range(0, self.height - 1):\n # How fast we go through the fuel\n if random.randrange(2) == 0:\n self.fire_check_point(x, y, change)\n\n self.temp = np.maximum(change, self.temp)", "def jump(self, xvel = 0, yvel = 0): #TODO: figure out how a monster's jumping ability is determined.\n self.xvel += xvel\n self.yvel -= yvel\n self.animation.iter()\n self.ai_count = 25 #TEMP\n self.onGround = False", "def hanoi_tower(n):\n def move_tower(height, from_pole, to_pole, with_pole):\n if height >= 1:\n move_tower(height-1, from_pole, with_pole, to_pole)\n move_disc(from_pole, to_pole, height)\n move_tower(height-1, with_pole, to_pole, from_pole)\n\n def move_disc(fp, tp, height):\n tp.append(fp.pop())\n show_poles()\n\n def show_poles():\n line = \"\"\n line += '{:<{}s}'.format(str(a), str_len)\n line += '{:<{}s}'.format(str(b), str_len)\n line += '{:<{}s}'.format(str(c), str_len)\n print(line)\n\n a = list(range(n, 0, -1))\n b = []\n c = []\n str_len = len(str(a))\n\n show_poles()\n move_tower(n, a, c, b)\n print('\\n')", "def step(self, crowd):\n\n for boid in crowd:\n random_int = random.randint(0, 5)\n\n # if random_int > 4:\n # random_int = random.randint(0, 5)\n # if random_int > 4:\n # for i in range (1, 500):\n # goalX, goalY = self.goals[boid.goalNr]\n # x, y = boid.position\n\n # if (goalX + 10 >= x >= goalX - 10) and (goalY + 10 >= y >= goalY - 10):\n # boid.reached_goal(goalX + 10, goalY + 10)\n\n # dx = random.randint(0, self.width) - x\n # dy = random.randint(0, self.height) - y\n\n # # Unit vector in the same direction\n # distance = math.sqrt(dx * dx + dy * dy)\n # dx /= distance\n # dy /= distance\n\n # # And now we move:\n # x += dx\n # y += dy\n\n # boid.set_goal(dx, dy)\n\n # boid.position += boid.velocity\n #else:\n # boid.position += boid.velocity\n \n # Vector from me to cursor\n\n\n goalX, goalY = self.goals[boid.goalNr]\n x, y = boid.position\n\n if (goalX + 10 >= x >= goalX - 10) and (goalY + 10 >= y >= goalY - 10):\n boid.reached_goal(goalX + 10, goalY + 10)\n\n else:\n dx = goalX - x\n dy = goalY - y\n\n # Unit vector in the same direction\n # distance = np.linalg.norm(dx * dx + dy * dy)\n distance = math.sqrt(dx * dx + dy * dy)\n dx /= distance\n dy /= distance\n\n # And now we move:\n x += dx\n y += dy\n\n boid.set_goal(dx, dy)\n\n boid.position += boid.velocity", "def shift_point_circ(mutated_genome,index):\n Xval = random.randint(-int(imagewidth*0.1),int(imagewidth*0.1))\n Yval = random.randint(-int(imageheight*0.1),int(imageheight*0.1))\n circle = mutated_genome[index][2]\n newcircle = (circle[0]+Xval,circle[1]+Yval,circle[2])\n mutated_genome[index][2] = newcircle", "def action_rapel(self):\n self.state = 'rapel'\n self.state_rapel = '1'", "def _walk(self):\n new_pos = self.rect.move((self.move, 0)) # move 9 pixel to the right per frame\n if self.rect.left < self.area.left or self.rect.right > self.area.right:\n self.move = -self.move # move to the opposite direction when the chimp position exceeds the screen\n new_pos = self.rect.move((self.move, 0))\n self.image = pygame.transform.flip(\n self.image, 1, 0\n ) # mirror the chimp to make it looks like turning around\n self.rect = new_pos", 
"def _think(self):\n\n # DEBUG: tells us if a thread dies\n if not self._think_thread.is_alive() or not self._msg_thread.is_alive():\n raise Exception(\"A thread died.\")\n\n # take places on the field by uniform number\n if not self.in_kick_off_formation:\n print('the side is {}'.format(self.wm.side))\n\n # used to flip x coords for other side\n side_mod = 1\n if self.wm.side == WorldModel.SIDE_R:\n side_mod = -1\n\n if self.wm.uniform_number == 9:\n self.wm.teleport_to_point((-5 * side_mod, 30))\n elif self.wm.uniform_number == 2:\n self.wm.teleport_to_point((-40 * side_mod, 15))\n elif self.wm.uniform_number == 3:\n self.wm.teleport_to_point((-40 * side_mod, 00))\n elif self.wm.uniform_number == 4:\n self.wm.teleport_to_point((-40 * side_mod, -15))\n elif self.wm.uniform_number == 5:\n self.wm.teleport_to_point((-5 * side_mod, -30))\n elif self.wm.uniform_number == 6:\n self.wm.teleport_to_point((-20 * side_mod, 20))\n elif self.wm.uniform_number == 7:\n self.wm.teleport_to_point((-20 * side_mod, 0))\n elif self.wm.uniform_number == 8:\n self.wm.teleport_to_point((-20 * side_mod, -20))\n elif self.wm.uniform_number == 1:\n self.wm.teleport_to_point((-10 * side_mod, 0))\n elif self.wm.uniform_number == 10:\n self.wm.teleport_to_point((-10 * side_mod, 20))\n elif self.wm.uniform_number == 11:\n self.wm.teleport_to_point((-10 * side_mod, -20))\n\n self.in_kick_off_formation = True\n\n return\n\n if (not self.wm.is_before_kick_off() and self.wm.play_mode != self.wm.PlayModes.TIME_OVER) \\\n or self.wm.is_kick_off_us() or self.wm.is_playon():\n # The main decision loop\n return self.decisionLoop()\n\n if self.wm.play_mode == self.wm.PlayModes.TIME_OVER and not self.saved:\n if self._agent.save_traj:\n self._agent.save_dataset()\n\n # Report metrics of the classifier if it is not being trained during execution.\n if self._agent.clone:\n self._agent.report_results()\n\n # Report the stats of visited states and actions taken.\n self.stats.save()\n\n self.saved = True", "def pain(self, int):\n self.vel[1] = int", "def water_uptake_campbell(self, soil):\r\n daily_ref_evap_transp = soil.daily_ref_evap_transp\r\n root_hydr_cond = np.zeros(soil.total_layers)\r\n shoot_hydr_cond = np.zeros(soil.total_layers)\r\n plant_hydr_cond = np.zeros(soil.total_layers)\r\n root_activity = np.zeros(soil.total_layers)\r\n root_cond_adj = np.zeros(soil.total_layers)\r\n tot_root_cond_adj = 0\r\n salinity_factor = np.zeros(soil.total_layers)\r\n soil_water_pot_avg = 0\r\n WAT_POT_FIELD_CAP = -33\r\n\r\n # Transpiration\r\n self.pot_transp = daily_ref_evap_transp * self.light_intercpt\r\n self.max_pot_transp = (self.campbell_max_daily_transp *\r\n self.light_intercpt)\r\n self.expect_transp = min(self.pot_transp, self.max_pot_transp) # mm/day\r\n\r\n # Plant hydraulic conductance (kg s m-4)\r\n tot_plant_hydr_cond = (self.max_pot_transp /\r\n (WAT_POT_FIELD_CAP -\r\n self.leaf_water_pot_stress_onset))\r\n # assumption of 2/3 of plant hydraulic conductance is from roots\r\n tot_root_hydr_cond = tot_plant_hydr_cond / 0.65\r\n # assumption of 1/3 of plant hydraulic conductivity is from shoots\r\n tot_shoot_hydr_cond = tot_plant_hydr_cond / 0.35\r\n\r\n for lyr in soil.layers:\r\n root_activity[lyr] = 1\r\n salinity_factor[lyr] = 1\r\n root_cond_adj[lyr] = (root_activity[lyr] * self.root_fraction[lyr]\r\n * salinity_factor[lyr])\r\n root_hydr_cond[lyr] = tot_root_hydr_cond * root_cond_adj[lyr]\r\n tot_root_cond_adj += root_cond_adj[lyr]\r\n\r\n # Root, shoot and plant hydraulic conductance(kg s m-4)\r\n for lyr in 
soil.layers:\r\n if root_cond_adj[lyr] > 0:\r\n shoot_hydr_cond[lyr] = (tot_shoot_hydr_cond *\r\n root_cond_adj[lyr] / tot_root_cond_adj)\r\n plant_hydr_cond[lyr] = (root_hydr_cond[lyr] *\r\n shoot_hydr_cond[lyr] /\r\n (root_hydr_cond[lyr] +\r\n shoot_hydr_cond[lyr]))\r\n else:\r\n plant_hydr_cond[lyr] = 0\r\n\r\n tot_root_hydr_cond *= tot_root_cond_adj\r\n tot_plant_hydr_cond = ((tot_root_hydr_cond * tot_shoot_hydr_cond) /\r\n (tot_root_hydr_cond + tot_shoot_hydr_cond))\r\n\r\n if tot_plant_hydr_cond > 0:\r\n for lyr in soil.layers:\r\n soil_water_pot_avg += (soil.water_potential[lyr] *\r\n root_cond_adj[lyr])\r\n leaf_water_pot = (soil_water_pot_avg - self.expect_transp /\r\n tot_plant_hydr_cond)\r\n if leaf_water_pot < self.leaf_water_pot_stress_onset:\r\n leaf_water_pot = ((tot_plant_hydr_cond * soil_water_pot_avg *\r\n (self.leaf_water_pot_stress_onset -\r\n self.leaf_water_pot_wilt_point) +\r\n self.leaf_water_pot_wilt_point *\r\n self.expect_transp)\r\n / (tot_plant_hydr_cond *\r\n (self.leaf_water_pot_stress_onset -\r\n self.leaf_water_pot_wilt_point) +\r\n self.expect_transp))\r\n if leaf_water_pot < self.leaf_water_pot_wilt_point:\r\n leaf_water_pot = self.leaf_water_pot_wilt_point\r\n self.att_transp = 0\r\n transp_ratio = self.att_transp / self.expect_transp\r\n\r\n elif leaf_water_pot < self.leaf_water_pot_stress_onset:\r\n self.att_transp = (self.expect_transp * (leaf_water_pot -\r\n self.leaf_water_pot_wilt_point) / (\r\n self.leaf_water_pot_stress_onset -\r\n self.leaf_water_pot_wilt_point))\r\n transp_ratio = self.att_transp / self.expect_transp\r\n\r\n else:\r\n self.att_transp = self.expect_transp\r\n transp_ratio = 1\r\n # crop water uptake (kg/m2/d = mm/d)\r\n for lyr in soil.layers:\r\n self.water_uptake[lyr] = (plant_hydr_cond[lyr] *\r\n (soil.water_potential[lyr] -\r\n leaf_water_pot) * transp_ratio)\r\n if self.water_uptake[lyr] < 0:\r\n self.water_uptake[lyr] = 0\r\n self.crop_transp = self.water_uptake.sum() # mm/day\r\n self.cum_transp += self.crop_transp\r\n self.cum_pot_transp += self.expect_transp\r\n self.transp_ratio = self.crop_transp / self.expect_transp" ]
[ "0.61283803", "0.58139145", "0.57736254", "0.56313497", "0.56194353", "0.5569861", "0.55651504", "0.55542326", "0.5510675", "0.55064046", "0.55052066", "0.54641354", "0.54107416", "0.5409724", "0.53897583", "0.53766644", "0.53678995", "0.53670925", "0.53559333", "0.5348638", "0.5338287", "0.5337605", "0.53360903", "0.53323203", "0.53086495", "0.5305337", "0.53052795", "0.530438", "0.5296983", "0.5289254", "0.5284989", "0.5277517", "0.5247705", "0.5247259", "0.5237249", "0.52355576", "0.5222112", "0.521403", "0.5201775", "0.520071", "0.519935", "0.51871854", "0.51863843", "0.516722", "0.51498497", "0.51495785", "0.51470137", "0.51271784", "0.51163906", "0.5115323", "0.51137406", "0.51128364", "0.51128364", "0.51013005", "0.5100076", "0.50902146", "0.5089041", "0.50889903", "0.50887537", "0.508236", "0.5078654", "0.5064126", "0.50624007", "0.505934", "0.50495595", "0.5048696", "0.50396484", "0.50377655", "0.50338537", "0.50320363", "0.503165", "0.50288075", "0.50256294", "0.50216573", "0.50160027", "0.50152016", "0.5010683", "0.5010142", "0.50056493", "0.5004495", "0.50032073", "0.5001901", "0.5000215", "0.5000143", "0.49973905", "0.4988582", "0.49882966", "0.49838325", "0.4977507", "0.4977395", "0.4967769", "0.49671146", "0.49664912", "0.49609464", "0.4957649", "0.49568874", "0.4956314", "0.49471483", "0.49453163", "0.49450845", "0.49442902" ]
0.0
-1
Shows the guild's icon.
async def guild_icon(
    event,
    choice: (GUILD_ICON_CHOICES, 'Which icon of the guild?') = 'icon',
):
    guild = event.guild
    if (guild is None) or guild.partial:
        return Embed('Error', 'This command is unavailable in guilds where the application\'s bot is not present.')
    
    # Resolve the requested asset's display name, url and hash.
    if choice == 'icon':
        name = 'icon'
        url = guild.icon_url_as(size = 4096)
        hash_value = guild.icon_hash
    elif choice == 'banner':
        name = 'banner'
        url = guild.banner_url_as(size = 4096)
        hash_value = guild.banner_hash
    elif choice == 'discovery_splash':
        name = 'discovery splash'
        url = guild.discovery_splash_url_as(size = 4096)
        hash_value = guild.discovery_splash_hash
    else:
        name = 'invite splash'
        url = guild.invite_splash_url_as(size = 4096)
        hash_value = guild.invite_splash_hash
    
    if url is None:
        # No such asset is set; derive a fallback embed color from the event id.
        color = (event.id >> 22) & 0xFFFFFF
        return Embed(f'{guild.name} has no {name}', color = color)
    
    # Use the asset hash's low bits as the embed color.
    color = hash_value & 0xFFFFFF
    return Embed(f'{guild.name}\'s {name}', color = color, url = url).add_image(url)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def icon(self, ctx: lifesaver.Context):\n if not ctx.guild.icon:\n await ctx.send(\"This server doesn't have a custom icon.\")\n return\n\n await ctx.send(ctx.guild.icon.replace(format=\"png\"))", "def icon(self) -> str:\n return ICON_SERVER", "def icon(self):\n return \"mdi:hubspot\"", "def icon(self):", "def icon(self):\n return ICON", "def icon(self):\n return ICON", "def icon(self):\n return ICON", "def icon(self):\n return ICON", "def icon(self):\n return ICON", "def icon(self):\n return ICON", "def icon(self):\n return ICON", "def icon(self):\n return ICON", "def icon(self):\n return ICON", "def icon(self):\n return ICON", "def icon(self):\n return self.ICON", "def icon(self):\n return self.ICON", "async def change_icon(self, icon_content: bytes):\n if not self.me.guild_permissions.manage_server:\n raise PermissionsError(\"manage_server\")\n\n image = base64ify(icon_content)\n await self._bot.http.edit_guild(self.id,\n icon_content=image)", "def icon(self):\n ret_icon = self._icon\n if self.player_name == \"lower\":\n ret_icon = self._icon.lower()\n if self.is_promoted:\n ret_icon = \"+\" + ret_icon\n return ret_icon", "def icon(self) -> str:\n return self._icon", "def icon(self) -> str:\n return self._icon", "def icon(self) -> str:\n return self._icon", "def icon(self):\n return self.__icon", "def get_icon(self):\n return self.ICON", "def getIcon(self):\n return \":/icons/Ship_Instance.svg\"", "def getIconUrl(self):\n return \"%s/static/war/images/achievements/%s.png\" % (serverString, self.id)", "def icon(self):\n return 'mdi:broom'", "def icon(self):\n return None", "def icon(self):\n return None", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return self._icon", "def icon(self):\n return DEFAULT_ICON", "def icon(self):\r\n return self._icon", "def icon(self):\n if \"icon\" in self._typeconf:\n return self._typeconf[\"icon\"]", "def icon(self):\n return ICON_BUS", "def icon(self):\r\n try:\r\n return self.data['icon_url_base']+self.data['icon_url_name']\r\n except KeyError:\r\n return ''", "def icon(self):\n return \"mdi:solar-power\"", "def get_bot_icon(self):\n return self.bot_data_file[\"bot_icon\"]", "def icon(self):\n return self._config.get(CONF_ICON)", "def icon(self):\n return \"mdi:speaker\"", "def get_icon(self):\r\n return get_icon(self.ICON)", "def get_icon(self):\n return self._icon", "def get_icon(self):\n raise NotImplementedError", "def icon(self):\n return self._var_icon", "def icon(self):\n return self._var_icon", "def get_icon(self):\n\t\treturn QIcon()", "def icon(self) -> str | None:\n return self._icon", "def embed_url(self) -> str:\n return (Endpoints.GUILD_BASE + \"/embed.png\").format(guild_id=self.id)", "async def icon(self):\n if not hasattr(self, \"_icon\"):\n self._icon = await Stack.fetch_stack_value(self, 
\"https://www.w3.org/1999/xhtml/vocab#icon\", await self.uuid)\n return self._icon", "def icon(self):\n return self.var_icon", "def icon(self):\n return self._sensor[CONF_ICON]", "def icon(self):\n return STATUSES.get(self._mower_status, {}).get('icon', DEFAULT_ICON)", "def icon(self):\r\n return \"mdi:light-switch\"", "def icon(self) -> str:\n return ICON_CORAL", "def icon(self) -> str:\n return ICON_CORAL", "def icon(self, icon):\n self._icon = icon", "def icon(self) -> Icon:\n return self._icon", "def get_icon(self):\r\n raise NotImplementedError", "def _icons(self):", "def setIconImage(*args):", "def icon(self):\n return 'mdi:text-to-speech'", "def icon(self):\r\n icon_path = \":/plugins/pdok_services/icon.png\"\r\n icon = QtGui.QIcon(icon_path)\r\n return icon", "def icon(self):\n return self._metadata[2]", "def showEmoticonList(self):\n print \"Guess what? No emoticons. But I'll put in a random one for you\"\n self.appendImageAtCursor(\"throbber.gif\")", "def DrawIcon(*args, **kwargs):\n return _gdi_.DC_DrawIcon(*args, **kwargs)", "def icon(self):\n return \"mdi:brightness-6\"", "def DrawIcon(*args, **kwargs):\n return _gdi_.PseudoDC_DrawIcon(*args, **kwargs)", "def icon(self):\n return \"mdi:car\"", "def get_icon(self):\n\n return self._icon", "def icon(self) -> str:\n return \"mdi:thermometer\"", "def getIcon():\n\treturn \"Animator.png\"", "def icon(self):\n\n # look for icon one level up from this hook's folder in \"icons\" folder\n return os.path.join(\n self.disk_location,\n os.pardir,\n \"icons\",\n \"review.png\"\n )", "def icon(self) -> str:\n return icon_for_battery_level(\n battery_level=self._device.battery_level,\n charging=self._device.battery_status == \"Charging\",\n )", "async def imageShow(self, ctx: Context, name: str, imageType=\"icons\"):\n imageSingular = self.getSingularImageType(imageType)\n\n # Check to see if this icon exists in dictionary\n images = await self.config.guild(ctx.guild).get_attr(imageType)()\n if name not in images.keys():\n await ctx.send(f\"This {imageSingular} doesn't exist!\")\n return\n\n filepath = self.getFullFilepath(ctx.guild, images[name], imageType=imageType)\n\n # Send file to discord\n try:\n image = discord.File(filepath, filename=images[name][\"filename\"])\n await ctx.send(file=image)\n except FileNotFoundError:\n await ctx.send(error(\"The file does not exist\"))\n self.logger.error(\"File does not exist %s\", filepath)", "def get_icon(self) -> Dict[str, Any]:\n player = self._last_sessionplayer\n assert player is not None\n return player.get_icon()", "def icon(self):\n return \"mdi:sync\" if self.is_on else \"mdi:sync-off\"", "def set_icon(self, icon):\n icon = icon.title()\n if icon in self.rewards:\n self.icon = icon", "async def _server(self, ctx: commands.Context) -> None:\n\n guild = ctx.guild\n\n embed = CleanEmbed(\n author_image=guild.icon_url,\n author_text=guild.name,\n thumbnail_url=guild.icon_url,\n fields=[\n {'name': 'Owner', 'value': f'{guild.owner.name}#{guild.owner.discriminator}', 'inline': True},\n {'name': 'ID', 'value': guild.id, 'inline': True},\n {'name': 'Members', 'value': guild.member_count, 'inline': True},\n {'name': 'Channels',\n 'value': f'{(len(guild.text_channels) + len(guild.voice_channels))} (+ {len(guild.categories)} categories)',\n 'inline': True},\n {'name': 'Region', 'value': GUILD_REGIONS[guild.region], 'inline': True},\n {'name': 'Emojis', 'value': len(guild.emojis), 'inline': True},\n {'name': 'Tier', 'value': f'{guild.premium_tier} ({guild.premium_subscription_count} boosts)',\n 
'inline': True},\n {'name': 'Verification', 'value': GUILD_VERIFICATION_LEVELS[guild.verification_level], 'inline': True},\n {'name': 'Created', 'value': guild.created_at.strftime(\"%d %B, %Y\"), 'inline': True},\n ])\n\n await ctx.send(embed=embed)", "def get_icon_name(self):\n return 'gramps-notes'", "def show(self, nid =None):\r\n flags = NIF_ICON | NIF_MESSAGE\r\n if nid is None:\r\n nid = (self.hwnd, 0, flags, WM_USER+20, self.hicon)\r\n if self.visible:\r\n self.hide()\r\n Shell_NotifyIcon(NIM_ADD, nid)\r\n self.visible = 1", "def icon(self) -> str | None:\n value = self.entity_description.icon\n if self.entity_description.key == \"weather\":\n value = self.state\n if value is None:\n value = \"sunny\"\n elif value == \"partlycloudy\":\n value = \"partly-cloudy\"\n value = f\"mdi:weather-{value}\"\n\n return value", "async def discord(self, ctx):\n embed = discord.Embed(title='Join the discord today!', color=0x5643fd, description=\"This server is where \"\n \"all of \"\n \"NOVA's updates and \"\n \"important \"\n \"announcements will pass \"\n \"through. The creator of \"\n \"this \"\n \"bot, YeetVegetabales#5313, \"\n \"will also be there testing \"\n \"and letting the communtiy \"\n \"in \"\n \"on things first hand!\")\n embed.set_thumbnail(url='https://images-ext-2.discordapp.net/external/AQCEqCF4Yl_PWAfuA-GReZoDify6'\n '--y4hXOJVkqaDHo/%3Fsize%3D1024/https/cdn.discordapp.com/avatars/709922850953494598'\n '/f78ed19924e8c95abc30f406d47670d7.png')\n embed.add_field(name='Server Invite', value='<:news:730866149109137520> '\n '[Join here](https://discord.gg/Uqh9NXY)')\n await ctx.send(embed=embed)", "def icon(self):\n return \"mdi:currency-gbp\"" ]
[ "0.79320234", "0.7129427", "0.6738884", "0.6581648", "0.6533175", "0.6533175", "0.6533175", "0.6533175", "0.6533175", "0.6533175", "0.6533175", "0.6533175", "0.6533175", "0.6533175", "0.6479007", "0.6479007", "0.63814235", "0.6271272", "0.62624985", "0.62624985", "0.62624985", "0.62273455", "0.62197846", "0.61817753", "0.61782724", "0.6158226", "0.6147599", "0.6147599", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.6130299", "0.61022395", "0.6083852", "0.60765773", "0.60643274", "0.60623574", "0.6056551", "0.60002637", "0.5995184", "0.5984601", "0.5980961", "0.5978998", "0.59506655", "0.59347713", "0.59347713", "0.5931625", "0.59303606", "0.5924896", "0.59202737", "0.59190017", "0.59134066", "0.5909362", "0.5887786", "0.5867649", "0.5867649", "0.58648306", "0.58626324", "0.5856357", "0.5852533", "0.5840767", "0.58399916", "0.58372015", "0.5835744", "0.58221203", "0.5819172", "0.57920337", "0.57910335", "0.57700425", "0.57667094", "0.57573426", "0.57472175", "0.57449573", "0.57439953", "0.57339734", "0.57249", "0.5712939", "0.57079786", "0.57010746", "0.5662593", "0.5660876", "0.5635742", "0.563088", "0.562799" ]
0.754903
1
Converts the given Discord snowflake to time.
async def id_to_datetime_(
    snowflake: ('int', 'Id please!'),
):
    # A Discord snowflake carries a millisecond timestamp in its upper bits
    # ((id >> 22) offset from the 2015-01-01 Discord epoch); id_to_datetime extracts it.
    time = id_to_datetime(snowflake)
    return f'{time:{DATETIME_FORMAT_CODE}}\n{elapsed_time(time)} ago'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def epoch2time(time):\n\tvalue = datetime.datetime.fromtimestamp(time)\n\tNormal = value.strftime('%Y-%m-%d %H:%M:%S')\n\tprint(normal)\n\treturn normal", "def conv_time(stamp):\n value = datetime.fromtimestamp(stamp)\n return value.strftime('%Y-%m-%d %H:%M:%S')", "def get_time(text_time):\n # return Observer.datetime_to_astropy_time(dt.datetime.strptime(text_time, '%d/%m/%Y %H:%M'))\n the_time = dt.datetime.strptime(text_time, '%d/%m/%Y %H:%M')\n return Time(the_time.strftime('%Y-%m-%d %H:%M'))\n #date = [int(i) for i in date.split('/')]", "def convert_time(slog_time_str):\n \n base_time = datetime.datetime(2007, 1, 1)\n delta = datetime.timedelta(0, float(slog_time_str))\n \n timestamp = base_time + delta\n taml_dtg = timestamp.strftime('%Y-%m-%dT%H:%M:%S')\n return taml_dtg", "def convert_time(t):\n return datetime.fromtimestamp(t / 1e7 - 11644473600)", "async def toEpochTime(self, ctx, *, timeStr:str):\n\t\t_, time = (search_dates(\n\t\t\ttimeStr.upper(), settings={'RETURN_AS_TIMEZONE_AWARE': True})[0])\n\t\tawait ctx.send(f\"`{int(time.timestamp())}` is the timestamp for `{time.strftime('%c in timezone %Z')}`\\nThe basic timestamp would look like this: <t:{int(time.timestamp())}:F>\")", "def tweet_time(tweet):\n return tweet['time']", "def gh_to_timestamp(time_str: str, server: str) -> int:\n dt = ghtime(time_str, server)\n return int(dt.timestamp())", "def convert_to_time(value):\n if isinstance(value, datetime.time):\n return value\n elif isinstance(value, str):\n return datetime.time.fromisoformat(value)\n else:\n return datetime.time(value)", "def gps2Time(self):\n self.posting_date = Time(self.posting_gpstime, format=\"gps\")", "def ConvertTime( self ) :\n \n # modules:\n import logging\n import datetime\n import netCDF4\n import numpy\n \n #\n # Original 'Time' units and description:\n #\n # title = \"Time at Start of Scan (s, UNIX time / POSIX time), number of seconds that have elapsed since midnight Coordinated Universal Time (UTC), 1 January 1970.\"\n # units = \"s\"\n #\n # Create new field 'Datetime' field with units:\n # units = \"Seconds since 1970-01-01 00:00'\n #\n # select:\n varid = self.swaths[self.component]['Geolocation Fields']['Time']\n # values:\n tvalues = varid['data']\n # extract description:\n long_name = varid['long_name'].decode('latin-1')\n # check ...\n key = 'Time at Start of Scan (s, UNIX time / POSIX time), number of seconds that have elapsed since midnight Coordinated Universal Time (UTC),'\n if long_name.startswith(key) :\n # remove leading description:\n time0 = long_name.replace(key,'').replace('.','').strip()\n # extract datetime object:\n t0 = datetime.datetime.strptime(time0,'%d %B %Y')\n # convert:\n var = {}\n var['units' ] = t0.strftime('seconds since %Y-%m-%d %H:%M:%H')\n var['long_name'] = long_name\n if 'mask' in dir(tvalues) :\n values1d = netCDF4.num2date( tvalues.data, var['units'] )\n else :\n values1d = netCDF4.num2date( tvalues , var['units'] )\n #endif\n # alternative:\n # \"Time at Start of Scan (s, TAI93)\"\n elif 'TAI' in long_name :\n # find start:\n i0 = long_name.index('TAI')\n # extract:\n year = int(long_name[i0+3:].replace(')',''))\n # convert to 4-digits if necessary:\n if year < 100 :\n if year > 50 :\n year = 1900 + year\n else :\n year = 2000 + year\n #endif\n #endif\n # reference time:\n t0 = datetime.datetime(year,1,1,0,0,0)\n # convert:\n var = {}\n var['units' ] = t0.strftime('seconds since %Y-%m-%d %H:%M:%H')\n var['long_name'] = long_name\n values1d = netCDF4.num2date( tvalues, var['units'] )\n else :\n 
self.logger.error( 'could not convert time units \"%s\"' % long_name )\n self.logger.error( 'first value : %f' % tvalues[0] )\n raise Exception\n #endif\n \n # expand to 2D:\n var['data'] = numpy.zeros( (self.ntime,self.np), values1d.dtype )\n for ip in range(self.np) :\n var['data'][:,ip] = values1d\n #endfor\n \n # set dim names:\n var['dimnames'] = ('time','pixel')\n \n # store:\n self.swaths[self.component]['Geolocation Fields']['Datetime'] = var", "def _get_time(self): \n\t\t# need to variable-ize the version ??? \n\t\ttime = self.root.find('.//{http://www.opengis.net/kml/2.2}when').text\n\t\t## strip off last 5 chars, ie '.135Z in '2015-08-01T00:06:29.135Z'\n\t\tutc = dateutil.tz.tzutc() \n\t\tcentral = dateutil.tz.gettz('America/Chicago')\n\t\ttime = datetime.datetime.strptime(time[:-5], '%Y-%m-%dT%H:%M:%S')\n\t\ttime = time.replace(tzinfo=utc)\n\t\tself.time = time.astimezone(central)", "def ntp_to_system_time(date):\n return date - NTP_DELTA", "def from_discord(self):\n reason = \"[!] Discord timestamps are 18 digits or longer\"\n ts_type = self.ts_types['discord']\n try:\n if len(str(self.discord)) < 18 or not self.discord.isdigit():\n self.in_discord = indiv_output = combined_output = False\n pass\n else:\n unix_ts = (int(self.discord) >> 22) + 1420070400000\n self.in_discord = dt.utcfromtimestamp(float(unix_ts) / 1000.0).strftime('%Y-%m-%d %H:%M:%S.%f')\n indiv_output = str(\"{} {}\".format(ts_type, self.in_discord))\n combined_output = str(\"{}{}\\t\\t\\t{} UTC{}\".format(self.left_color, ts_type, self.in_discord, self.right_color))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.in_discord = indiv_output = combined_output = False\n return self.in_discord, indiv_output, combined_output, reason", "def convertTime(self, sec):\n\n if self.timeFormat == S:\n return '%.3f' % sec\n\n if self.timeFormat == HHMMSS:\n return seconds2time(sec)", "def time_cmd(args):\n result = requete(\"Time:getTime\")\n if result:\n t = result['data']['time']\n result = requete(\"Time:getLocalTimeZoneName\")\n tz = result['data']['timezone']\n print(\"Livebox time: {} ({})\".format(t, tz))", "def test_as_time(self):\n self.assertEqual(\n time_display.as_time(\n datetime(2020, 7, 31, 23, 59, 30, 357921),\n with_msec=True),\n '23:59:30.357')", "def getTime(toConvert = None):\n if toConvert == None:\n return time.mktime(\n datetime.datetime.now().timetuple()\n )\n else:\n return time.mktime(\n toConvert.timetuple()\n )", "def adapt_timefield_value(self, value):\n if value is None:\n return None\n \n # Expression values are adapted by the database.\n if hasattr(value, 'resolve_expression'):\n return value\n # SQL Server doesn't support microseconds\n if isinstance(value, string_types):\n return datetime.datetime(*(time.strptime(value, '%H:%M:%S')[:6]))\n if timezone.is_aware(value):\n raise ValueError(\"DBMaker backend does not support timezone-aware times.\")\n return datetime.time(value.hour, value.minute, value.second)", "def round_trip_time(self):\n ...", "def get_time(self):\r\n\t\tactual_time = '{}, {} de {} del {} ({}:{} {} (UTC))'\r\n\t\tda_taim = actual_time.format(self.wday, self.day, self.mon,\r\n\t\t\t\t\t\t\t\t\t self.year, self.hour, self.min,\r\n\t\t\t\t\t\t\t\t\t self.day_or_night)\r\n\t\treturn da_taim", "def system_to_ntp_time(date):\n return date + NTP_DELTA", "def main():\n date_time_conversion('2018-12-30T09:37:56.000001Z', '2020-07-12T07:56:43.000001Z', 0, 0, 0, 0)", "def 
Time(row):\r\n try:\r\n timeadd = dt.datetime.strptime(row['TickIssueTime'], '%H:%M').time()\r\n except:\r\n timeadd = dt.datetime.strptime('00:00', '%H:%M').time()\r\n\r\n newtime = dt.datetime.combine(dt.datetime.strptime(row['TickIssueDate'], '%Y-%m-%d %H:%M:%S') , timeadd)\r\n return newtime", "def getTime(self, request, context):\n\t\t\n date = re.split(\"\\s\", datetime.utcnow().strftime(\"%Y %m %d %H %M %S\"))\n\n return droneconnect_pb2.Time(year = int(date[0]), month = int(date[1]), day = int(date[2]), hour = int(date[3]), minute = int(date[4]), second = int(date[5]))", "def time_convert(time):\n try:\n time_data = str(time)\n if time_data:\n try:\n time_data = datetime.strptime(time_data, '%Y%m%d')\n except Exception:\n time_data = datetime.strptime(time_data, '%Y%m%d%H%M%S')\n time_data = time_data.strftime('%Y-%m-%d')\n return time_data\n except Exception:\n return False", "def forge_timestamp(value) -> int:\n assert isinstance(value, str)\n dt = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')\n return calendar.timegm(dt.utctimetuple())", "def date_to_python(self, value):\r\n # this throws away fractions of a second\r\n return datetime(*strptime(value[:-5], \"%Y-%m-%dT%H:%M:%S\")[0:6])", "def _hx_time_to_epoch(self, timestr: str) -> int: # pragma: no cover\n\n time_obj = datetime.datetime.strptime(timestr, \"%Y-%m-%dT%H:%M:%S.%fZ\")\n\n return int(time_obj.strftime(\"%s\"))", "def _time_str(self):\n try:\n if not self._time:\n raise ValueError\n format_ = '%a, %d %b %Y %H:%M:%S'\n return datetime.fromtimestamp(float(self._time)).strftime(format_)\n except ValueError:\n return plastic_date()", "def now(time):\n a = datetime.fromtimestamp(time).strftime('%Y-%m-%d %H:%M:%S')\n return a", "def now(time):\n a = datetime.fromtimestamp(time).strftime('%Y-%m-%d %H:%M:%S')\n return a", "def time(self):\n return parse_time(self['timestamp'])", "def time_detected(self) -> datetime:\n return datetime.fromtimestamp(\n self.properties[DBUS_ATTR_TIME_DETECTED] * 10**-6\n ).astimezone(timezone.utc)", "def to_time(seconds:int) -> str:\n secs = int(seconds)\n return strftime(\"%I:%M:%S %p\", gmtime(seconds))", "def from_sym_time(self):\n try:\n hex_to_dec = [int(sym[i:i+2], 16) for i in range(0, len(sym), 2)]\n hex_to_dec[0] = hex_to_dec[0] + 1970\n hex_to_dec[1] = hex_to_dec[1] + 1\n dt_obj = dt(hex_to_dec[0], hex_to_dec[1], hex_to_dec[2], hex_to_dec[3], hex_to_dec[4], hex_to_dec[5])\n self.in_symtime = dt_obj.strftime('%Y-%m-%d %H:%M:%S.%f')\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.in_symtime = False\n return self.in_symtime", "def to_sym_time(self):\n try:\n dt_obj = duparser.parse(timestamp)\n sym_year = '{0:x}'.format(dt_obj.year - 1970).zfill(2)\n sym_month = '{0:x}'.format(dt_obj.month - 1).zfill(2)\n sym_day = '{0:x}'.format(dt_obj.day).zfill(2)\n sym_hour = '{0:x}'.format(dt_obj.hour).zfill(2)\n sym_minute = '{0:x}'.format(dt_obj.minute).zfill(2)\n sym_second = '{0:x}'.format(dt_obj.second).zfill(2)\n self.out_symtime = sym_year + sym_month + sym_day + sym_hour + sym_minute + sym_second\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.out_symtime = False\n return self.out_symtime", "def _str2time(self, timestring):\n if not timestring:\n return 0\n\n #\n # NOTE: the time can be larger than is expressible using a 32-bit\n # Python; e.g., 380731122950Z. 
In this case, the number of seconds will\n # be correct (2164192190L in this case), but this value won't be\n # convertible to a system time_t value.\n #\n return calendar.timegm(\n time.strptime(\n timestring[0:4] + ' ' +\n timestring[4:6] + ' ' +\n timestring[6:8] + ' ' +\n timestring[8:10] + ' ' +\n timestring[10:12] + ' ' +\n timestring[12:14],\n '%Y %m %d %H %M %S'))", "def parse_time(s):\n return time.gmtime(float(s))", "def time_form(gdf):\n gdf['time'] = gdf['time'].dt.strftime(\"%Y-%m-%dT%H:%M:%S\")\n \n return gdf", "def converttime(time, currentformat, newformat):\n\n # Define conversion dictionary\n conversions = {\n \"milliseconds\": {\n \"milliseconds\": \"time\",\n \"seconds\": \"time / 1000\",\n \"minutes\": \"time / 1000 / 60\",\n \"hours\": \"time / 1000 / 60 / 60\",\n \"days\": \"time / 1000 / 60 / 60 / 24\",\n \"weeks\": \"time / 1000 / 60 / 60 / 24 / 7\",\n \"fortnights\": \"time / 1000 / 60 / 60 / 24 / 14\",\n \"years\": \"time / 1000 / 60 / 60 / 24 / 365\",\n \"decades\": \"time / 1000 / 60 / 60 / 24 / 365 / 10\",\n \"centuries\": \"time / 1000 / 60 / 60 / 24 / 365 / 100\",\n \"millenniums\": \"time / 1000 / 60 / 60 / 24 / 365 / 1000\"\n },\n \"seconds\": {\n \"milliseconds\": \"time * 1000\",\n \"seconds\": \"time\",\n \"minutes\": \"time / 60\",\n \"hours\": \"time / 60 / 60\",\n \"days\": \"time / 60 / 60 / 24\",\n \"weeks\": \"time / 60 / 60 / 24 / 7\",\n \"fortnights\": \"time / 60 / 60 / 24 / 14\",\n \"years\": \"time / 60 / 60 / 24 / 365\",\n \"decades\": \"time / 60 / 60 / 24 / 365 / 10\",\n \"centuries\": \"time / 60 / 60 / 24 / 365 / 100\",\n \"millenniums\": \"time / 60 / 60 / 24 / 365 / 1000\"\n },\n \"minutes\": {\n \"milliseconds\": \"time * 60 * 1000\",\n \"seconds\": \"time * 60\",\n \"minutes\": \"time\",\n \"hours\": \"time / 60\",\n \"days\": \"time / 60 / 24\",\n \"weeks\": \"time / 60 / 24 / 7\",\n \"fortnights\": \"time / 60 / 24 / 14\",\n \"years\": \"time / 60 / 24 / 365\",\n \"decades\": \"time / 60 / 24 / 365 / 10\",\n \"centuries\": \"time / 60 / 24 / 365 / 100\",\n \"millenniums\": \"time / 60 / 24 / 365 / 1000\"\n },\n \"hours\": {\n \"milliseconds\": \"time * 60 * 60 * 1000\",\n \"seconds\": \"time * 60 * 60\",\n \"minutes\": \"time * 60\",\n \"hours\": \"time\",\n \"days\": \"time / 24\",\n \"weeks\": \"time / 24 / 7\",\n \"fortnights\": \"time / 24 / 14\",\n \"years\": \"time / 24 / 365\",\n \"decades\": \"time / 24 / 365 / 10\",\n \"centuries\": \"time / 24 / 365 / 100\",\n \"millenniums\": \"time / 24 / 365 / 1000\"\n },\n \"days\": {\n \"milliseconds\": \"time * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 24 * 60 * 60\",\n \"minutes\": \"time * 24 * 60\",\n \"hours\": \"time * 24\",\n \"days\": \"time\",\n \"weeks\": \"time / 7\",\n \"fortnights\": \"time / 14\",\n \"years\": \"time / 365\",\n \"decades\": \"time / 365 / 10\",\n \"centuries\": \"time / 365 / 100\",\n \"millenniums\": \"time / 365 / 1000\"\n },\n \"weeks\": {\n \"milliseconds\": \"time * 7 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 7 * 24 * 60 * 60\",\n \"minutes\": \"time * 7 * 24 * 60\",\n \"hours\": \"time * 7 * 24\",\n \"days\": \"time * 7\",\n \"weeks\": \"time\",\n \"fortnights\": \"time / 2\",\n \"years\": \"time / 52\",\n \"decades\": \"time / 52 / 10\",\n \"centuries\": \"time / 52 / 100\",\n \"millenniums\": \"time / 52 / 1000\"\n },\n \"fortnights\": {\n \"milliseconds\": \"time * 14 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 14 * 24 * 60 * 60\",\n \"minutes\": \"time * 14 * 24 * 60\",\n \"hours\": \"time * 14 * 24\",\n \"days\": \"time * 
14\",\n \"weeks\": \"time * 2\",\n \"fortnights\": \"time\",\n \"years\": \"time / 26\",\n \"decades\": \"time / 26 / 10\",\n \"centuries\": \"time / 26 / 100\",\n \"millenniums\": \"time / 26 / 1000\"\n },\n \"years\": {\n \"milliseconds\": \"time * 256 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 256 * 24 * 60 * 60\",\n \"minutes\": \"time * 256 * 24 * 60\",\n \"hours\": \"time * 256 * 24\",\n \"days\": \"time * 256\",\n \"weeks\": \"time * 52\",\n \"fortnights\": \"time * 26\",\n \"years\": \"time\",\n \"decades\": \"time / 10\",\n \"centuries\": \"time / 100\",\n \"millenniums\": \"time / 1000\"\n },\n \"decades\": {\n \"milliseconds\": \"time * 10 * 256 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 10 * 256 * 24 * 60 * 60\",\n \"minutes\": \"time * 10 * 256 * 24 * 60\",\n \"hours\": \"time * 10 * 256 * 24\",\n \"days\": \"time * 10 * 256\",\n \"weeks\": \"time * 10 * 52\",\n \"fortnights\": \"time * 10 * 26\",\n \"years\": \"time * 10\",\n \"decades\": \"time\",\n \"centuries\": \"time / 10\",\n \"millenniums\": \"time / 100\"\n },\n \"centuries\": {\n \"milliseconds\": \"time * 100 * 256 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 100 * 256 * 24 * 60 * 60\",\n \"minutes\": \"time * 100 * 256 * 24 * 60\",\n \"hours\": \"time * 100 * 256 * 24\",\n \"days\": \"time * 100 * 256\",\n \"weeks\": \"time * 100 * 52\",\n \"fortnights\": \"time * 100 * 26\",\n \"years\": \"time * 100\",\n \"decades\": \"time * 10\",\n \"centuries\": \"time\",\n \"millenniums\": \"time / 10\"\n },\n \"millenniums\": {\n \"milliseconds\": \"time * 1000 * 256 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 1000 * 256 * 24 * 60 * 60\",\n \"minutes\": \"time * 1000 * 256 * 24 * 60\",\n \"hours\": \"time * 1000 * 256 * 24\",\n \"days\": \"time * 1000 * 256\",\n \"weeks\": \"time * 1000 * 52\",\n \"fortnights\": \"time * 1000 * 26\",\n \"years\": \"time * 1000\",\n \"decades\": \"time * 100\",\n \"centuries\": \"time * 10\",\n \"millenniums\": \"time\"\n }\n }\n\n # Return evaluated value\n return eval(conversions[currentformat][newformat])", "def getisotime():\n ct = datetime.utcnow()\n return ct.strftime(\"%Y%m%d%H%M\")", "def construct_obstime(self, row):\n return time.Time(self['mjd'][row], format='mjd')", "def time_convert(timestr):\n \n try:\n # Analyse given time str to seperate elements.\n struct_time = time.strptime(timestr[:-4], \"%a, %d %b %Y %H:%M:%S\")\n # Convert given time by secend unit.\n t = time.mktime(struct_time) \n # Re-construct time to isotime format.\n isot = time.strftime(\"%Y-%m-%d\", time.gmtime(t))\n return isot\n \n except:\n return ''", "def str_to_time(my_time):\n time_format = \"%H:%M\"\n try:\n my_time = datetime.strptime(my_time, time_format)\n except:\n my_time = datetime.now()\n\n return my_time", "def get_time() -> str:\n return strftime(\"%H:%M:%S\")", "def get_time(self) -> int:\n t = str(self.eval(\"pyb.RTC().datetime()\").encode(\"utf-8\"))[1:-1].split(\", \")\n return int(t[4]) * 3600 + int(t[5]) * 60 + int(t[6])", "def conv_unix(evt_time):\n st = time.strptime(evt_time, '%Y-%m-%d %H:%M')\n return time.mktime(st)", "def convert_chandra_time(rawtimes):\n\n # rawtimes is in units of CXC seconds, or seconds since 1998.0\n # Compute the Delta T between 1998.0 (CXC's Epoch) and 1970.0 (Unix Epoch)\n\n seconds_since_1998_0 = rawtimes[0]\n\n cxctime = dt.datetime(1998, 1, 1, 0, 0, 0)\n unixtime = dt.datetime(1970, 1, 1, 0, 0, 0)\n\n # Calculate the first offset from 1970.0, needed by matplotlib's plotdate\n # The below is equivalent (within a few tens of seconds) to 
the command\n # t0 = Chandra.Time.DateTime(times[0]).unix\n delta_time = (cxctime - unixtime).total_seconds() + seconds_since_1998_0\n\n plotdate_start = epoch2num(delta_time)\n\n # Now we use a relative offset from plotdate_start\n # the number 86,400 below is the number of seconds in a UTC day\n\n chandratime = (np.asarray(rawtimes) -\n rawtimes[0]) / 86400. + plotdate_start\n\n return chandratime", "def str2time(s):\n return datetime.strptime(str(s), '%Y%m%d%H%M%S')", "def _to_milliseconds(self, time):\n if isinstance(time, dt.datetime):\n return int(time.timestamp() * 1e3)\n elif isinstance(time, int):\n return time\n else:\n raise NotImplementedError(\"Time format not supported. Use epochs, Datetime or Pandas Datetime\")", "def ghtime(time_str: str, server: str) -> datetime.datetime:\n # < 151228000000\n # > 2015-12-28 00:00:00\n server = server.lower()\n server = 'jp' if server == 'ja' else server\n tz_offsets = {\n 'na': '-0800',\n 'jp': '+0900',\n }\n timezone_str = '{} {}'.format(time_str, tz_offsets[server])\n return datetime.datetime.strptime(timezone_str, '%y%m%d%H%M%S %z')", "def transform_time(t):\n if t is None:\n return None\n elif isinstance(t, basestring):\n return t\n\n dt = datetime.fromtimestamp(t, UTC())\n return dt.strftime('%Y-%m-%dT%H:%M:%S%z')", "def get_time() -> str:\r\n return time.strftime(TIMEFORMAT)", "def orig_time(self) -> float:\n return ntp_to_system_time(self.orig_timestamp)", "def GAME_TIME_ADVANCE(dt):", "def convertTime(df=None):\n if df is None:\n df = load_data()\n df = df[(df['DISCOVERY_TIME'].notnull())]\n df = df[(df['CONT_TIME'].notnull())]\n df['DISCOVERY_TIME'] = df['DISCOVERY_TIME'].astype('int')\n df['CONT_TIME'] = df['CONT_TIME'].astype('int')\n df['DISCOVERY_TIME'] = pd.to_datetime(\n (df['DISCOVERY_TIME'] // 100).astype('str').str.zfill(2) +\n ':' + (df['DISCOVERY_TIME'] % 100).astype('str').str.zfill(2))\n df['CONT_TIME'] = pd.to_datetime(\n (df['CONT_TIME'] // 100).astype('str').str.zfill(2) +\n ':' + (df['CONT_TIME'] % 100).astype('str').str.zfill(2))\n return df", "def timestamp(self) -> dt.datetime:\n ts = self.json_data['timestamp']\n # Extract the datetime object from the `ts` string\n ts = dt.datetime.strptime(ts, '%Y-%m-%d %H:%M:%S')\n # Localise to Eastern time (Formstack returns Eastern times)\n ts = pytz.timezone('US/Eastern').localize(ts)\n # Convert to UTC time\n return ts.astimezone(pytz.timezone('UTC'))", "def get_time():\n return time.strftime(\"%Y%m%d-%H%M%S\")", "def __correct_token_time(self, t_time=None):\n\n if t_time is None:\n t_time = time.time()\n\n if time.localtime(t_time).tm_isdst and time.daylight:\n return -time.altzone\n else:\n return -time.timezone", "def convert_time_to_seconds(self, time_value):\n time_epoch = []\n mylog.debug('Converting %s to epoch time' % time_value)\n for value in time_value:\n try:\n pattern = ' %I:%M:%S%p'\n time_epoch_mini = int(time.mktime(time.strptime(value, pattern))) \n time_epoch.append(time_epoch_mini)\n except:\n mylog.debug('%s Does not seem to be in format with leading space' % value)\n try:\n pattern = '%I:%M:%S%p'\n time_epoch_mini = int(time.mktime(time.strptime(value, pattern))) \n time_epoch.append(time_epoch_mini)\n except:\n mylog.debug('%s Does not appear to be in format without leading space' % value)\n return time_epoch", "def convert_timestamp(ts):\n format = '%Y-%m-%d %H:%M:%S'\n return datetime.strptime(ts, format)", "async def _time(self, ctx):\n try:\n await self.bot.say('@{0}:'.format(ctx.message.author.name) + '\\nDate is: **' + time.strftime(\"%A, %B 
%d, %Y\") + '**' + '\\nTime is: **' + time.strftime(\"%I:%M:%S %p\") + '**')\n except Exception as e:\n await self.bot.say(code.format(type(e).__name__ + ': ' + str(e)))", "def to_ios_time(self):\n try:\n dt_obj = duparser.parse(timestamp)\n self.out_iostime = str(int(((dt_obj - self.epoch_2001).total_seconds()) * self.nano_2001))\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.out_iostime = False\n return self.out_iostime", "def time_to_string(value):\n if value == gst.CLOCK_TIME_NONE:\n return \"--:--:--.---\"\n ms = value / gst.MSECOND\n sec = ms / 1000\n ms = ms % 1000\n mins = sec / 60\n sec = sec % 60\n hours = mins / 60\n mins = mins % 60\n return \"%02d:%02d:%02d.%03d\" % (hours, mins, sec, ms)", "def _ms_to_time(self, milliseconds):\n \n ms = milliseconds\n \n # Get the last 3 digits of the milliseconds\n trunc_ms = ms % 1000\n seconds = (ms / 1000)\n minutes = (seconds / 60)\n hours = minutes / 60\n \n # hours can go above 24, so don't modulus\n return '%02d:%02d:%02d,%03d' % (hours, minutes % 60, seconds % 60, trunc_ms)", "def from_sym_time(self):\n reason = \"[!] Symantec 6-byte hex timestamps are 12 hex characters\"\n ts_type = self.ts_types['symtime']\n try:\n if not len(self.sym) == 12 or not all(char in hexdigits for char in self.sym):\n self.in_symtime = indiv_output = combined_output = False\n pass\n else:\n hex_to_dec = [int(self.sym[i:i+2], 16) for i in range(0, len(self.sym), 2)]\n hex_to_dec[0] = hex_to_dec[0] + 1970\n hex_to_dec[1] = hex_to_dec[1] + 1\n dt_obj = dt(hex_to_dec[0], hex_to_dec[1], hex_to_dec[2], hex_to_dec[3], hex_to_dec[4], hex_to_dec[5])\n self.in_symtime = dt_obj.strftime('%Y-%m-%d %H:%M:%S.%f')\n indiv_output = str(\"{} {}\".format(ts_type, self.in_symtime))\n combined_output = str(\"{}{}\\t\\t{} UTC{}\".format(self.left_color, ts_type, self.in_symtime, self.right_color))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.in_symtime = indiv_output = combined_output = False\n return self.in_symtime, indiv_output, combined_output, reason", "def time_is() -> str:\n return '$'", "def to_seconds(time):\n return 3600 * time", "def convert_timestamp(stamp):\n date = datetime.fromtimestamp(float(stamp))\n return date.strftime(\"%m/%d/%y %I:%M:%S %p\")", "def timestamp(self, t):\n if isinstance(t, datetime):\n t = time.mktime(t.timetuple())\n return t - 631065600", "async def time():\n utc_moment = datetime.utcnow()\n utc_moment = utc_moment.replace(tzinfo=pytz.utc)\n formatting = \"%Y-%m-%d %H:%M:%S\"\n timezone = \"Europe/Moscow\"\n timezone_dt = utc_moment.astimezone(pytz.timezone(timezone))\n dt_str = timezone_dt.strftime(formatting)\n storage.add_data(dt_str)\n return {\"Moscow datetime\": dt_str}", "def latest_synop_time()-> datetime:\n utc = datetime.utcnow()\n\n if utc.hour < 1:\n utc = utc - timedelta(days=1)\n utc = utc.replace(hour=18)\n elif utc.hour < 7:\n utc = utc.replace(hour=0)\n elif utc.hour < 13:\n utc = utc.replace(hour=6)\n elif utc.hour < 19:\n utc = utc.replace(hour=12)\n else:\n utc = utc.replace(hour=18)\n\n utc.replace(minute=0, second=0)\n return utc", "def to_sym_time(self):\n ts_type = self.ts_types['symtime']\n try:\n dt_obj = duparser.parse(self.timestamp)\n sym_year = '{0:x}'.format(dt_obj.year - 1970).zfill(2)\n sym_month = '{0:x}'.format(dt_obj.month - 1).zfill(2)\n sym_day = '{0:x}'.format(dt_obj.day).zfill(2)\n sym_hour = 
'{0:x}'.format(dt_obj.hour).zfill(2)\n sym_minute = '{0:x}'.format(dt_obj.minute).zfill(2)\n sym_second = '{0:x}'.format(dt_obj.second).zfill(2)\n self.out_symtime = sym_year + sym_month + sym_day + sym_hour + sym_minute + sym_second\n ts_output = str(\"{}\\t\\t{}\".format(ts_type, self.out_symtime))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.out_symtime = ts_output = False\n return self.out_symtime, ts_output", "def interpret_time( text ):\n app.logger.debug(\"Decoding time '{}'\".format(text))\n time_formats = [\"ha\", \"h:mma\", \"h:mm a\", \"H:mm\"]\n try: \n as_arrow = arrow.get(text, time_formats).replace(tzinfo=tz.tzlocal())\n as_arrow = as_arrow.replace(year=2016) #HACK see below\n app.logger.debug(\"Succeeded interpreting time\")\n except:\n app.logger.debug(\"Failed to interpret time\")\n flask.flash(\"Time '{}' didn't match accepted formats 13:30 or 1:30pm\"\n .format(text))\n raise\n return as_arrow.isoformat()\n #HACK #Workaround\n # isoformat() on raspberry Pi does not work for some dates\n # far from now. It will fail with an overflow from time stamp out\n # of range while checking for daylight savings time. Workaround is\n # to force the date-time combination into the year 2016, which seems to\n # get the timestamp into a reasonable range. This workaround should be\n # removed when Arrow or Dateutil.tz is fixed.\n # FIXME: Remove the workaround when arrow is fixed (but only after testing\n # on raspberry Pi --- failure is likely due to 32-bit integers on that platform)", "def timeConvert(time):\n\n FMTin = '%Y-%m-%d %H:%M:%S'\n FMTout = '%m/%d/%y'\n\n return datetime.strftime(datetime.strptime(time, FMTin), FMTout)", "def get_time(self):\n return time.strftime(\"%d/%m/%y %M:%H:%S\", self.time)", "def get_uk_time(message):\n time_api = 'http://worldtimeapi.org/api/timezone/Europe/London.json'\n london_time = requests.get(time_api).json()\n\n return(\"The current time in London, England is {}\".format(\n london_time['datetime'][11:16]))", "def _astropy_time(time):\n return time if isinstance(time, astropy.time.Time) else astropy.time.Time(parse_time(time))", "def dest_time(self) -> float:\n return ntp_to_system_time(self.dest_timestamp)", "def __float_to_time(float_value):\n time_ms = int(float_value*24*60*60*1e3)\n return (datetime.datetime.min + datetime.timedelta(milliseconds=time_ms)).time()", "def format_time(value: int) -> str:\n return (datetime(1970, 1, 1) + timedelta(milliseconds=value)).strftime('%Y%m%d%H%M%S')", "def convertTime(string):\n try:\n d = dtparser.parse(string)\n except ValueError:\n try:\n d = datetime.fromtimestamp(float(string))\n except ValueError:\n return string\n\n d.replace(tzinfo=tz.tzlocal())\n return datetime.strftime(d, \"%Y/%m/%d %H:%M:%S\")", "def convertTime(time_string):\n if (not isinstance(time_string, str)) or len(time_string) != 10 or not time_string.startswith('-'):\n print('There was an issue with the passed timestring: ', time_string)\n if time_string == '0':\n return timezone.now()\n else:\n raise ValueError('Date in import file is not valid')\n ## !!! 
NOTE: Unix using different epoch (1970 start rather than 1900->2036) so this library method is giving the wrong date from our timestamp\n timestamp = int(time_string) + 2085935295\n dt = datetime.fromtimestamp(timestamp, timezone.utc ) # 70 year adjustment for unix library\n print('timestamp (UTC): ', timestamp, 'type: ', type(timestamp))\n print('returning: ', dt, 'type: ', type(dt))\n return dt", "def get_timestamp():\n return time.strftime('%Y-%m-%d %H:%M:%S')", "def _ToBlogTime(self, time_tuple):\r\n return time.strftime('%Y-%m-%dT%H:%M:%SZ', time_tuple)", "def str_to_time(str):\n if not str:\n return str\n return datetime.datetime.strptime(str.split(\".\")[0], DEFAULT_SERVER_TIME_FORMAT).time()", "def raw_time_to_centi(raw_time: str) -> int:\n\n time = int(raw_time[2:7])\n minutes = time // (Time.SECOND * 100)\n\n time = (time % (Time.SECOND * 100)) + minutes * Time.MINUTE\n return time", "def convert_time_to_second(time_string):\n return int(time.mktime(time.strptime(time_string, TIME_PATTERN)))", "def convert_timestamp():\n ts = int(sys.argv[1])\n human_readable_time = '%m/%d/%Y @ %I:%M %p (UTC)'\n print(datetime.utcfromtimestamp(ts).strftime(human_readable_time))", "def convert_time(self, event):\n try:\n #Compare other unit to one unit(seconds)\n current_value, current_unit = float(\"0\" + str(self.v.get())), self.dropdown.get()\n unit_comp = {\"centuries\": 3153600000.0, \"days\": 86400.0, \"decades\": 315360000.0, \"femtoseconds\": 1e-15, \"fortnights\": 1209600.0, \"hours\": 3600.0, \"microseconds\": 1e-06, \"millenia\": 31536000000.0, \"milliseconds\": 0.001, \"minutes\": 60.0, \"months(Common)\": 2628000.0, \"months(Synodic)\": 2551442.8896, \"nanoseconds\": 1e-09, \"picoseconds\": 1e-12, \"quarters(Common)\": 7884000.0, \"seconds\": 1.0, \"shakes\": 1e-08, \"weeks\": 604800.0, \"years(Average Gregorian)\": 31556952.0, \"years(Common)\": 31536000.0, \"years(Julian)\": 31557600.0, \"years(Leap)\": 31622400.0, \"years(Tropical)\": 31556925.216}\n value_comp, printer = current_value * unit_comp[current_unit], \"\"\n unit_list = sorted(unit_comp.keys())\n unit_list.remove(current_unit)\n for unit in unit_list:\n printer += \"To %s \" % unit + \" \" * (max([len(i) for i in unit_list]) - len(unit)) + str(value_comp / unit_comp[unit]) + [\"\", \"\\n\"][unit_list[-1] != unit]\n except ValueError: #In case user enter the other type of value, not Int or Float\n printer = \"Value is invalid.\"\n self.print_text(printer)", "def to_gps_time(self):\n try:\n iso_time = Time(timestamp, format='iso', scale='utc')\n iso_time.format='gps'\n self.out_gpstime = str(iso_time)\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.out_gpstime = False\n return self.out_gpstime", "def cvt_time(dt_str):\n # Note, these timestamps don't include time zones\n return datetime.strptime(dt_str, '%Y-%m-%dT%H:%M:%S.%fZ')", "def convert_time(time_passed):\n\n minutes = time_passed.seconds // 60\n\n return minutes", "def sbetime2unixtime(value):\n if not isinstance(value, int):\n raise InstrumentParameterException(\"value not a int\")\n\n return SBE_EPOCH + value", "def get_time(self) -> float:\n return self.player.time", "def timestamp(style=False):\r\n\r\n if not style:\r\n return time.strftime('%H:%M:%S%p %D', time.localtime())\r\n else:\r\n return time.strftime(style, time.localtime())", "def get_time():\r\n \r\n dt = datetime.datetime.now()\r\n dt_parsed = dt.strftime(\"%Y-%m-%d %H:%M:%S\")\r\n return dt_parsed", "def utc_to_unix_time(self, t):\n 
(y,m,d) = str(t).split('-')\n return datetime(int(y), int(m), int(d)).strftime('%s')", "def interpret_time(text):\n app.logger.debug(\"Decoding time '{}'\".format(text))\n time_formats = [\"ha\", \"h:mma\", \"h:mm a\", \"H:mm\"]\n try:\n as_arrow = arrow.get(text, time_formats).replace(tzinfo=tz.tzlocal())\n as_arrow = as_arrow.replace(year=2016) # HACK see below\n app.logger.debug(\"Succeeded interpreting time\")\n except:\n app.logger.debug(\"Failed to interpret time\")\n flask.flash(\"Time '{}' didn't match accepted formats 13:30 or 1:30pm\"\n .format(text))\n raise\n return as_arrow.isoformat()\n # HACK Workaround\n # isoformat() on raspberry Pi does not work for some dates\n # far from now. It will fail with an overflow from time stamp out\n # of range while checking for daylight savings time. Workaround is\n # to force the date-time combination into the year 2016, which seems to\n # get the timestamp into a reasonable range. This workaround should be\n # removed when Arrow or Dateutil.tz is fixed.\n # FIXME: Remove the workaround when arrow is fixed (but only after testing\n # on rasp Pi failure is likely due to 32-bit integers on that platform)" ]
[ "0.5873298", "0.58109105", "0.58083147", "0.5758464", "0.56725055", "0.5670564", "0.5648157", "0.5589003", "0.5543408", "0.5492668", "0.53174144", "0.53155077", "0.5314277", "0.52799726", "0.5264059", "0.5256253", "0.5210616", "0.5203832", "0.5203313", "0.51997644", "0.51930267", "0.51914006", "0.5188009", "0.5179832", "0.51788926", "0.5164332", "0.5162852", "0.5136937", "0.51277125", "0.5123108", "0.51219875", "0.51219875", "0.51022464", "0.5100638", "0.5089663", "0.5079632", "0.50718766", "0.50713277", "0.5044509", "0.50361603", "0.5036099", "0.5027641", "0.5025572", "0.5018804", "0.501704", "0.5002094", "0.4999716", "0.49936557", "0.4989765", "0.49876666", "0.49766186", "0.497651", "0.49751955", "0.4972717", "0.49709636", "0.49645248", "0.4947184", "0.49360538", "0.49359274", "0.49302554", "0.49288797", "0.49235395", "0.49160263", "0.49152792", "0.49133295", "0.49089712", "0.49045268", "0.49026334", "0.4901401", "0.4899731", "0.4899349", "0.48933238", "0.48913962", "0.48881343", "0.48874494", "0.48839426", "0.48824742", "0.4882274", "0.48784852", "0.48728052", "0.4869829", "0.4868853", "0.4866152", "0.48633742", "0.48549873", "0.4854198", "0.4852276", "0.48495835", "0.48485258", "0.48483595", "0.48435542", "0.4836803", "0.48336548", "0.4824457", "0.4813004", "0.48121387", "0.4804026", "0.47896942", "0.4789101", "0.47882962" ]
0.6189894
0
What should I exactly repeat?
async def repeat( text: ('str', 'The content to repeat') ): if not text: text = 'nothing to repeat' return InteractionResponse(text, allowed_mentions = None)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def repeated_iteration(self) -> global___Statement.Iteration.RepeatedIteration:", "def _do_iteration(self):\n return True", "def everytime(self):\n return True", "def naked_singles(self):\n self.change = True\n while self.change:\n self.naked_round()", "def test_repeatable(self):\n\n def run(seed, ModelClass=Model):\n \"\"\"Return the history of a run\"\"\"\n model = ModelClass(random_seed=seed)\n return model.one_trial(1, 10)\n\n self.assertEqual(run(0, ModelClass=Model).data, run(0, ModelClass=Model).data)\n self.assertEqual(run(0, ModelClass=ReplicatedModel).data, run(0, ModelClass=ReplicatedModel).data)", "def repeat(self):\n return self._repeat", "def loop(self):\n pass", "def oneIteration(self):\n\t\traise NotImplementedError", "def only_once(self) -> bool:\n return self.times == 1", "def _run_cycle(self):\n pass", "def think(self):\n pass", "def repeat(s):\r\n\r\n return s", "def task4_1(self):\n\n pass", "def _run(self):\n while(self._loop):\n pass", "def MULTIPLAYER_LOOP():\n pass", "def _regr_basic():", "def bookkeep(self) :\n\t\tself.loopiter += 1", "def repeat(self):\n return self._get('repeat')", "def setRepeatedPass(self, repeatedPass):", "def run( self, cycles=-1 ):", "def task4(self):\n\n pass", "def algorithm_loop(self):", "def __call__(self, *args):\n self.count = self.count + 1", "def run_one_step(self):\n pass", "def forever():\n\n def animate(thing):\n thing = list(thing)\n yield from repeat(thing)\n return animate", "def sample(self):", "def go_again(self):\n return True", "def loop(self):\n raise NotImplementedError()", "def looptcs(self): \n while self.itr < 1: \n #self.genRandomNoise() #developing\n self.tcs(lpf=1)\n #self.itr +=1 ", "def repeat(fn):\n def repeated():\n i = 0\n while i < random_test_iterations:\n fn()\n i += 1\n # nosetest runs functions that start with 'test_'\n repeated.__name__ = fn.__name__\n return repeated", "def simple():\n yield 1\n yield 2\n yield 3", "def _generate_pileups(self):\n pass", "def exhaust (self):\n self.counter = self.times", "def GAME_LOOP():\n pass", "def iterate(self):", "def test_repeated_calls_different_quotes(self):\n quoteSet = set()\n for i in range(5):\n quoteSet.add(getRandomJoke()[\"joke\"])\n self.assertEqual(len(quoteSet) > 1, True)", "def smarter():\r\n pass", "async def repeat(self,ctx, times: int, content='repeating...'):\n for i in range(times):\n await ctx.send(content)", "def run_nop(self):\n pass", "def one(self):", "def totem_random():\n random_head()\n random_head()\n random_head()", "def g():", "def postLoopFunctions(self):\n\t\treturn", "def GAMEOVER_LOOP():\n pass", "def task5(self):\n\n pass", "async def repeat(times : int, content='repeating...'):\n for i in range(times):\n await bot.say(content)", "def next():", "def next():", "async def repeat(ctx, times : int, content='repeating...'):\n for i in range(times):\n await bot.say(content)", "def RUN(self):", "def run(self): # pragma: no cover\n while True:\n self.update()", "def task3(self):\n\n pass", "def dummy(self):\n pass", "def trial(self):\n pass", "def seed():", "def test_02_visit_again(self):", "def run(self):\n for i in range(self.nreps):\n print self.ch,", "def repeat(self, count):\n return self.Sequence((self,) * count)", "def _prepare(self):\n for n in range(4):\n self._code += str(random.randint(1, 9))", "def dance(self):\n if not self.safe_to_dance():\n return False #shutdown\n for x in range(4): \n self.shuffle()\n self.skipp()\n self.spin_dizzy()\n self.for_back()\n self.break_neck()\n self.swiggly()\n self.break_neck()\n 
self.backward_shimmey()", "def exercise_b2_53():\r\n pass", "def run_multiple_test_cycles(self):\n # Perform as many cycles as required\n while self.args.repetitions >= 0:\n self.run_one_test_cycle()\n self.args.repetitions -= 1", "def cycle(self):\n self.all_animals_eat()\n self.mate_all_animals()\n self.move_all_animals()\n self.age_all_animals()\n self.annual_weight_loss_all_animals()\n self.annual_death_all_animals()", "def exercise_b2_69():\r\n pass", "def testrandom(self):\n for i in range(100):\n AmuletAbility()", "def preLoopFunctions(self):\n\t\treturn", "def advance(self) -> None:\n pass", "def test(self):\n # -- Test --\n\n # (1)\n\n # (2)\n\n # (3)\n\n # (4)\n # -- Test --", "def repeat(self, number_of_repeats):\n return \"G\" + str(number_of_repeats)", "def repeat(phrase, num):\n #JUST DISCOVERED DOCTEST!!!\n #ALL THIS TIME i'VE BEEN MANUALLY CUTTING AND PASTING THE DOCTESTS TO MANUALLY\n #TEST THEM WHEN I COULD HAVE JUST BEEN RUNNING THEM!\n #GAAAAAHHHH!!!\n if isinstance(num, (int, float)) and num >= 0:\n return phrase * num\n else:\n return None", "def all(self):", "def all(self):", "def regular(self):", "def random_values():\n while True:\n yield random()", "def disarm(self):\n pass", "def test_never_same():\n g = RG.larger_random()\n hundred_calls = set([next(g) for _ in range(20)])\n assert len(hundred_calls) == 20", "def task1(self):\n \n pass", "def simulationTwoDrugsDelayedTreatment():\n\n # TODO", "def exercise_gen(ret_val, times):", "def do_twice(f):\n f()\n f()", "def do_twice(f):\n f()\n f()", "def go_again(self):\n return False", "def next_run(self):\n self.load_run(run=self.run+1)", "def timesGoBy(self):\n self.wcount += 1", "def exercise_b2_27():\r\n pass", "def exercise_b2_56():\r\n pass", "def exercise_b2_43():\r\n pass", "async def repeat(ctx, times: int, content='repeating...'):\n for i in range(times):\n await ctx.send(content)", "def simple():", "def simple():", "def test_foo(self):\n self.ran = True\n 1 / 0", "def realsense():\n pass", "def MakeRepeat1(self,content):\n return self.register(Repeat1(content,reg=self))", "def fit_once(self,random_restart=False):\n raise NotImplementedError", "def repeat_rythm(self, rythm, dev):\n return rythm", "def simulate(self):\n self.round += 1", "def go_again(self):\n num = random.randint(1, 2)\n if num == 1:\n return True\n else:\n return False", "def make_repeatable():\n random.seed(1234)\n np.random.seed(1234)", "async def repeat(ctx, *, arg):\r\n if await bMsg(ctx,ctx.message.author.name,client):\r\n return\r\n logger.info('repeat: ' + arg, extra={'invoker': ctx.message.author.name})\r\n await ctx.send(arg)", "def test_repeated_iteration(self):\n self.Person(name=\"Person 1\").save()\n self.Person(name=\"Person 2\").save()\n\n queryset = self.Person.objects\n people1 = [person for person in queryset]\n people2 = [person for person in queryset]\n\n # Check that it still works even if iteration is interrupted.\n for _person in queryset:\n break\n people3 = [person for person in queryset]\n\n assert people1 == people2\n assert people1 == people3", "def Run():\r\n pass" ]
[ "0.67247885", "0.62914574", "0.62090546", "0.61698216", "0.61460817", "0.6112917", "0.6105271", "0.59547746", "0.5865101", "0.58294857", "0.5821225", "0.5808737", "0.5754446", "0.5729373", "0.570239", "0.5692832", "0.56896436", "0.5685115", "0.5657328", "0.56556696", "0.5643494", "0.5637076", "0.5636245", "0.5628343", "0.5623852", "0.5611191", "0.56043065", "0.55915797", "0.5589385", "0.55754405", "0.5571603", "0.5569889", "0.5562355", "0.5544976", "0.5539848", "0.55347335", "0.553166", "0.5530849", "0.5526169", "0.55150616", "0.5504071", "0.5501731", "0.54939383", "0.5491812", "0.5491208", "0.54904664", "0.5488733", "0.5488733", "0.5486874", "0.5478406", "0.5467205", "0.54565054", "0.5421623", "0.5420482", "0.5418912", "0.5403834", "0.54036474", "0.53985465", "0.5394819", "0.53931475", "0.5382173", "0.53780437", "0.5376369", "0.5376319", "0.5375811", "0.5367767", "0.53607196", "0.5359546", "0.5349665", "0.5340104", "0.533751", "0.533751", "0.53343564", "0.5330352", "0.53298014", "0.5319262", "0.5317309", "0.53043264", "0.53031373", "0.5298895", "0.5298895", "0.529644", "0.5295927", "0.5295533", "0.5295321", "0.5290563", "0.5285455", "0.528289", "0.5280176", "0.5280176", "0.527745", "0.5259987", "0.5247189", "0.5246953", "0.5239005", "0.52357507", "0.5235559", "0.52322817", "0.52321583", "0.52304053", "0.5229063" ]
0.0
-1
Shows the guild's features.
async def guild_features(event): guild = event.guild return Embed( f'{guild.name}\'s features', ', '.join(sorted(feature.name for feature in guild.iter_features())), ).add_thumbnail( guild.icon_url )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def features():\n\n return render_template('features.html')", "def print_features(self):\n print(feature_print_string.format(self.client_id,\n *tuple(map(lambda x: str(int(x)).zfill(4), self.features))))", "def display_features():\n\n # Parse the URL, check for implicit resources, extract the primary record\n # http://127.0.0.1:8000/eden/gis/display_features&module=pr&resource=person&instance=1&jresource=presence\n ok = 0\n if \"module\" in request.vars:\n res_module = request.vars.module\n ok +=1\n if \"resource\" in request.vars:\n resource = request.vars.resource\n ok +=1\n if \"instance\" in request.vars:\n instance = int(request.vars.instance)\n ok +=1\n if \"jresource\" in request.vars:\n jresource = request.vars.jresource\n ok +=1\n if ok != 4:\n session.error = T(\"Insufficient vars: Need module, resource, jresource, instance\")\n raise HTTP(400, body=s3xrc.xml.json_message(False, 400, session.error))\n\n component, pkey, fkey = s3xrc.model.get_component(res_module, resource, jresource)\n table = db[\"%s_%s\" % (res_module, resource)]\n jtable = db[str(component.table)]\n query = (jtable[fkey] == table[pkey]) & (table.id == instance)\n # Filter out deleted\n deleted = (table.deleted == False)\n query = query & deleted\n # Filter out inaccessible\n query2 = db.gis_location.id == jtable.location_id\n accessible = s3_accessible_query(\"read\", db.gis_location)\n query2 = query2 & accessible\n\n features = db(query).select(db.gis_location.ALL, left = [db.gis_location.on(query2)])\n\n # Calculate an appropriate BBox\n bounds = gis.get_bounds(features=features)\n\n map = gis.show_map(\n feature_queries = [{\"name\" : \"Features\", \"query\" : features, \"active\" : True}],\n bbox = bounds,\n window = True,\n closable = False,\n collapsed = True\n )\n\n return dict(map=map)", "def GetFeatures(self):\n try:\n return self._SendRequest(HTTP_GET, \"/%s/features\" % GANETI_RAPI_VERSION,\n None, None)\n except GanetiApiError as err:\n # Older RAPI servers don't support this resource\n if err.code == HTTP_NOT_FOUND:\n return []\n\n raise", "def features(self) -> Optional[pulumi.Input['DevToolPortalFeatureSettingsArgs']]:\n return pulumi.get(self, \"features\")", "def display_feature():\n\n # The Feature\n feature_id = request.args(0)\n\n # Check user is authorised to access record\n if not s3_has_permission(\"read\", db.gis_location, feature_id):\n session.error = T(\"No access to this record!\")\n raise HTTP(401, body=s3xrc.xml.json_message(False, 401, session.error))\n\n query = db(db.gis_location.id == feature_id).select(limitby=(0, 1))\n feature = query.first()\n\n config = gis.get_config()\n\n try:\n # Centre on Feature\n lat = feature.lat\n lon = feature.lon\n if (lat is None) or (lon is None):\n if feature.get(\"parent\"):\n # Skip the current record if we can\n latlon = gis.get_latlon(feature.parent)\n elif feature.get(\"id\"):\n latlon = gis.get_latlon(feature.id)\n else:\n # nothing we can do!\n raise\n if latlon:\n lat = latlon[\"lat\"]\n lon = latlon[\"lon\"]\n else:\n # nothing we can do!\n raise\n except:\n lat = config.lat\n lon = config.lon\n\n # Calculate an appropriate BBox\n #bounds = gis.get_bounds(features=query)\n\n # Default zoom +2 (same as a single zoom on a cluster)\n zoom = config.zoom + 2\n\n map = gis.show_map(\n feature_queries = [{\"name\" : \"Feature\", \"query\" : query, \"active\" : True}],\n lat = lat,\n lon = lon,\n #bbox = bounds,\n zoom = zoom,\n window = True,\n closable = False,\n collapsed = True\n )\n\n return dict(map=map)", "def 
display_feature(self):\n        return ', '.join([feature.name for feature in self.features.all()])", "def show_ground_feature():\n    ground_description_int = GROUND_FEATURES_LIST[ZERO_BASE_PLYR_POS]\n    mvaddstr(16, 3, GROUND_DESCRIPTIONS.get(ground_description_int), color_pair(GROUND_FEATURES_COLOUR) | A_BOLD)", "def getFeatures(self, state, action):\n    util.raiseNotDefined()", "def getFeatures(self, state, action):\n    util.raiseNotDefined()", "def getFeatures(self, state, action):\n    util.raiseNotDefined()", "def getFeatures(self, state, action):\n    util.raiseNotDefined()", "def exposes_features(self):\n        return self._features_op is not None", "def test_available_features():\n    features = (\n        \"Feature Name : Capa1\\r\\n                State : Enabled\\r\\n\"\n        \"Feature Name : Capa2\\r\\n                State : Disabled\\r\\n\"\n    )\n\n    mock = MagicMock(return_value=features)\n    with patch.dict(dism.__salt__, {\"cmd.run\": mock}):\n        out = dism.available_features()\n        mock.assert_called_once_with(\n            [dism.bin_dism, \"/English\", \"/Online\", \"/Get-Features\"]\n        )\n        assert out == [\"Capa2\"]", "def list_features(self, dataset, reverse=False, start=None, limit=None):\n        uri = URITemplate(self.baseuri + '/{owner}/{id}/features').expand(\n            owner=self.username, id=dataset)\n\n        params = {}\n        if reverse:\n            params['reverse'] = 'true'\n        if start:\n            params['start'] = start\n        if limit:\n            params['limit'] = int(limit)\n        return self.session.get(uri, params=params)", "def getFeatures(self, state, action, thisAgent):\n    util.raiseNotDefined()", "def test_get_features():\n    features = (\n        \"Feature Name : Capa1\\r\\n                State : Enabled\\r\\n\"\n        \"Feature Name : Capa2\\r\\n                State : Disabled\\r\\n\"\n    )\n\n    mock = MagicMock(return_value=features)\n    with patch.dict(dism.__salt__, {\"cmd.run\": mock}):\n        out = dism.get_features()\n        mock.assert_called_once_with(\n            [dism.bin_dism, \"/English\", \"/Online\", \"/Get-Features\"]\n        )\n        assert out == [\"Capa1\", \"Capa2\"]", "def test_can_enable_features_per_user(page):\n    username = login_as_superuser(page)\n\n    features.toggle_feature_for_user(page=page,\n                                     feature_name='team-management-redo',\n                                     username=username,\n                                     enable=False)\n\n    page.goto(\"/form/view\")\n    nav = page.locator('.usa-nav__primary li').all()\n    assert len(nav) == 2\n\n    features.toggle_feature_for_user(page=page,\n                                     feature_name='team-management-redo',\n                                     username=username,\n                                     enable=True)\n\n    page.goto(\"/form/view\")\n    nav = page.locator('.usa-nav__primary li').all()\n    assert len(nav) == 3\n    assert '🆕 Team Management' in nav[2].text_content().strip()\n\n    features.toggle_feature_for_user(page=page,\n                                     feature_name='team-management-redo',\n                                     username=username,\n                                     enable=False)\n\n    page.goto(\"/form/view\")\n    nav = page.locator('.usa-nav__primary li').all()\n    assert len(nav) == 2", "def show_flavors():\n    return get_flavors()", "def advanced_machine_features(self) -> 'outputs.AdvancedMachineFeaturesResponse':\n        return pulumi.get(self, \"advanced_machine_features\")", "def readFeatures(self):\n\t\treturn self._fileSystem.readFeatures()", "def test_installed_features():\n    features = (\n        \"Feature Name : Capa1\\r\\n                State : Enabled\\r\\n\"\n        \"Feature Name : Capa2\\r\\n                State : Disabled\\r\\n\"\n    )\n\n    mock = MagicMock(return_value=features)\n    with patch.dict(dism.__salt__, {\"cmd.run\": mock}):\n        out = dism.installed_features()\n        mock.assert_called_once_with(\n            [dism.bin_dism, \"/English\", \"/Online\", \"/Get-Features\"]\n        )\n        assert out == [\"Capa1\"]", "def display_flavors(self):\n        for flavor in self.flavors:\n            print(f\"- {flavor}\")", "def help():\n 
managed_services = help_managed_service()\n click.echo(format_text(\n \"List of Fandogh managed services:\", TextStyle.OKBLUE\n ))\n for managed_service in managed_services:\n click.echo(\"\\t* Service name: {}\".format(managed_service['name']))\n for parameter_name, description in managed_service['options'].items():\n click.echo(\"\\t\\t. {}:\\t{}\".format(parameter_name.ljust(20), description))", "def guest_os_features(self) -> Sequence['outputs.GuestOsFeatureResponse']:\n return pulumi.get(self, \"guest_os_features\")", "def features(self):\n return self._features", "def display_flavors(self):\r\n print(\"We have the following flavors\"\"\")\r\n for flavor in self.flavors:\r\n print(\" ...\" + str(flavor.title()))", "def listFeatures() :\n global features\n features = [feature.split(\".\")[0] for feature in os.listdir(os.path.abspath(__file__)[:-11])\n if feature.endswith(\".py\") and feature != \"__init__.py\"]", "def get_features(self):\n return self._features", "def advanced_features(self):\n return self._advanced_features", "def setup_features():\n\n core_features = {\"web\": [\"content_directory\", \"controllers\", \"templates\"]}\n\n imported_features = []\n for feature_type, feature_list in core_features.items():\n features_list_names = \", \".join(feature_list)\n print(\n \"** Setting up {0} features {1}\".format(\n info(feature_type), info(features_list_names)\n )\n )\n for feature_name in feature_list:\n script_dir = dirname(abspath(__file__))\n module_fname = join(\n script_dir, \"features\", feature_type, feature_name + \".py\"\n )\n\n feature_dict = {}\n with open(module_fname) as source_file:\n exec(compile(source_file.read(), module_fname, \"exec\"), feature_dict)\n try:\n feature = feature_dict[\"Feature\"]()\n except KeyError:\n print_error(\n \"Feature module '%s' does not provide a Feature class!\"\n % feature_name\n )\n sys.exit(1)\n try:\n feature.setup()\n except: # NOQA: E722\n print_error(\"Failed setting up feature '%s' !\" % feature_name)\n raise\n imported_features.append(feature)\n\n for feature in imported_features:\n if hasattr(feature, \"activate\"):\n feature.activate()", "def _features_of(entry: _LexiconEntry) -> str:\n return entry[\"features\"]", "def features(request):\n # Order features by amount of upvotes\n features_list = Feature.objects.all().order_by('-upvotes')\n \n # Pagination for features\n page = request.GET.get('page', 1)\n paginator = Paginator(features_list, 10)\n try:\n features = paginator.page(page)\n except PageNotAnInteger:\n features = paginator.page(1)\n except EmptyPage:\n features = paginator.page(paginator.num_pages)\n \n # Display graphs\n chart_total_feature = FeaturesTotalChart() \n chart_feature_daily = FeaturesDailyStatus()\n chart_feature_weekly = FeaturesWeeklyStatus()\n chart_feature_monthly = FeaturesMonthlyStatus()\n \n return render(request, \"features.html\", {\n \"features\": features,\n 'chart_total_feature': chart_total_feature,\n 'chart_feature_daily': chart_feature_daily,\n 'chart_feature_weekly': chart_feature_weekly,\n 'chart_feature_monthly': chart_feature_monthly\n })", "def list_features(\n self,\n ) -> Callable[\n [featurestore_service.ListFeaturesRequest],\n Awaitable[featurestore_service.ListFeaturesResponse],\n ]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"list_features\" not in self._stubs:\n self._stubs[\"list_features\"] = 
self.grpc_channel.unary_unary(\n \"/google.cloud.aiplatform.v1beta1.FeaturestoreService/ListFeatures\",\n request_serializer=featurestore_service.ListFeaturesRequest.serialize,\n response_deserializer=featurestore_service.ListFeaturesResponse.deserialize,\n )\n return self._stubs[\"list_features\"]", "def show_tools():\n print(\"\"\"\n List the tools available in this package:\n create_date_features(df, date) #TODO\n create_date_feature_bisiness_quater(df = None, date = None)\n create_date_feature_daytime(df = None, date = None)\n create_date_feature_is_public_holiday(df, date, start, end, country = 'US')\n create_date_feature_is_month_end(df = None, date = None, last = 1)\n create_date_feature_is_weekend(df = None, date = None)\n create_date_feature_is_weekday(df = None, date = None)\n create_date_feature_season(df = None, date = None)\n create_grid(df, keys, target) #TODO\n create_lag_features_with_time_feature(df, cols, time, n = 5, fillna = True)\n create_lag_features_ohne_time_feature(df, cols, n = 5, fillna = True)\n create_window_feature(df, cols = None, col2 = None, win_size = 2, win_type = None, min_periods = 1, agg = 'mean')\n mean_encoder(df, cols, tg)\n \"\"\")", "def findFeatures(self):\n\t\tpass", "def show_help():\n\n url = (\n r\"https://agcloud.sharepoint.com/:p:/r/sites/\"\n r\"O365-UG-2HEngineeringSoftware/Shared%20Documents/2H%20Datalab/\"\n r\"DataLab%20Guidance.pptx?d=wcabe347939784784b8d7270cdf7938e7&csf=1&e=9LJsCD\"\n )\n webbrowser.open(url)", "def features(self):\n\n return self._features", "def _runtime_feature_list(self):\n supported_features_command = [self._path_to_driver(), '--print-supported-features']\n try:\n output = self._executive.run_command(supported_features_command, error_handler=Executive.ignore_error)\n except OSError, e:\n _log.warn(\"Exception running driver: %s, %s. 
Driver must be built before calling WebKitPort.test_expectations().\" % (supported_features_command, e))\n            return None\n\n        # Note: win/DumpRenderTree.cpp does not print a leading space before the features_string.\n        match_object = re.match(\"SupportedFeatures:\\s*(?P<features_string>.*)\\s*\", output)\n        if not match_object:\n            return None\n        return match_object.group('features_string').split(' ')", "def features(self) -> datasets.Features:\n        return datasets.Features(\n            {\n                \"sequence\": datasets.Value(\"string\"),\n                \"description\": datasets.Value(\"string\"),\n                \"id\": datasets.Value(\"string\"),\n            }\n        )", "def print_all_features(self):\n        if self.DEBUG:\n            print('weights')\n            print('-------------------------')\n            print('w_EDR: ', self.w_EDR)\n            print('w_Resource', self.w_RESOURCE)\n            print('w_Distance', self.w_DISTANCE)\n            print(' ')\n            print('Features')\n            print('-------------------------')\n            print('Agent locations at time step:', self.t, ' are ', self.agent_locations)\n            print('Agents that are idle at time step:', self.t, ' are ', self.is_agent_idle)\n            print('Tasks that are alive at time step:', self.t, ' are ', self.is_task_alive)\n            print('Tasks that are enabled at time step:', self.t, ' are ', self.is_task_enabled)\n            print('Tasks that are travel_enabled at time step:', self.t, ' are ', self.travel_time_constraint_satisfied)\n            print('Tasks that are in progress at time step:', self.t, ' are ', self.is_task_in_progress)\n            print('Tasks that are finished at time step:', self.t, ' are ', self.is_task_finished)\n\n            print(\"agent1 is currently at location \", self.get_vectorized_location(self.agents[0].getz()), ' and is working on ',\n                  self.agents[0].curr_task)\n            print(\"agent2 is currently at location \", self.get_vectorized_location(self.agents[1].getz()), ' and is working on ',\n                  self.agents[1].curr_task)", "def features_result_page():\n    # Get all fields from form\n    module = request.forms.getall('module')\n    version = request.forms.getall('version')\n    software = request.forms.getall('sw')\n\n    # Build html\n    module, version, software, result = do_features_request(module_type=module,\n                                                            version=version, software=software)\n\n    # Build template page\n    with open(\"./header.html\") as header, open('./features.tpl') as features, open('./footer.html') as footer:\n        template_html = header.read() + features.read() + footer.read()\n\n    if not result:\n        result = []\n\n    output = template(template_html, module=module, version=version, sw=software, result=result)\n\n    return output", "def get(self, *args):\n        return _libsbml.ListOfSpeciesFeatures_get(self, *args)", "def get_available_features(self) -> list[str]:\n        modules = []\n        for mdir in [ZeroBot.__path__[0]] + self.config[\"Core\"][\"ModuleDirs\"]:\n            mdir = Path(mdir)\n            modules += [child.stem for child in mdir.glob(\"feature/*.py\")]\n        return modules", "async def getFeatures(self, body=\"\"):\n        payload = {}\n        \n        # Parameter validation\n        schema = ConfigurationValidator.getFeatures()\n        schema.dump(schema.load(payload))\n        \n\n        url_with_params = await create_url_with_params(api_url=self._urls[\"getFeatures\"], proccessed_params=\"\"\"{\"required\":[],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[]}\"\"\", )\n        query_string = await create_query_string()\n        headers = {\n            \"Authorization\": \"Bearer \" + base64.b64encode(\"{}:{}\".format(self._conf.applicationID, self._conf.applicationToken).encode()).decode()\n        }\n        if self._conf.locationDetails:\n            headers[\"x-location-detail\"] = ujson.dumps(self._conf.locationDetails)\n        for h in self._conf.extraHeaders:\n 
headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"GET\", url_with_params, headers=get_headers_with_signature(urlparse(self._urls[\"getFeatures\"]).netloc, \"get\", await create_url_without_domain(\"/service/application/configuration/v1.0/feature\", ), query_string, headers, body, exclude_headers=exclude_headers), data=body, cookies=self._conf.cookies)", "async def _guild(self, ctx: Context) -> None:\n guild = ctx.guild\n owner = guild.owner\n created = convert_date(guild.created_at)\n\n num_roles = len(guild.roles)\n emojis = len(guild.emojis)\n members = guild.members\n\n presences = {\"online\": 0, \"offline\": 0, \"idle\": 0, \"dnd\": 0}\n\n member_type = {\"user\": 0, \"staff\": 0, \"bot\": 0}\n\n for member in members:\n presences[str(member.status)] += 1\n member_type[(\"user\", \"bot\")[member.bot]] += 1\n if (\n member.guild_permissions.ban_members\n or member.guild_permissions.kick_members\n or member.guild_permissions.manage_messages\n ):\n member_type[\"staff\"] += 1\n\n online, offline, idle, dnd = presences.values()\n users, staff, bots = member_type.values()\n\n value2 = textwrap.dedent(\n f\"\"\"\n Online: {online}\n Offline: {offline}\n Idle: {idle}\n DnD: {dnd}\n \"\"\"\n )\n\n value1 = textwrap.dedent(\n f\"\"\"\n Members: {len(members)}\n Users: {users}\n Staff: {staff}\n Bots: {bots}\n \"\"\"\n )\n\n value3 = textwrap.dedent(\n f\"\"\"\n Category Channels: {len(guild.categories)}\n Voice Channels: {len(guild.voice_channels)}\n Text Channels: {len(guild.text_channels)}\n \"\"\"\n )\n\n embed = EmbedHelper(\n title=f\"{guild.name.title()}'s Info\",\n timestamp=datetime.utcnow(),\n description=textwrap.dedent(\n f\"\"\"\n **Owner:** {owner.mention}\n **Created At:** {created}\n **Emojis:** {emojis}\n **Roles:** {num_roles}\n \"\"\"\n ),\n thumbnail_url=guild.icon_url,\n footer_text=f\"Command Invoked by {ctx.author}\",\n footer_url=ctx.author.avatar_url,\n fields=[\n {\"name\": f\"**Member Count:**\", \"value\": value1},\n {\"name\": \"**Presences**\", \"value\": value2},\n {\n \"name\": f\"**Total Channel Count:** {len(guild.channels)}\",\n \"value\": value3,\n \"inline\": False,\n },\n ],\n )\n\n await ctx.send(embed=embed)", "def get_all_features(self) :\n raise NotImplementedError", "def get_features(self, request, **kwargs):\n raise NotImplementedError()", "def feature(self, node=\"clickhouse1\"):\n self.context.node = self.context.cluster.node(node)\n\n for scenario in loads(current_module(), Scenario):\n scenario()", "def list_feature_tests(self):\n\t\treturn self.test_names", "def get_who_features(self):\n return self.who_made_features", "def feature(self):\n Feature(run=default_frame, flags=TE)\n Feature(run=load(\"window_functions.tests.rows_frame\", \"feature\"), flags=TE)\n Feature(run=load(\"window_functions.tests.range_frame\", \"feature\"), flags=TE)\n Feature(run=load(\"window_functions.tests.range_overflow\", \"feature\"), flags=TE)\n Feature(run=load(\"window_functions.tests.range_datetime\", \"feature\"), flags=TE)\n Feature(run=load(\"window_functions.tests.range_errors\", \"feature\"), flags=TE)", "def feature_class():\n if deployment_settings.get_security_map() and not s3_has_role(\"MapAdmin\"):\n unauthorised()\n\n tablename = \"%s_%s\" % (module, resourcename)\n table = db[tablename]\n\n # Model options\n table.gps_marker.comment = DIV( _class=\"tooltip\",\n _title=\"%s|%s\" % (T(\"GPS Marker\"),\n T(\"Defines 
the icon used for display of features on handheld GPS.\")))\n\n    # CRUD Strings\n    LIST_FEATURE_CLASS = T(\"List Feature Classes\")\n    s3.crud_strings[tablename] = Storage(\n        title_create = ADD_FEATURE_CLASS,\n        title_display = T(\"Feature Class Details\"),\n        title_list = T(\"Feature Classes\"),\n        title_update = T(\"Edit Feature Class\"),\n        title_search = T(\"Search Feature Class\"),\n        subtitle_create = T(\"Add New Feature Class\"),\n        subtitle_list = LIST_FEATURE_CLASS,\n        label_list_button = LIST_FEATURE_CLASS,\n        label_create_button = ADD_FEATURE_CLASS,\n        label_delete_button = T(\"Delete Feature Class\"),\n        msg_record_created = T(\"Feature Class added\"),\n        msg_record_modified = T(\"Feature Class updated\"),\n        msg_record_deleted = T(\"Feature Class deleted\"),\n        msg_list_empty = T(\"No Feature Classes currently defined\"))\n\n    output = s3_rest_controller(module, resourcename)\n\n    if not \"gis\" in response.view and response.view != \"popup.html\":\n        response.view = \"gis/\" + response.view\n\n    return output", "def do_features_request_2(features=None):\n\n    # connect to database\n    cur_db = connect_db(\"172.20.38.50\", \"mvelay\", \"user\", \"sandbox\")\n    cursor = cur_db.cursor()\n\n    # build whole query\n    cur_query = \"\"\" SELECT module, sw, version FROM t_feature\n                    WHERE feature=\"%s\" AND supported=1;\"\"\" % (features[0])\n\n    print cur_query\n    cursor.execute(cur_query)\n    results = cursor.fetchall()\n    cursor.close()\n\n    if results:\n        results = results[:1000] # Limit to first 1000 results\n    else:\n        results = None\n\n    return features[0], results", "def features(self) -> Optional[pulumi.Input['ProvisionedClustersCommonPropertiesFeaturesArgs']]:\n        return pulumi.get(self, \"features\")", "def feature():\n    pass", "async def serverinfo(self, context: Context) -> None:\n        roles = [role.name for role in context.guild.roles]\n        if len(roles) > 50:\n            roles = roles[:50]\n            roles.append(f\">>>> Displaying[50/{len(roles)}] Roles\")\n        roles = \", \".join(roles)\n\n        embed = discord.Embed(\n            title=\"**Server Name:**\", description=f\"{context.guild}\", color=0x9C84EF\n        )\n        if context.guild.icon is not None:\n            embed.set_thumbnail(url=context.guild.icon.url)\n        embed.add_field(name=\"Server ID\", value=context.guild.id)\n        embed.add_field(name=\"Member Count\", value=context.guild.member_count)\n        embed.add_field(\n            name=\"Text/Voice Channels\", value=f\"{len(context.guild.channels)}\"\n        )\n        embed.add_field(name=f\"Roles ({len(context.guild.roles)})\", value=roles)\n        embed.set_footer(text=f\"Created at: {context.guild.created_at}\")\n        await context.send(embed=embed)", "def test_can_return_all_current_features_only(self):\n        returned_features = return_current_features()\n        self.assertTrue(len(returned_features) > 0)\n        for feature in returned_features:\n            self.assertTrue(feature.is_feature)\n            feature_admin_object = SuggestionAdminPage.objects.get(suggestion=feature)\n            self.assertTrue(feature_admin_object.in_current_voting_cycle)\n\n        all_current_features_admin = SuggestionAdminPage.objects.filter(suggestion__is_feature=True,\n                                                                        in_current_voting_cycle=True)\n        self.assertEqual(len(all_current_features_admin), len(returned_features))", "def supported_features(self):\n        return self._supported_features", "def feature_list(self):\n    components = self._device_info.get(device_data_constants.KEY_COMPONENT, {})\n    # Set is_rma_device.\n    components['is_rma_device'] = self._is_rma_device\n    return self._feature_list.Encode(components)", "async def server_info(self, ctx):\n        guild = ctx.guild\n        id = guild.id\n        boost_count = 
guild.premium_subscription_count\n region = str(guild.region)\n channels = len(guild.channels)\n vc = len(guild.voice_channels)\n text_channels = len(guild.text_channels)\n emoji_limit = guild.emoji_limit\n bitrate = guild.bitrate_limit\n filesize = guild.filesize_limit\n members = str(len(guild.members))\n owner = guild.owner.name\n icon = guild.icon_url\n roles = len(guild.roles)\n banned = len(await guild.bans())\n invites = len(await guild.invites())\n created = str(guild.created_at)\n embed = discord.Embed(\n title=guild.name,\n description=\"Server Info:\",\n color=discord.Colour.purple()\n )\n embed.set_thumbnail(url=icon)\n embed.add_field(name=\"ID:\", value=str(id))\n embed.add_field(name=\"Owner: \", value=owner)\n embed.add_field(name=\"Region: \", value=region)\n embed.add_field(name=\"created at: \", value=created)\n embed.add_field(name=\"Boost count: \", value=boost_count)\n embed.add_field(name=\"Members: \", value=members)\n embed.add_field(name=\"Roles:\", value=str(roles))\n embed.add_field(name=\"Channels:\", value=str(channels))\n embed.add_field(name=\"Text Channels:\", value=str(text_channels))\n embed.add_field(name=\"Voice Channels:\", value=str(vc))\n embed.add_field(name=\"Emoji Limit:\", value=str(emoji_limit))\n embed.add_field(name=\"Max Bitrate:\", value=bitrate)\n embed.add_field(name=\"Max Filesize:\", value=filesize)\n embed.add_field(name=\"Banned Members:\", value=str(banned))\n embed.add_field(name=\"Active Invites:\", value=str(invites))\n await ctx.send(\"\", embed=embed)", "async def support(self, ctx: commands.Context) -> None:\n embed = (\n discord.Embed(\n title=\"Supported Services\",\n color=0x00FFCC,\n url=\"https://d.chulte.de\",\n )\n .add_field(\n name=\"YouTube\",\n value=\"Video Urls\\nVideo Search Terms\\nPlaylist Urls\",\n )\n .add_field(\n name=\"Spotify\",\n value=\"Track Links\\nAlbum Links\\nArtist Top-Tracks\\nPlaylists\",\n )\n )\n await ctx.send(embed=embed)", "def features(self) -> List[Feature]:\n return self._features", "async def serverinfo(self, ctx):\n guild = ctx.guild\n\n embed = discord.Embed()\n embed.title = str(guild)\n if guild.icon_url is not None:\n embed.description = '**ID**: {0.id}\\n[Icon URL]({0.icon_url})'.format(guild)\n embed.set_thumbnail(url=guild.icon_url)\n else:\n embed.description = '**ID**: {0.id}'.format(guild)\n\n if guild.me.permissions_in(ctx.channel).kick_members and ctx.author.permissions_in(ctx.channel).kick_members:\n dead_members = await ctx.guild.estimate_pruned_members(days=7)\n members = '{} members, {} of which were active in the past 7 days'.format(guild.member_count,\n guild.member_count - dead_members)\n else:\n members = guild.member_count\n\n embed.add_field(name='Members', value=members)\n\n roles = [x.mention for x in guild.role_hierarchy if not x.is_default()]\n if roles: # only show roles if the server has any\n roles = ', '.join(roles)\n if len(roles) <= 1024: # deal with limits\n embed.add_field(name='Roles', value=roles)\n\n channels = [x[1] for x in sorted([(x.position, x.mention) for x in guild.channels if\n isinstance(x, discord.TextChannel)])]\n channels = ', '.join(channels)\n if len(channels) <= 1024:\n embed.add_field(name='Text channels', value=channels)\n\n if guild.verification_level == discord.VerificationLevel.none:\n verification_level = 'None'\n elif guild.verification_level == discord.VerificationLevel.low:\n verification_level = 'Low'\n elif guild.verification_level == discord.VerificationLevel.medium:\n verification_level = 'Medium'\n elif 
guild.verification_level == discord.VerificationLevel.high:\n            verification_level = '(╯°□°）╯︵ ┻━┻'\n        else:\n            verification_level = '┻━┻ ﾐヽ(ಠ益ಠ)ノ彡┻━┻'\n\n        if guild.explicit_content_filter == discord.ContentFilter.disabled:\n            explicit_level = 'Don\'t scan any messages'\n        elif guild.explicit_content_filter == discord.ContentFilter.no_role:\n            explicit_level = 'Scan messages from members without a role'\n        else:\n            explicit_level = 'Scan messages sent by all members'\n\n        info = '**AFK channel**: {0.afk_channel}\\n**AFK timeout**: {0.afk_timeout} seconds\\n' \\\n               '**Owner**: {0.owner.mention}\\n**Region**: `{0.region.value}`\\n' \\\n               '**Verification level**: {1}\\n**Explicit content filter**: {2}'.format(guild, verification_level,\n                                                                                      explicit_level)\n\n        embed.add_field(name='Other miscellaneous info', value=info)\n\n        embed.timestamp = guild.created_at\n        embed.set_footer(text='Created on')\n\n        if ctx.channel.permissions_for(ctx.guild.me).embed_links:\n            if ctx.author.id == self.liara.user.id:\n                await ctx.message.edit(embed=embed)\n            else:\n                await ctx.send(embed=embed)\n        else:\n            await ctx.send('Unable to post serverinfo, please allow the Embed Links permission.')", "def feature_flags(self):\r\n        return self.env_tokens.get('FEATURES', dict())", "def home(request):\n    gefs_layers,geoserver_wms_url,geoserver_workspace = listLayers()\n\n    context = {\n        'gefs_layers':json.dumps(gefs_layers),\n        'geoserver_wms_url':geoserver_wms_url,\n        'geoserver_workspace':geoserver_workspace\n    }\n\n    return render(request, 'gefs/home.html', context)", "async def serverinfo(self,ctx):\n        g = ctx.guild\n        embed = discord.Embed()\n        embed.set_thumbnail(url = g.icon_url)\n        embed.title = \"{} - {}\".format(g.name,g.id)\n        embed.add_field(name = \"Owner\",value=\"{} - {}\".format(g.owner,g.owner.id),inline=False)\n        embed.add_field(name = \"Created at\", value = str(g.created_at), inline=False)\n        embed.add_field(name = \"Total Roles\", value= str(len(g.roles)), inline=False)\n        embed.add_field(name = \"Total Members\", value= str(g.member_count), inline=False)\n        embed.add_field(name = \"Premium Member\", value= str(g.premium_subscription_count), inline=False)\n        embed.add_field(name = \"Premium Tier\", value= str(g.premium_tier), inline=False)\n        await self.bot.say(ctx,embed = embed)", "def show(*args, **kwargs):\n    from . 
import core\n\n return core.show(*args, **kwargs)", "def get_features(self):\n return []", "def show_clubs(self):\n self.client.get(f\"{host}/board\")", "async def about(self, ctx):\n\n author_repo = \"https://github.com/scragly\"\n bot_repo = author_repo + \"/Firetail\"\n server_url = \"https://discord.gg/ZWmzTP3\"\n owner = \"Discord: Scragly#5146\\nEVE: Kyo Kuronami\"\n member_count = sum(g.member_count for g in self.bot.guilds)\n server_count = len(self.bot.guilds)\n\n description = (\n \"Made for EVE Online Communities\\n\"\n f\"[**Docs & Source**]({bot_repo})\\n\"\n f\"[**Support Server**]({server_url})\\n\"\n f\"[**Invite Me**]({self.bot.invite_url})\\n\"\n \"\\n\"\n f\"See available commands with:\\n`{ctx.prefix}help`\\n\"\n \"\\n\"\n f\"**Maintained and Developed by**\\n\"\n f\"{owner}\\n\"\n \"\\n\"\n f\"**Servers:** {server_count}\\n\"\n f\"**Members:** {member_count}\\n\"\n f\"**Uptime:** {self.bot.uptime_str}\"\n )\n\n try:\n await ctx.info(\"About Firetail\", description, thumbnail=self.bot.user.avatar_url_as(format='png'))\n\n except discord.HTTPException:\n await ctx.send(\"I need the `Embed links` permission for this command.\")", "def ftp_FEAT(self, line):\n features = ['MDTM','REST STREAM','SIZE','TVFS']\n features.sort()\n self.push(\"211-Features supported:\\r\\n\")\n self.push(\"\".join([\" %s\\r\\n\" %x for x in features]))\n self.respond('211 End FEAT.')", "def supported_features(self):\n return self._support_flags", "def supported_features(self):\n return self._support_flags", "def supported_features(self):\n return self._support_flags", "def supported_features(self):\n return self._support_flags", "def get_all_features(config: Config) -> typing.List[str]:\n return [feature.name for feature in config.features]", "def getFeatures(self):\n return \"1:\" + str(self.getEnergy()) + \\\n \" 2:\" + str(self.getCentroid()) + \\\n \" 3:\" + str(self.getZCrossingRate()) + \\\n \" 4:\" + str(self.getBandwidth())", "async def _load_features(self) -> int:\n for feature in self.config[\"Core\"].get(\"Modules\", []):\n try:\n await self.load_feature(feature)\n except ZeroBotModuleError as ex:\n self.logger.exception(ex)\n return len(self._features)", "def getListOfSpeciesFeatures(self, *args):\n return _libsbml.MultiSpeciesPlugin_getListOfSpeciesFeatures(self, *args)", "async def support(self, ctx):\n await ctx.send('Support server:\\nhttps://discord.gg/dU39sjq')", "def show_all(self):\n cmodules.showModuleData(\n Options.Author,\n Options.Name,\n Options.Call,\n Options.Category,\n Options.Type,\n Options.Version,\n Options.Description,\n Options.License,\n Options.Datecreation,\n Options.Lastmodified\n )\n self.show_commands()\n self.show_opt()", "def show(ctx):\n skale = ctx.obj['skale']\n # from skale.utils.contracts_provision.main import add_test_permissions\n # add_test_permissions(skale)\n show_all_schains_names(skale)", "async def support(self):\n link = \"https://discord.gg/J78uAgZ\"\n await self.bot.say(\"Join my support server if you need help with commands!\\n \" + link)", "def supported_features(self):\n return SUPPORT_FLAGS", "def supported_features(self):\n return SUPPORT_FLAGS", "def supported_features(self):\n return SUPPORT_FLAGS", "def supported_features(self):\n return SUPPORT_FLAGS", "async def show(self, itx: discord.Interaction, /) -> None:\n\n if TYPE_CHECKING:\n assert itx.guild is not None\n\n async with Session() as session:\n guild_prefs = await GuildPref.for_guild(session, itx.guild)\n\n if guild_prefs is not None and guild_prefs.bible_version is not 
None:\n attribute_id = 'set'\n data = {'version': guild_prefs.bible_version.name}\n else:\n attribute_id = 'not-set'\n data = None\n\n await utils.send_embed(\n itx,\n description=self.localizer.format(\n f'show.{attribute_id}', locale=itx.locale, data=data\n ),\n ephemeral=True,\n )", "def show_help():\n clear_screen()\n print(\"\"\"\n What should we pick up at the store?\\n\n Enter 'DONE' or 'QUIT' to stop adding items.\n Enter 'HELP' for this help.\n Enter 'SHOW' to see your current list.\n Enter 'REMOVE' to remove an item from the list.\n \"\"\")", "def show(self):\n\n pass", "def show():\n from siding.addons import ui\n ui.show()", "def getFeatures(self, gameState, action):\n # features = util.Counter()\n # successor = self.getSuccessor(gameState, action)\n # features['successorScore'] = self.getScore(successor)\n # return features\n if self.isOffensive:\n return self.getOffensiveFeatures(gameState, action)\n else:\n return self.getDefensiveFeatures(gameState, action)", "def test_findFeatures(self):\n features = self.builder._findChanges(\n self.project, self.builder._FEATURE)\n self.assertEquals(\n features,\n [(5, \"We now support the web.\"),\n (12, \"The widget is more robust.\"),\n (15,\n \"A very long feature which takes many words to describe with \"\n \"any accuracy was introduced so that the line wrapping behavior \"\n \"of the news generating code could be verified.\"),\n (16, \"A simpler feature described on multiple lines was added.\")])", "def show_feat(feat_map):\n for i in range(feat_map.shape[0]):\n plt.imshow(feat_map[i])\n plt.show()", "def print_possible_features(self):\n\n f5 = h5py.File(self.train_database[0], 'r')\n mol_name = list(f5.keys())[0]\n mapgrp = f5.get(mol_name + '/mapped_features/')\n\n logger.info('\\nPossible Features:')\n logger.info('-' * 20)\n for feat_type in list(mapgrp.keys()):\n logger.info('== %s' % feat_type)\n for fname in list(mapgrp[feat_type].keys()):\n logger.info(' -- %s' % fname)\n\n if self.select_feature is not None:\n logger.info('\\nYour selection was:')\n for feat_type, feat in self.select_feature.items():\n if feat_type not in list(mapgrp.keys()):\n logger.info(\n '== \\x1b[0;37;41m' + feat_type + '\\x1b[0m')\n else:\n logger.info('== %s' % feat_type)\n if isinstance(feat, str):\n logger.info(' -- %s' % feat)\n if isinstance(feat, list):\n for f in feat:\n logger.info(' -- %s' % f)\n\n logger.info(\"You don't need to specify _chainA _chainB for each feature. \" +\n \"The code will append it automatically\")", "def features(request):\n all_features = Ticket.objects.filter(ticket_type='Feature').order_by(\n '-completion').annotate(\n total_contributions=Sum('contribution__amount'))\n for feature in all_features:\n contributions = Contribution.objects.all().filter(ticket=feature)\n contribution_amount = Decimal(0.00)\n for contribution in contributions:\n contribution_amount += contribution.amount\n\n feature.total_contributions = contribution_amount\n feature.completion = feature.total_contributions/feature.price*100\n\n context = {\n 'features': all_features\n }\n return render(request, 'features.html', context)", "async def list(self, ctx: MyContext):\n if ctx.subcommand_passed is None:\n await ctx.send_help(\"wormhole list\")", "def show(self) -> None:", "def features(self, state, action, next_state):\n raise NotImplementedError" ]
[ "0.7203288", "0.66593146", "0.6396423", "0.62870073", "0.620836", "0.60858047", "0.6041121", "0.6025823", "0.59256774", "0.59256774", "0.59256774", "0.59229654", "0.58711076", "0.580933", "0.57830375", "0.5771818", "0.5696934", "0.56376654", "0.56083906", "0.5593481", "0.55892336", "0.557185", "0.5540858", "0.55303335", "0.5526225", "0.5508139", "0.54993623", "0.54727334", "0.5442467", "0.54369843", "0.5436885", "0.54259837", "0.5399754", "0.5393844", "0.53916234", "0.5374648", "0.5348873", "0.53329456", "0.5318843", "0.52927023", "0.5279384", "0.5276703", "0.52757215", "0.5272292", "0.5262744", "0.5261409", "0.52602327", "0.52571416", "0.52529275", "0.5247878", "0.5241683", "0.5234389", "0.52305174", "0.5230264", "0.5224826", "0.5212251", "0.52036864", "0.5199411", "0.5194307", "0.51909137", "0.51899505", "0.51896757", "0.5180267", "0.5173665", "0.51715887", "0.5167396", "0.51513135", "0.5149586", "0.51369876", "0.5126985", "0.51219904", "0.51211905", "0.5117599", "0.5117599", "0.5117599", "0.5117599", "0.5117388", "0.5108988", "0.51060873", "0.5104839", "0.51032495", "0.51023805", "0.5101489", "0.5091738", "0.5088135", "0.5088135", "0.5088135", "0.5088135", "0.50819206", "0.50547576", "0.5043975", "0.5040775", "0.5040757", "0.50226444", "0.50182045", "0.50173694", "0.50082296", "0.49892956", "0.49881577", "0.4979339" ]
0.81271005
0
why are you using this commands?
async def why(client): yield get_neko_life(client, 'why')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cmd(self):", "def use(self):", "def commands():", "def cli(ctx):", "def cli(ctx):", "def main(self):", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def script(self):", "def sth():", "def cli(ctx):\n #TODO", "def command():\n pass", "def support(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def exo2():", "def execute(self):", "def execute(self):", "def execute(self):", "def execute(self):", "def falcon():", "def main(self):\r\n pass", "def main():\n\tpass", "def commands():\n pass", "def commands():\n pass", "def commands():\n pass", "def commands():\n pass", "def regular(self):", "def __call__(self):\n\t\treturn", "def cx():", "def RUN(self):", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def main():\n pass", "def __call__(self) -> None:", "def degibber(self):", "def CL(self):", "def access():", "def d(self):\n pass", "def d(self):\n pass", "def __call__(self):\n pass", "def __call__(self):\n pass", "def think(self):\n pass", "def basic(self):\n pass", "def cli(ctx):\n pass", "def cli(ctx):\n pass", "def run(self): \r\n return", "def task4_1(self):\n\n pass", "def __call__(object):", "def execute(self):\n\t\tpass", "def task4(self):\n\n pass", "def process(self):", "def process(self):", "def process(self):", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():" ]
[ "0.711543", "0.6806908", "0.6717425", "0.6515996", "0.6515996", "0.65114516", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.649807", "0.6465175", "0.6380806", "0.6345946", "0.6323126", "0.63216954", "0.6306167", "0.6306167", "0.6306167", "0.6306167", "0.6306167", "0.6306167", "0.6306167", "0.6306167", "0.6306167", "0.6306167", "0.6295243", "0.6293744", "0.6293744", "0.6293744", "0.6293744", "0.62376934", "0.6231465", "0.6229384", "0.6219107", "0.6219107", "0.6219107", "0.6219107", "0.6208499", "0.6201311", "0.6185965", "0.6167977", "0.6155355", "0.6155355", "0.6155355", "0.6155355", "0.6155355", "0.6131136", "0.60936093", "0.60465634", "0.6031625", "0.6001909", "0.59722143", "0.59722143", "0.5971254", "0.5971254", "0.5961022", "0.59599626", "0.59593606", "0.59593606", "0.5936884", "0.5914664", "0.5910664", "0.5910311", "0.5909951", "0.5875309", "0.5875309", "0.5875309", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755", "0.58667755" ]
0.0
-1
Checks whether the user is banned.
async def is_banned(client, event, user: ('user', 'Who should I check?') ): if not event.user_permissions.can_ban_users: abort('You need to have `ban users` permissions to do this.') if not event.channel.cached_permissions_for(client).can_ban_users: abort('I need to have `ban users` permissions to do this.') yield # Acknowledge the event. try: ban_entry = await client.guild_ban_get(event.guild, user) except DiscordException as err: if err.code == ERROR_CODES.unknown_ban: ban_entry = None else: raise embed = Embed(f'Ban entry for {user:f}').add_thumbnail(user.avatar_url) if ban_entry is None: embed.description = 'The user **NOT YET** banned.' else: embed.description = 'The user is banned.' reason = ban_entry.reason if reason is None: reason = '*No reason was specified.*' embed.add_field('Reason:', reason) yield embed
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def bot_check(self, ctx):\n blocked = await self.db.fetchrow(\n \"\"\"\n SELECT *\n FROM blocks\n WHERE user_id=$1\n \"\"\",\n ctx.author.id,\n )\n if blocked is None:\n return True\n raise BlackListed", "def ban_user(cls, user):\n\n banned = False\n with transaction.atomic():\n banned = True if cls.ban_phone_number(user.mobile_phone) else banned\n\n if user.add_mobile_phone:\n banned = True if cls.ban_phone_number(user.add_mobile_phone) else banned\n\n try:\n # Landline phones may be set in regional format and this may cause exceptions.\n # To ban the user we at least need to ban his mobile phone numbers.\n # Landline phones are optional, and it's ok if them would not be added to the ban-list.\n if user.landline_phone:\n banned = True if cls.ban_phone_number(user.landline_phone) else banned\n\n if user.add_landline_phone:\n banned = True if cls.ban_phone_number(user.add_landline_phone) else banned\n\n except Exception:\n pass\n\n\n cls.signals.user_banned.send(cls, user=user)\n return banned", "def is_blacklisted(self, user_id, blacklist_user_id):\n try:\n result = self.table.select(and_(\n self.table.c.user_id == user_id,\n self.table.c.blacklisted_id == blacklist_user_id)).execute()\n if result.rowcount >= 1:\n return True\n elif result.rowcount == 0:\n return False\n except Exception as e:\n self.log(e, self.identifier)\n raise egg_errors.QueryNotPossible", "async def check_for_blacklist(ctx):\n if ctx.guild is None:\n # raise commands.NoPrivateMessage\n return True\n return db.is_blacklisted(ctx)", "def ban(self):\n\n if not self.get_permissions()['banned']:\n banned_group = Group.query.filter(\n Group.banned == True\n ).first()\n\n self.primary_group_id = banned_group.id\n self.save()\n return True\n return False", "def ban_user(self, session, chat_id: int) -> None:\n\n user = session.query(User).get(chat_id)\n if user and user.is_banned is False:\n user.is_banned = True\n session.commit()", "def positive_balance_check(user):\n return has_positive_balance(user)", "def checkbalance(self):\n logging.debug('Checked user balance')", "def get_banned(self):\n return self.execute(TABELLE['id_users']['select']['banned'])", "def checkBan(self, hostmask):\n assert ircutils.isUserHostmask(hostmask), 'got %s' % hostmask\n now = time.time()\n for (pattern, expiration) in self.bans.items():\n if now < expiration or not expiration:\n if ircutils.hostmaskPatternEqual(pattern, hostmask):\n return True\n else:\n self.expiredBans.append((pattern, expiration))\n del self.bans[pattern]\n return False", "async def is_blacklisted(user_id: int) -> bool:\n async with aiosqlite.connect(DATABASE_PATH) as db:\n async with db.execute(\n \"SELECT * FROM blacklist WHERE user_id=?\", (user_id,)\n ) as cursor:\n result = await cursor.fetchone()\n return result is not None", "async def banned(self, ctx: Context):\n if len(self.banned_urls) != 0:\n await ctx.channel.send('\n'.join(['{}. <{}>'.format(i + 1, url) for i, url in enumerate(self.banned_urls)]))\n else:\n await ctx.channel.send('No profiles have been banned.')", "def test_banned(self) -> None:\n\n self._perform_background_initial_update()\n\n u1 = self.register_user(\"u1\", \"pass\")\n u1token = self.login(\"u1\", \"pass\")\n r1 = self.helper.create_room_as(u1, tok=u1token)\n\n u2 = self.register_user(\"u2\", \"pass\")\n u2token = self.login(\"u2\", \"pass\")\n\n self.helper.join(r1, u2, tok=u2token)\n\n r1stats_ante = self._get_current_stats(\"room\", r1)\n assert r1stats_ante is not None\n\n self.helper.change_membership(r1, u1, u2, \"ban\", tok=u1token)\n\n r1stats_post = self._get_current_stats(\"room\", r1)\n assert r1stats_post is not None\n\n self.assertEqual(\n r1stats_post[\"current_state_events\"] - r1stats_ante[\"current_state_events\"],\n 0,\n )\n self.assertEqual(\n r1stats_post[\"banned_members\"] - r1stats_ante[\"banned_members\"], +1\n )\n self.assertEqual(\n r1stats_post[\"joined_members\"] - r1stats_ante[\"joined_members\"], -1\n )", "def notice(self):\n if(self.data[1][0:18:] == \"*** You are banned\"):\n username = SOCKET_TO_USERID[self.target]\n user_pseudonym = VALIDATED_USERS.get_pseudonym(username)\n network = self.source[1]\n BANHANDLER.add_ban(10080, user_pseudonym, network, self.data[0], 1)\n self.message = self.message + \"\\r\\n :orcbot!@localhost PRIVMSG \"+SOCKET_TO_USERID[self.source]+\" :You've been banned from this server\"\n\n self.send()", "def is_banned(membership):\n if membership is not None and not membership.is_left():\n return membership.is_banned()\n else:\n return False", "def is_blacklisted_username(username):\n settings = api.config.get_settings()\n return username in settings.get(\n \"username_blacklist\", api.config.default_settings[\"username_blacklist\"]\n )", "def _is_blacklisted_user(email):\n blacklisted_user_emails = (db_config.get_value('blacklisted_users') or\n '').splitlines()\n return any(\n utils.emails_equal(email, blacklisted_user_email)\n for blacklisted_user_email in blacklisted_user_emails)", "async def ban(self, ctx, user: discord.Member, reason=\"Banned from guild by Talos\"):\n await user.ban(reason=reason)\n await self.bot.mod_log(ctx, \"ban\", user, reason)\n await ctx.send(f\"User {user} banned\")", "def allowed(self, user, amount):\n return True", "def ban_user(request, user_id):\n # TODO: Missing\n return render(request, 'users/ban_user.html',\n { })", "def ban_user(request, id):\n \n if not request.user.is_staff:\n raise Http404\n\n user = get_object_or_404(User, id = id)\n profile = user.get_profile()\n message = ''\n if profile.banned:\n profile.banned = False\n profile.save()\n else:\n form = BlockUserForm(request.POST or None)\n if form.is_valid():\n profile.banned_until = request.POST['banned_until']\n message = request.POST['message']\n profile.banned = True\n profile.save()\n send_mail('minus.lviv.ua - ะฐะบะฐัƒะฝั‚ ะทะฐะฑะปะพะบะพะฒะฐะฝะพ', u\"\"\"\nะ”ะพะฑั€ะพะณะพ ะดะฝั.\nะ’ะฐัˆ ะฐะบะฐัƒะฝั‚ (%s) ะฝะฐ ัะฐะนั‚ั– minus.lviv.ua ะฑัƒะปะพ ั‚ะธะผั‡ะฐัะพะฒะพ ะฒั–ะดะบะปัŽั‡ะตะฝะพ\nะฒ ะทะฒโ€™ัะทะบัƒ ะท ะฟะพั€ัƒัˆะตะฝะฝัะผ ะฟั€ะฐะฒะธะป ะบะพั€ะธัั‚ัƒะฒะฐะฝะฝั ั€ะตััƒั€ัะพะผ.\n\nะŸั€ะธั‡ะธะฝะฐ: %s\n\nะ‘ะฐะฝ ั‚ั€ะธะฒะฐั” ะดะพ %s\n--------------------\nminus.lviv.ua\n \"\"\" % (profile.fullname(),\n message, \n profile.banned_until),\n settings.DEFAULT_FROM_EMAIL,\n [profile.user.email], fail_silently = False)\n else:\n form.initial = {'banned_until':\\\n datetime.date.today()+datetime.timedelta(days=30),}\n return render_to_response('users/ban_user.html',\n {'form': form, 'user':profile.user},\n context_instance=RequestContext(request))\n return redirect(profile.get_absolute_url())", "async def bans(self, ctx):\n try:\n bans = await self.bot.get_bans(ctx.message.server)\n except discord.Forbidden:\n await self.bot.say('I do not have the proper permissions')\n except discord.HTTPException:\n await self.bot.say('Getting bans failed')\n else:\n await self.bot.say('\\N{SMALL ORANGE DIAMOND}'.join(user.name for user in bans))", "def check_banned(deck_format, card_name):\n if card_name in consts.BANNINGS[deck_format]:\n return True\n return False", "def ban_user(self, user):\n # salvo l'id dell'utente o del bot\n # print(\"Sto negando l'accesso all'user \" + str(user['id']))\n self.execute(TABELLE['id_users']['insert']['complete_user'],\n (user['id'], False, False, False, False, True))", "def check_accepted_bid(self, username):\n good_bids = AuctionBids.objects.filter(good=self, user__username=username, accepted_by_seller=True)\n if good_bids.exists():\n return True\n return False", "def ban(sock, user):\r\n chat(sock, \"/ban {}\".format(user))", "def check_if_bot(self, user_id):\n return str(self.get_int_index(bot_id, 9)) in str(user_id)", "def unban(self):\n\n if self.get_permissions()['banned']:\n member_group = Group.query.filter(\n Group.admin == False,\n Group.super_mod == False,\n Group.mod == False,\n Group.guest == False,\n Group.banned == False\n ).first()\n\n self.primary_group_id = member_group.id\n self.save()\n return True\n return False", "def check_can_ban(membership):\n if membership is not None and not membership.is_left():\n return membership.can_ban()\n else:\n return False", "def confirm_login_allowed(self, user):\n # if the user has been disabled due to incorrect\n # password retries or other.\n if not user.is_active:\n return False; \n return True", "def blocked(self) -> bool:\n return pulumi.get(self, \"blocked\")", "async def ban(self, ctx, user: discord.User = None, *, reason: str = None):\r\n mem = await ctx.guild.fetch_member(user.id)\r\n if mem.top_role >= ctx.author.top_role:\r\n return await ctx.send(\"you can't ban that person\")\r\n\r\n try:\r\n if user is None:\r\n return await ctx.reply(\"Please specify a member to ban\", delete_after=3)\r\n \r\n if user.id == self.bot.user.id:\r\n return await ctx.reply(\"I can't ban myself\", delete_after=3)\r\n\r\n if reason is None:\r\n reason = f\"banned by {ctx.author.name}\"\r\n await ctx.guild.ban(discord.Object(id=user.id), reason=f\"banned by {ctx.author.name}\")\r\n return await ctx.reply(f\"{user.mention} has been banned\", mention_author=False)\r\n\r\n else:\r\n await ctx.reply(f\"{user.mention} has been banned for **`{reason}`**\", mention_author=False)\r\n reason += f\" banned by {ctx.author.name}\"\r\n await ctx.guild.ban(discord.Object(id=user.id), reason=reason)\r\n\r\n except Exception as e:\r\n await ctx.reply(e, delete_after=5)\r\n return print(e)", "def user_allow_credit(self):\n try:\n return self.user.creditAllowed()\n except AttributeError:\n return False", "async def hackban(self, ctx, user_id: int):\n try:\n await self.liara.http.ban(str(user_id), str(ctx.guild.id))\n await ctx.send('Done. Good riddance.')\n except discord.NotFound:\n await ctx.send('That user doesn\\'t exist.')\n except discord.Forbidden:\n await ctx.send('Sorry, I don\\'t have permission to ban that person here.')\n except discord.HTTPException:\n await ctx.send('That ID is invalid.')", "async def process_bj_game(self, ctx, amount, user_id):\n if amount >= 0:\n if not await self.check_in_game(user_id, ctx):\n if amount > await ex.u_currency.get_balance(user_id):\n await ctx.send(f\"> **{ctx.author}, you can not bet more than your current balance.**\")\n else:\n return True\n else:\n await ctx.send(f\"> **{ctx.author}, you can not bet a negative number.**\")", "async def add_blacklist(self, ctx, user: discord.Member):\r\n if user.id not in self.settings['blacklist']:\r\n try:\r\n self.settings['blacklist'].append(user.id)\r\n await ctx.send(\"User blacklisted.\")\r\n except:\r\n await ctx.send(\"An error occured.\")\r\n else:\r\n await ctx.send(\"User already blacklisted.\")", "def is_blacklisted(token):\n if Revoked.query.filter_by(token=token).first():\n return True\n return False", "def is_blacklisted(self):\r\n \r\n in_blacklist = False \r\n if self.chrompos in parser.blacklist:\r\n in_blacklist = True\r\n \r\n return in_blacklist", "def ban(sock, chan, user):\n chat(sock, \".ban {}\\r\\n\".format(user))\n console.info(\"banned user {} from channel {}\".format(user, chan))", "async def hackban(self, ctx, user_id: int, *, reason: str = None):\r\n author = ctx.message.author\r\n server = ctx.message.guild\r\n channel = ctx.message.channel\r\n action = \"Ban\"\r\n if str(server.id) not in self._time:\r\n self._time[str(server.id)] = {}\r\n dataIO.save_json(self._time_file, self._time)\r\n if \"bantime\" not in self._time[str(server.id)]:\r\n self._time[str(server.id)][\"bantime\"] = 0\r\n dataIO.save_json(self._time_file, self._time)\r\n try:\r\n user = await self.bot.get_user_info(user_id)\r\n except discord.errors.NotFound:\r\n await ctx.send(\"The user was not found, check if the ID specified is correct :no_entry:\")\r\n return\r\n except discord.errors.HTTPException:\r\n await ctx.send(\"The ID specified does not exist :no_entry:\")\r\n return\r\n ban_list = await server.bans()\r\n can_ban = channel.permissions_for(ctx.me).ban_members\r\n if user in server.members:\r\n await ctx.send(\"Use the ban command to ban people in the server :no_entry:\")\r\n return\r\n if not can_ban:\r\n await ctx.send(\"I need the `BAN_MEMBERS` permission :no_entry:\")\r\n return\r\n if user == self.bot.user:\r\n await ctx.send(\"I'm not going to ban myself ยฏ\\_(ใƒ„)_/ยฏ\")\r\n return\r\n if author == user:\r\n await ctx.send(\"Why would you want to ban yourself, just leave.\")\r\n return\r\n if user in [x.user for x in ban_list]:\r\n await ctx.send(\"That user is already banned :no_entry:\")\r\n return\r\n try:\r\n await self.bot.http.ban(user_id, server.id, reason=\"Ban made by {}\".format(author))\r\n self._time[str(server.id)][\"bantime\"] = datetime.datetime.utcnow().timestamp()\r\n dataIO.save_json(self._time_file, self._time)\r\n except:\r\n await ctx.send(\"I'm not able to ban that user :no_entry:\")\r\n return\r\n await ctx.send(f\"**{user}** has been banned by ID {self.bot.get_emoji(470063310386233344)}\")\r\n try:\r\n await self._log(author, server, action, reason, user)\r\n except:\r\n pass", "def add_to_bannedlist_view(request):\n data = {'success': False, 'msg': ''}\n if request.method == 'GET':\n # Check if the current user has already logged in.\n # If user has not logged in, return an error msg to frontend.\n # If user has logged in, let user add banned user he/she doesn't like, to his/her blacklist\n if not request.session.get('login_flag', None):\n data['msg'] = 'user does not log in'\n return JsonResponse(data)\n # else current use is logged in\n curr_user_name = request.session.get('name', None)\n # return curr_user_obj by curr_user_name from login.models.User database\n try:\n curr_user_obj = login.models.User.objects.get(name=curr_user_name)\n except ObjectDoesNotExist:\n data['msg'] = 'does not have user: ' + str(curr_user_name)\n return JsonResponse(data)\n\n try:\n req = simplejson.loads(request.body)\n banned_user_id = req['banned_user_id'].strip()\n except:\n banned_user_id = request.GET.get('banned_user_id')\n # check if input is empty\n if banned_user_id is None:\n data['msg'] = 'banned_user_id is required'\n return JsonResponse(data)\n\n # else input is not empty\n # check if banned_user_id is a positive integer\n try:\n banned_user_id = int(banned_user_id)\n if not (banned_user_id > 0):\n data['msg'] = 'banned_user_id must be a positive integer'\n return JsonResponse(data)\n except:\n data['msg'] = 'banned_user_id must be a positive integer'\n return JsonResponse(data)\n\n try:\n banned_user_obj = login.models.User.objects.get(uid=banned_user_id)\n except ObjectDoesNotExist:\n data['msg'] = 'does not have user with banned_user_id: ' + str(banned_user_id)\n return JsonResponse(data)\n\n if curr_user_obj.uid == banned_user_obj.uid:\n data['msg'] = 'user cannot add itself to its blacklist'\n return JsonResponse(data)\n\n try:\n models.User_banned_list.objects.create(user=curr_user_obj, banned_user=banned_user_obj)\n except IntegrityError:\n data['msg'] = 'banned_user_id: ' + str(banned_user_id) + ' already in blacklist'\n return JsonResponse(data)\n else:\n data['success'] = True\n data['msg'] = 'successfully insert banned_user_id: ' + str(banned_user_id) + ' into blacklist'\n return JsonResponse(data)\n\n else:\n data['msg'] = 'please use GET'\n return JsonResponse(data)", "async def unban(self, ctx, user_id: int, *, reason: str = None):\r\n author = ctx.message.author\r\n server = ctx.message.guild\r\n channel = ctx.message.channel\r\n action = \"Unban\"\r\n if str(server.id) not in self._time:\r\n self._time[str(server.id)] = {}\r\n dataIO.save_json(self._time_file, self._time)\r\n if \"unbantime\" not in self._time[str(server.id)]:\r\n self._time[str(server.id)][\"unbantime\"] = 0\r\n dataIO.save_json(self._time_file, self._time)\r\n try:\r\n user = await self.bot.get_user_info(user_id)\r\n except discord.errors.NotFound:\r\n await ctx.send(\"The user was not found :no_entry:\")\r\n return\r\n except discord.errors.HTTPException:\r\n await ctx.send(\"The ID specified does not exist :no_entry:\")\r\n return\r\n can_ban = channel.permissions_for(ctx.me).ban_members\r\n if not can_ban:\r\n await ctx.send(\"I need the `BAN_MEMBERS` permission :no_entry:\")\r\n return\r\n ban_list = await server.bans()\r\n invite = await channel.create_invite(max_age=86400, max_uses=1)\r\n s = discord.Embed(title=\"You have been unbanned from {}\".format(server.name),\r\n description=\"Feel free to join back whenever.\", colour=000000,\r\n timestamp=__import__('datetime').datetime.utcnow())\r\n s.set_thumbnail(url=server.icon_url)\r\n s.add_field(name=\"Moderator\", value=\"{} ({})\".format(author, str(author.id)), inline=False)\r\n s.add_field(name=\"Invite\", value=\"{} (This will expire in 1 week)\".format(str(invite)))\r\n if user == author:\r\n await ctx.send(\"You can't unban yourself :no_entry:\")\r\n return\r\n if user == self.bot.user:\r\n await ctx.send(\"I'm not even banned ยฏ\\_(ใƒ„)_/ยฏ\")\r\n return\r\n i = 0\r\n n = 0\r\n if user in [x.user for x in ban_list]:\r\n pass\r\n else:\r\n await ctx.send(\"That user is not banned :no_entry:\")\r\n return\r\n try:\r\n await server.unban(user, reason=\"Unban made by {}\".format(author))\r\n self._time[str(server.id)][\"unbantime\"] = datetime.datetime.utcnow().timestamp()\r\n dataIO.save_json(self._time_file, self._time)\r\n except discord.errors.Forbidden:\r\n await ctx.send(\"I need the **Ban Members** permission to unban :no_entry:\")\r\n return\r\n await ctx.send(\"**{}** has been unbanned :white_check_mark:\".format(user))\r\n try:\r\n await self._log(author, server, action, reason, user)\r\n except:\r\n pass\r\n try:\r\n await user.send(embed=s)\r\n except:\r\n pass", "async def test_ban(self, ctx):\n try:\n pass\n except Exception as e:\n await zb.bot_errors(ctx,sp.format(e))", "def checkIfAllowed(self, user):\n\n # Default case if mod access is not needed everyone has access\n if not self.modOnlyAccess:\n return True\n\n # Otherwise check the user's access level\n if user.modAccess == self.modOnlyAccess:\n return True\n else:\n return False", "def _user_assigned_bell(self, bell: Bell) -> bool:\n return not self._bot_assigned_bell(bell)", "def check_active(self, user):\r\n if not self.require_active:\r\n # Ignore & move on.\r\n return True\r\n\r\n return user.is_active", "def unban_user(self, session, chat_id: int) -> None:\n\n user = session.query(User).get(chat_id)\n if user.is_banned is True:\n user.is_banned = False\n session.commit()", "def liberate_user(cls, user):\n liberated = False\n if user.mobile_phone and cls.remove_banned_number(user.mobile_phone):\n liberated = True\n\n if user.add_mobile_phone and cls.remove_banned_number(user.mobile_phone):\n liberated = True\n\n if user.landline_phone and cls.remove_banned_number(user.landline_phone):\n liberated = True\n\n if user.add_landline_phone and cls.remove_banned_number(user.add_landline_phone):\n liberated = True\n\n if user.mobile_phone and cls.remove_suspicious_number(user.mobile_phone):\n liberated = True\n\n if user.add_mobile_phone and cls.remove_suspicious_number(user.add_mobile_phone):\n liberated = True\n\n if user.landline_phone and cls.remove_suspicious_number(user.landline_phone):\n liberated = True\n\n if user.add_landline_phone and cls.remove_suspicious_number(user.add_landline_phone):\n liberated = True\n\n if liberated:\n cls.signals.user_liberated.send(cls, user=user)\n\n return liberated", "def anti_bot(self, message):\n msg_list = self.ts.get_human_readable_message(message).lower().split(' ')\n bot_creation_date = self._get_creation_date(msg_list[1])\n viewers = self.ts.fetch_chatters_from_API()['viewers']\n mod_list = self.ts.get_mods()\n with codecs.open('whitelist.json', 'r', 'utf-8') as f:\n whitelist = json.load(f)\n for viewer in viewers:\n if self._get_creation_date(viewer) == bot_creation_date and viewer not in whitelist:\n self.ts.send_message('/ban {}'.format(viewer))\n mod_str = ', '.join(mod_list)\n self._add_to_whisper_queue(viewer, 'We\\'re currently experiencing a bot attack. If you\\'re a human and were accidentally banned, please whisper a mod: {}'.format(mod_str))", "def has_user(self, user): # pylint: disable=unused-argument\r\n return False", "async def unban(self, ctx, name: str):\n try:\n bans = await self.bot.get_bans(ctx.message.server)\n user = discord.utils.get(bans, name=name)\n if user is not None:\n await self.bot.unban(ctx.message.server, user)\n except discord.Forbidden:\n await self.bot.say('I do not have the proper permissions')\n except discord.HTTPException:\n await self.bot.say('Unbanning failed')\n else:\n await self.bot.say('\\N{OK HAND SIGN}')", "async def ban(self, context, user: discord.User, reason: str = \"Not specified\"):\n author = await context.guild.fetch_member(context.author_id)\n if not author.guild_permissions.ban_members:\n embed = discord.Embed(\n title=\"Error!\",\n description=\"You don't have enough permissions to ban this user.\",\n color=0xE02B2B\n )\n return await context.send(embed=embed)\n member = await context.guild.fetch_member(user.id)\n try:\n if member.guild_permissions.administrator:\n embed = discord.Embed(\n title=\"Error!\",\n description=\"User has Admin permissions.\",\n color=0xE02B2B\n )\n await context.send(embed=embed)\n else:\n await member.ban(reason=reason)\n embed = discord.Embed(\n title=\"User Banned!\",\n description=f\"**{member}** was banned by **{context.author}**!\",\n color=0x42F56C\n )\n embed.add_field(\n name=\"Reason:\",\n value=reason\n )\n await context.send(embed=embed)\n await member.send(f\"You were banned by **{context.author}**!\\nReason: {reason}\")\n except:\n embed = discord.Embed(\n title=\"Error!\",\n description=\"An error occurred while trying to ban the user. Make sure my role is above the role of the user you want to ban.\",\n color=0xE02B2B\n )\n await context.send(embed=embed)", "def test_cant_ban_user_from_community_if_already_banned(self):\n user = make_user()\n headers = make_authentication_headers_for_user(user)\n\n community = make_community(creator=user, type='P')\n community_name = community.name\n\n user_to_ban = make_user()\n user.ban_user_with_username_from_community_with_name(username=user_to_ban.username,\n community_name=community_name)\n\n url = self._get_url(community_name=community.name)\n response = self.client.post(url, **headers)\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n self.assertTrue(user_to_ban.is_banned_from_community_with_name(community.name))", "async def ban_command(self,ctx,member_id,how_much_days,why):\n perm_check = await self.perm_check(ctx,self.bot.guilds_data[str(ctx.guild.id)][\"role_perm_command\"][\"ban\"])\n if perm_check == \"pass\":\n member = utils.get(ctx.guild.members,id=int(member_id))\n self.bot.users_data[str(ctx.guild.id)][member_id][\"CriminalRecord\"][\"BanInfo\"][\"IsBanned\"] = True\n self.bot.users_data[str(ctx.guild.id)][member_id][\"CriminalRecord\"][\"BanInfo\"][\"Why\"] = str(why)\n self.bot.users_data[str(ctx.guild.id)][member_id][\"CriminalRecord\"][\"BanInfo\"][\"WhoAtBanned\"] = str(ctx.author.name)\n if how_much_days == \"def\":\n self.bot.users_data[str(ctx.guild.id)][member_id][\"CriminalRecord\"][\"BanInfo\"][\"Definitive\"] = True\n else:\n self.bot.users_data[str(ctx.guild.id)][member_id][\"CriminalRecord\"][\"BanSystem\"] = {\"day_counter\": 0,\"how_much_days\": how_much_days}\n return await member.ban(reason=why)", "async def ban(\n self, context: Context, user: discord.User, *, reason: str = \"Not specified\"\n ) -> None:\n member = context.guild.get_member(user.id) or await context.guild.fetch_member(\n user.id\n )\n try:\n if member.guild_permissions.administrator:\n embed = discord.Embed(\n description=\"User has administrator permissions.\", color=0xE02B2B\n )\n await context.send(embed=embed)\n else:\n embed = discord.Embed(\n description=f\"**{member}** was banned by **{context.author}**!\",\n color=0x9C84EF,\n )\n embed.add_field(name=\"Reason:\", value=reason)\n await context.send(embed=embed)\n try:\n await member.send(\n f\"You were banned by **{context.author}** from **{context.guild.name}**!\\nReason: {reason}\"\n )\n except:\n # Couldn't send a message in the private messages of the user\n pass\n await member.ban(reason=reason)\n except:\n embed = discord.Embed(\n title=\"Error!\",\n description=\"An error occurred while trying to ban the user. Make sure my role is above the role of the user you want to ban.\",\n color=0xE02B2B,\n )\n await context.send(embed=embed)", "def remove_from_bannedlist_view(request):\n data = {'success': False, 'msg': ''}\n if request.method == 'GET':\n # check if the user has already logged in.\n # if user has not logged in, return an error msg to frontend.\n # if user has logged in, let user remove banned_user from his/her blacklist\n if not request.session.get('login_flag', None):\n data['msg'] = 'user does not log in'\n return JsonResponse(data)\n # else use is logged in\n user_name = request.session.get('name', None)\n # return user_obj by user_name from login.models.User database\n try:\n user_obj = login.models.User.objects.get(name=user_name)\n except ObjectDoesNotExist:\n data['msg'] = 'does not have user: ' + str(user_name)\n return JsonResponse(data)\n\n try:\n req = simplejson.loads(request.body)\n banned_user_id = req['banned_user_id'].strip()\n except:\n banned_user_id = request.GET.get('banned_user_id')\n # check if input is empty\n if banned_user_id is None:\n data['msg'] = 'banned_user_id is required'\n return JsonResponse(data)\n # else input is not empty\n # check if banned_user_id is a positive integer\n try:\n banned_user_id = int(banned_user_id)\n if not (banned_user_id) > 0:\n data['msg'] = 'banned_user_id must be a positive integer'\n return JsonResponse(data)\n except:\n data['msg'] = 'banned_user_id must be a positive integer'\n return JsonResponse(data)\n\n try:\n banned_user_obj = login.models.User.objects.get(uid=banned_user_id)\n except ObjectDoesNotExist:\n data['msg'] = 'does not have user with banned_user_id: ' + str(banned_user_id)\n return JsonResponse(data)\n\n try:\n models.User_banned_list.objects.get(user=user_obj, banned_user=banned_user_obj).delete()\n except ObjectDoesNotExist:\n data['msg'] = \"user with banned_user_id: \" + str(banned_user_id) + ' is not in blacklist'\n return JsonResponse(data)\n else:\n data['success'] = True\n data['msg'] = 'successfully remove user from blacklist'\n return JsonResponse(data)\n else:\n data['msg'] = 'please use GET'\n return JsonResponse(data)", "def is_access_allowed(self, user_id):\n ### DATABASE CODE GOES HERE\n return False", "def is_on_waiting_list(self):\n if self.user is None:\n return False\n if unicode(self.user._id) in self.barcamp.event.waiting_list:\n return True\n return False", "def is_blacklisted(cls, msg):\n return is_blacklisted(msg.fields.get('from_addr'))", "def is_forbidden(self, request):\n return common.get_extension(str(request.url().toString())) in self.banned_extensions", "def is_whitelisted(self, instance_id):\n item = self.get_whitelist_instance(instance_id)\n if item is None:\n return False\n else:\n return True", "def check_blacklist(auth_token):\n token = BlacklistToken.query.filter_by(token=str(auth_token)).first()\n if token:\n return True\n\n return False", "def has_user(self):\n\t\treturn len( self.a_token ) > 0 and len( self.a_secret ) > 0", "def block_user(user_id):\n\n if not g.user:\n flash(\"Access unauthorized.\", \"danger\")\n return redirect(\"/\")\n\n user = User.query.get_or_404(user_id)\n users_blocking = [block.user_blocking_id for block in Blocks.query.all() if block.user_being_blocked_id == g.user.id]\n likes = [message for message in user.likes if message.user_id not in users_blocking]\n return render_template('users/blocked-users.html', user=user, likes=likes)", "def test_logs_user_banned(self):\n user = make_user()\n headers = make_authentication_headers_for_user(user)\n\n other_user = make_user()\n community = make_community(creator=other_user, type='P')\n community_name = community.name\n\n user.join_community_with_name(community_name)\n other_user.add_moderator_with_username_to_community_with_name(username=user.username,\n community_name=community.name)\n\n user_to_ban = make_user()\n\n url = self._get_url(community_name=community.name)\n self.client.post(url, {\n 'username': user_to_ban.username\n }, **headers)\n\n self.assertTrue(community.logs.filter(action_type='B',\n source_user=user,\n target_user=user_to_ban).exists())", "def check_balance():\n print(\"\\n\")\n print(messages.check_balance)\n u_id = pyip.inputInt(\"Your Id: \", greaterThan=0)\n password = pyip.inputPassword(\"Your Password: \")\n\n credentials = {\"id\":u_id, \"password\":password}\n result = BankOperationsBackend.check_balance(credentials)\n start_again() if result else BankOperationsUi.check_balance()", "def isBlocked(self):\n cal = self.request.get('form.widgets.calendarConfig')\n if cal is not None:\n return (cal == ['bloque'])\n wrapper = getSAWrapper('gites_wallons')\n session = wrapper.session\n for heb in getHebergementsForProprio(self.context, session):\n if heb.heb_calendrier_proprio == 'bloque':\n return True\n return False", "def mode(self):\n #TODO: detection of channelbans be optional and moved into config\n if(len(self.data)==3):\n print self.data\n if re.match(\"\\+b\", self.data[1]):\n username = SOCKET_TO_USERID[self.target]\n banned_user = self.data[2].split(\"!\")[1].split(\"@\")[0].strip(\"*\").strip(\"~\")\n if(username == banned_user):\n print \"about to be BANNED\", username, \" \", banned_user\n user_pseudonym = VALIDATED_USERS.get_pseudonym(username)\n network = self.source[1]\n BANHANDLER.add_ban(10080, user_pseudonym, network, self.data[0], 0)\n self.send()", "def check_if_enabled(self, user):\n\t\tfrom webnotes.utils import cint\n\t\tif user=='Administrator': return\n\t\tif not cint(webnotes.conn.get_value('Profile', user, 'enabled')):\n\t\t\tself.fail('User disabled or missing')", "async def ban(self, ctx, user: discord.Member=None, *, reason: str=None):\n author = ctx.author\n guild = ctx.guild\n if user is None:\n await ctx.send(f\"You need to mention a user to ban.\\n`{ctx.prefix}ban @user`\")\n return\n if author is user:\n await ctx.send(\"Baka, don't ban yourself!\")\n return\n if reason is None:\n try:\n await guild.ban(user, reason=f\"[{author.name}] - No reason was specified.\", delete_message_days=7)\n except discord.Forbidden:\n await ctx.send(\"I'm not allowed to do that.\")\n else:\n await ctx.send(f\"I banned them, senpai~{author.name}! :blush:\")\n else:\n try:\n await guild.ban(user, reason=f\"[{author.name}] - {reason}\", delete_message_days=7)\n except discord.Forbidden:\n await ctx.send(\"I'm not allowed to do that.\")\n else:\n await ctx.send(f\"I banned them, senpai~{author.name}! :blush:\")", "async def user_banned_button(self, payload: discord.RawReactionActionEvent) -> None:\n\n self.bits = flip_action_bits(LoggingActions.USER_BANNED, self.bits)\n await self.update_embed()", "def allow(self, message):\n if message.author.id == Guard.AUTHOR:\n return True\n if message.author.id in Guard.BANNED_USERS:\n return False\n if self.state == State.TRUSTED_ONLY and not Guard.is_trusted(message):\n return False\n if self.state == State.SUDO_ONLY and not Guard.allow_sudo(message):\n return False\n return True", "def usercheck(userip):\n if userip in session:\n return userstate.check(session[userip])\n else:\n return False", "def _checkUserInactive(username,self):\r\n active = False\r\n user = _findUser(username)\r\n \r\n if user is not None:\r\n active = user.getIsActive()\r\n \r\n return active", "def check_token_in_blacklist(decrypted_token):\n from .models import BlacklistToken\n jti = decrypted_token['jti']\n\n if BlacklistToken.check_blacklist(jti):\n raise InvalidToken(\"Token is blacklisted. Please log in again.\")\n\n return False", "def is_whitelisted(self, instance_id):\n return self.is_tagged(instance_id, 'Whitelisted')", "def check_if_token_in_blacklist(decrypted_token):\n return (\n decrypted_token[\"jti\"] in BLACKLIST\n ) # if True, go to revoked_token_callback", "def confirm_login_allowed(self, user):\r\n if not user.is_active:\r\n raise forms.ValidationError(\r\n self.error_messages['inactive'],\r\n code='inactive',\r\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "async def ban_users(request: Request, user_list: list[User]) -> ModBan:\n conn: Connection = request.state.db_conn\n users = [user.user_id for user in user_list]\n\n records = await conn.fetch(\"SELECT * FROM users WHERE user_id=any($1::bigint[])\", tuple(users))\n db_users = [record[\"user_id\"] for record in records]\n\n non_db_users = set(users) - set(db_users)\n\n async with conn.transaction():\n # Ref:\n # https://magicstack.github.io/asyncpg/current/faq.html#why-do-i-get-postgressyntaxerror-when-using-expression-in-1\n await conn.execute(\"UPDATE users SET is_banned=TRUE WHERE user_id=any($1::bigint[])\", db_users)\n await conn.execute(\"UPDATE pixel_history SET deleted=TRUE WHERE user_id=any($1::bigint[])\", db_users)\n\n await request.state.canvas.sync_cache(conn, skip_check=True)\n\n return ModBan(banned=db_users, not_found=list(non_db_users))", "async def remove_blacklist(self, ctx, user: discord.Member):\r\n if user.id not in self.settings['blacklist']:\r\n await ctx.send(\"User is not blacklisted.\")\r\n else:\r\n self.settings['blacklist'].remove(user.id)\r\n await ctx.send(\"User removed from blacklist.\")", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "async def hackban(\n self, context: Context, user_id: str, *, reason: str = \"Not specified\"\n ) -> None:\n try:\n await self.bot.http.ban(user_id, context.guild.id, reason=reason)\n user = self.bot.get_user(int(user_id)) or await self.bot.fetch_user(\n int(user_id)\n )\n embed = discord.Embed(\n description=f\"**{user}** (ID: {user_id}) was banned by **{context.author}**!\",\n color=0x9C84EF,\n )\n embed.add_field(name=\"Reason:\", value=reason)\n await context.send(embed=embed)\n except Exception as e:\n embed = discord.Embed(\n description=\"An error occurred while trying to ban the user. Make sure ID is an existing ID that belongs to a user.\",\n color=0xE02B2B,\n )\n await context.send(embed=embed)", "def check_is_admin(current_user):\n return current_user['isAdmin'] == True", "async def check_in_game(user_id, ctx): # this is meant for when it is accessed by commands outside of BlackJack.\n check = ex.first_result(await ex.conn.fetchrow(\"SELECT COUNT(*) From blackjack.games WHERE player1 = $1 OR player2 = $1\", user_id))\n if check:\n await ctx.send(f\"> **{ctx.author}, you are already in a pending/active game. Please type {await ex.get_server_prefix_by_context(ctx)}endgame.**\")\n return True", "def check_user(self):\n try:\n if self.get_customer()[0][0] == self.dni:\n return True\n else:\n return False\n except:\n return False", "async def ban(self, ctx, \n\t\ttarget: BanCandidateConverter, \n\t\t*, reason: str = \"No reason given.\"\n\t):\n\n\t\tself.check_perms(ctx.author, target)\n\t\t\n\t\thandler = await Handler.new(self.bot, ctx.guild)\n\t\tawait handler.ban(ctx.author, target, reason)\n\n\t\tawait ctx.success(f\"{target} (`{target.id}`) has been banned for:\\n{reason}\")", "async def votetoban(ctx, *, user: d.Member):\r\n if await bMsg(ctx,ctx.message.author.name,client):\r\n return\r\n logger.info('votetoban: ' + user.mention, extra={'invoker': ctx.message.author.name})\r\n if ctx.guild.id != DGBANSERVERID:\r\n return\r\n for member in ctx.guild.members:\r\n if (not str(member.status) == 'offline') \\\r\n and ctx.channel.permissions_for(member).administrator:\r\n await ctx.send(member.mention + ', someone requests for ' + user.mention + ' to be banned!')\r\n return\r\n DOBAN = '้™œ}๏ฝฉ'\r\n NOBAN = '้™œqd'\r\n msg = await ctx.send('**Vote to ban ' + user.mention + '**\\nReact ' + DOBAN + ' to vote to ban; react ' + NOBAN + ' to vote to keep.')\r\n await msg.add_reaction(DOBAN)\r\n await msg.add_reaction(NOBAN)\r\n try:\r\n await ctx.bot.wait_for('member_update',\r\n check=lambda o, m: \\\r\n ctx.channel.permissions_for(m).administrator \\\r\n and not str(m.status) == 'offline',\r\n timeout=180.0\r\n )\r\n await msg.delete()\r\n await ctx.send('An admin has come online! The vote has been cancelled. Please ask them instead.')\r\n except a.TimeoutError:\r\n msg = await ctx.get_message(msg.id)\r\n dos = 0\r\n nos = 0\r\n for r in msg.reactions:\r\n if r.emoji == DOBAN:\r\n dos = r.count - 1\r\n elif r.emoji == NOBAN:\r\n nos = r.count - 1\r\n await msg.delete()\r\n if dos + nos < 3:\r\n await ctx.send('Not enough people voted! ({} total, minimum is 3.) The user stays.'.format(dos + nos))\r\n elif dos > nos:\r\n await ctx.send('{} votes for and {} votes against. The user has been banned.'.format(dos, nos))\r\n await user.ban(reason='Banned after vote'\r\n + ' {} against {}'.format(dos, nos)\r\n + ' when admins were gone.')\r\n else:\r\n await ctx.send('{} votes for and {} votes against. The user stays.'.format(dos, nos))", "async def checkBalance(message, db):\n user = message.author\n userInfo = getUser(db, user.id)\n messageSendBack = ''\n if userInfo is None:\n logger.error(f\"User {user.id} check balance failed\")\n messageSendBack = '็ณป็ปŸ้”™่ฏฏ'\n else:\n displayMoney = userInfo[1] / 100\n messageSendBack = f\"{user.display_name}๏ผŒไฝ ่ฟ˜ๆœ‰{displayMoney}ๅ…ƒ\"\n await message.channel.send(messageSendBack)", "def _nick_blocked(self, nick: str) -> bool:\n bad_nicks = self.config.core.nick_blocks\n for bad_nick in bad_nicks:\n bad_nick = bad_nick.strip()\n if not bad_nick:\n continue\n if (re.match(bad_nick + '$', nick, re.IGNORECASE) or\n self.make_identifier(bad_nick) == nick):\n return True\n return False", "def checkIgnored(self, hostmask):\n if self.lobotomized:\n return True\n if world.testing:\n return False\n assert ircutils.isUserHostmask(hostmask), 'got %s' % hostmask\n if self.checkBan(hostmask):\n return True\n now = time.time()\n for (pattern, expiration) in self.ignores.items():\n if now < expiration or not expiration:\n if ircutils.hostmaskPatternEqual(pattern, hostmask):\n return True\n else:\n del self.ignores[pattern]\n # Later we may wish to keep expiredIgnores, but not now.\n return False", "async def banish(self, ctx : commands.Context, member: discord.Member, *, reason: str = None):\n if await checks.check_priv(ctx, member):\n return\n try:\n await member.ban(reason=default.responsible(ctx.author, reason))\n embed = discord.Embed(\n color = 0x2F3136\n )\n embed.set_footer(text=f\"Command invoked by {ctx.author}\")\n embed.set_author(name=f\"โœ… {member.name} has been banned from the server\", icon_url=member.avatar_url)\n await ctx.send(embed=embed)\n await member.send(f\"You've been banned from **{ctx.guild.name}** for **{reason}** by **{ctx.author}**\")\n\n log_channel = self.bot.get_channel(self.logs(ctx.guild.id))\n if log_channel:\n embed = discord.Embed(\n title=\"Ban ๐Ÿ“\",\n description=f\"**User banned:** `{member}`\\n**Moderator:** `{ctx.author}`\\n**Reason:** `{reason}`\"\n )\n await log_channel.send(embed=embed)\n\n except Exception as e:\n await ctx.send(e)", "async def ban(ctx, member: discord.Member, *, reason=None):\n await member.ban(reason=reason)\n await ctx.send(f\"User {member} has been banned\")", "def is_bankrupted(self):\n return self.status == self.PLAYER_BANKRUPT", "def is_not_admin(user):\n return not user.is_superuser", "def get_user_boosted(user, status):\n return status.boosters.filter(user=user).exists()" ]
[ "0.7057368", "0.66175896", "0.6572682", "0.65161735", "0.65136915", "0.64439887", "0.6424201", "0.6409703", "0.6405958", "0.6392394", "0.6381361", "0.636146", "0.63393056", "0.62737966", "0.6236421", "0.61819464", "0.6176241", "0.61632884", "0.61577547", "0.6119847", "0.6095721", "0.6029551", "0.60176265", "0.59980583", "0.59930474", "0.59665686", "0.5965653", "0.5927308", "0.5925706", "0.58977467", "0.5895427", "0.5889643", "0.5878103", "0.58512956", "0.5844636", "0.5831929", "0.5808706", "0.5805466", "0.58007056", "0.57962465", "0.5775342", "0.5763614", "0.5729121", "0.57092416", "0.57091653", "0.5708795", "0.5693133", "0.56892246", "0.5683427", "0.56789756", "0.5669775", "0.566786", "0.5666687", "0.5663851", "0.5636567", "0.5634448", "0.5630667", "0.56274635", "0.5607727", "0.5594084", "0.559191", "0.5590157", "0.55874157", "0.5585171", "0.557413", "0.55712634", "0.5567547", "0.55653375", "0.55652106", "0.5558935", "0.5548719", "0.5541541", "0.553389", "0.5527807", "0.5526202", "0.5523276", "0.55073744", "0.5488077", "0.5484816", "0.54806584", "0.54771984", "0.5472532", "0.5472532", "0.5472532", "0.5472532", "0.5472532", "0.5464972", "0.5455452", "0.5455343", "0.5449998", "0.5447588", "0.5445997", "0.54394126", "0.5434404", "0.54306334", "0.5416804", "0.5395667", "0.53922784", "0.5376983", "0.53755075" ]
0.6633141
1
Shows your or the selected user's id.
async def user_id( event, user_id: ('user_id', 'Get the id of an other user?', 'user') = None, ): if user_id is None: user_id = event.user.id return str(user_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def view_user(request, user_id):\n return render(request, 'users/view_user.html',\n { })", "def show_user(user_id):\n user = User.query.get(user_id)\n\n return render_template('user.html',user=user)", "def show_user():\n\n return render_template('user/show_by_user.html', title='Show Profile', user = current_user)", "def show(user_id):\n if user_id != current_user.id:\n return abort(403)\n\n user = get_user(user_id)\n return render_template('users/show.html'\n ,user=user\n ,t=t\n ,m=m)", "def id(user=None):\n command = \"id {0}\".format(user) if user else \"id\"\n system_command(command)", "def show_user(user_id):\n user = User.query.get_or_404(user_id)\n\n return render_template(\"show-user.html\", user=user)", "def do_user_show(cs, args):\n key = args.user\n if cs.users.is_id(key):\n id = key\n else:\n id = cs.users.get_id_by_name(key)\n _, user = cs.users.get(id)\n utils.print_dict(user)", "def get_id(self): \n\t\treturn (self.user_id)", "def get_user_id(self):\n return self.id_user", "def show_user(user_id):\n user = User.query.get_or_404(user_id)\n return render_template(\"users/details.html\", user=user)", "def show(user_id):\n return users.get_or_404(user_id)", "def id(self) -> int:\n return self.user.id", "def get_id(self):\n return self.user_id", "def user_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"user_id\")", "def show_me_user(user_id):\n\n user = crud.get_user_by_id(user_id)\n\n return render_template('user_details.html', user=user)", "def user(request, user_id):\n raise NotImplementedError", "def display_user():\n return User.display_user()", "def display_user():\n return User.display_user()", "def show_user(user_id):\n\n user = User.query.get_or_404(user_id)\n return render_template('users/profile.html', user=user)", "def user_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"user_id\")", "def show_user(request):\n return _show_user(request)", "def show_user(request):\n return _show_user(request)", "def _get_user_id(self, user: Optional[Dict[str, Any]]) -> Optional[str]:\n return user[\"id\"] if user and \"id\" in user else None", "def show_user_info(user_id):\n\n user = User.query.get_or_404(user_id)\n return render_template(\"user_details.html\", user=user)", "def user(self, user):\n self.user_id = user.get_id()", "def show_users():\n return 'hehe'", "def get_id(self) -> int:\n return self.user_id", "def user_id(self):\n return self.status.user[\"id\"]", "def user_detail(user_id):\n\n user = User.query.get(user_id)\n return render_template(\"user.html\", user=user)", "def user_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"user_id\")", "def user_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"user_id\")", "def show_user(user_id):\n\n user = crud.get_user_by_id(user_id)\n\n return render_template('user_profile.html', user = user)", "async def getuserid(ctx, user=None):\n if user == None:\n await ctx.send(f\"Your user ID is `{ctx.message.author.id}`.\")\n elif user[:3] != \"<@!\":\n member = ctx.message.guild.get_member_named(user)\n await ctx.send(f\"The user ID of {user} is: `{member.id}`\")\n else:\n user = user.replace(\"<@!\", \"\").replace(\">\", \"\")\n await ctx.send(f\"The user ID of <@{user}> is `{user}`.\")", "def user_id(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"user_id\")", "def user_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"user_id\")", "def user_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"user_id\")", "def user_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"user_id\")", "def get_id(self):\r\n return self.username", "def user_id(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"user_id\")", "def user_id(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"user_id\")", "def get_user(self):\n return str(self.request.user.id)", "def mine(request):\n request.user_to_show = request.user\n return _show_user(request)", "def mine(request):\n request.user_to_show = request.user\n return _show_user(request)", "def user_id(self) -> str:\n return self._user_id", "def user_id(self) -> str:\n return self._user_id", "def get_user_id():\n user_id = session.get(\"user_id\")\n return user_id if user_id else None", "def idme(bot, update):\n update.message.reply_text(\"Your ID is: \" + str(update.message.from_user.id))", "def get_user(id):\n pass", "def user_id(self):\n return lamin_user_settings().id", "def print_user(self, user):\n status = \"active\"\n token = user.token\n\n if token in [\"finished\", \"revoked\"]:\n status = token\n\n if token is None:\n token = \"\"\n\n subid = \"%s\\t%s[%s]\" % (user.id, token, status)\n print(subid)\n return subid", "def get_user_id(self, details, response):\n return details[\"user_id\"]", "def get_user_id(self):\n raise NotImplementedError", "def cli(ctx, user_id, deleted=False):\n return ctx.gi.users.show_user(user_id, deleted=deleted)", "def user():\r\n return render_base_template(\"user.html\", user=current_user)", "def get_username_and_id(self, obj):\n return \"%s - %s\" % (obj.user.username, obj.user.id)", "def get_new_id(self) -> str:\n user = self.get_template(list_entries=False)\n return user.id", "async def _idavatar(self, ctx, userid: int = None):\n e = discord.Embed(color=discord.Color.blurple())\n if not userid:\n user = ctx.author\n else:\n try:\n user = await ctx.bot.fetch_user(int(userid))\n if user is None:\n raise Exception(\"User is None.\")\n except Exception as e:\n await ctx.send(f\"Failed to catch user: {e}\")\n e.set_image(url=user.avatar_url)\n e.set_author(name=f\"{user.name}'s avatar\", icon_url=user.avatar_url, url=user.avatar_url)\n e.set_footer(text=f\"{ctx.author.name} wanted to see.\", icon_url=ctx.author.avatar_url)\n await ctx.send(embed=e)", "def user_id(self):\n return self._user_id", "def user_id(self):\n return self._user_id", "def user_id(self):\n return self._user_id", "def user_id(self):\n return self._user_id", "def user_id(self):\n return self._user_id", "def get_id(self):\n return self.username", "def get_id(self):\n return self.username", "def get_id(self):\n return self.username", "async def id(ctx, user: discord.Member = None):\n user = user or ctx.message.author\n with open('users.json') as f:\n data = json.load(f)\n\n if data.get(user.id) is not None:\n await bot.say('`User id is {}`'.format(user.id))\n else:\n await bot.say(f'I can not seem to grab your id')", "def user_profile(request, id):\n user = User.objects.get(id=id)\n\n return render(request, \"core/profile.html\",{\n \"user\": user,\n \"range\": range(user.stars),\n \"bids_placed\": BuyProduct.objects.filter(\n customer = user\n )\n })", "def user_id_str(self):\n return str(self.status.user['id'])", "def get_user_id(self, details, response):\n return response.get(\"sub\")", "def get_user_id(self, session, **kwargs):\n return None", "def user_id(self):\n # type: () -> string_types\n return self._user_id", "def get_identifier(self, request):\r\n return request.user.username", "def current_user_id(data_client):\n try:\n return data_client.current_user().id\n except tk.HTTPError as error:\n skip_or_fail(tk.HTTPError, \"ID of current user could not be retrieved!\", error)", "def get_user_id():\n csc_name = get_user_csc_name()\n if csc_name:\n return csc_name\n haka_id = get_user_haka_identifier()\n if haka_id:\n return haka_id\n return None", "def get_user_id(self, details, response):\n return details['username']", "def __int__(self):\r\n return self.userid", "def get_accessible_user_id(self):\n ### DATABASE CODE GOES HERE\n return 1", "def detail(id):\n\tu = User.objects.get_or_404(id=id)\n\t#c = get_school_context(u)\n\t#if not g.school==c:\n\t#\tflash(_(\"You've been redirected to the school the user is following.\"))\n\t#\treturn redirect(url_for_school('users.detail', school=c, id=u.id), code=301)\n\treturn render_template('user/detail.html',\n\t\ttitle = u.display_name,\n\t\tuser = u)", "def edit_user(user_id):\n user = User.query.get_or_404(user_id)\n\n return render_template('edit-user.html', user=user)", "def show_user_profile(user_id):\n user = User.query.filter_by(user_id=user_id).first()\n\n return render_template(\"user_profile.html\", user=user)", "def view_profile():\n user_id = session.get(\"user_id\")\n \n user = User.query.get(session[\"user_id\"])\n \n return render_template(\"editable_profile_page.html\", user=user)", "async def userfromid(ctx, iden:int):\n user = bot.get_user(iden)\n await ctx.send(user.mention)", "def get_user_id(self, details, response):\n return response['uid']", "def display_user(user_id=\"\"):\n user_obj = storage.get(\"User\", user_id)\n if user_obj:\n return jsonify(user_obj.to_dict())\n else:\n abort(404)", "def view_user(request, userid):\n user_to_view = User.objects.get(id=userid)\n profile = Profile.objects.get(user=user_to_view)\n dogs = Dog.objects.all().filter(owner=user_to_view)\n\n return render(request, 'woofer/view_user.html',\n {\n 'profile' : profile,\n 'dogs' : dogs\n })", "def edit_user(user_id):\n user = User.query.get_or_404(user_id)\n return render_template(\"users/edit_user.html\", user=user)", "def __str__(self):\r\n return str(self.userid)", "def get_identifier(self, request):\n return request.user.username", "def view_user(user_id):\n\n db_user = User.query.get_or_404(user_id)\n # ??? do null values cause issues on the server side?\n # I see a lot of instances in the log where an incorrect\n # parameter is used by SQL Alchemy and they typically happen\n # on records with nulls.\n\n allow_delete = True if len(db_user.posts) == 0 else False\n return render_template(\"view_user.html\", headline=\"Blogly User\",\n user=db_user, allow_delete=allow_delete)", "def getId(self):\n return self.getUserName()", "def getId(self):\n return self.getUserName()", "def user_id(self):\n return json_loads(self.user_json).get('id')", "def __whatsmyid(self, update, context):\n user = self.User(update)\n output = f\"your ID is: {user.id}\"\n user.send_message(output)\n self.data_base.log(user, update.message.text, \"*\" * len(str(user.id)))", "def edit_user(user_id):\n user = User.query.get_or_404(user_id)\n return render_template('/users/edit_page.html', user=user)", "def show_user_profile(user_id):\n\n user = User.query.filter_by(user_id=user_id).one()\n rating = Rating.query.filter_by(user_id=user_id).all()\n\n \n return render_template(\"user_detail.html\", user=user, rating=rating)", "def show_edit_form(user_id):\n\n user = User.query.get_or_404(user_id)\n\n return render_template(\"users/edit_user.html\", user=user)", "def show_edit_user_form(user_id):\n\n user = User.query.get_or_404(user_id)\n return render_template('edit_user.html', user=user)", "def id_user(self, id_user):\n\n self._id_user = id_user", "def select_user(users):\n\n # Construct the model.\n model = [_User(name=name, description=description)\n for name, description in users]\n\n # Construct the view.\n view = _UsersView(model=model)\n\n if view.configure_traits() and view.selection is not None:\n user = view.selection.name, view.selection.description\n else:\n user = '', ''\n\n return user", "def show_edit_user_form(user_id):\r\n user = User.query.get_or_404(user_id)\r\n\r\n return render_template('edit-user.html', user=user)" ]
[ "0.70061904", "0.6974337", "0.6950619", "0.6897555", "0.6804346", "0.67851716", "0.6750138", "0.67339456", "0.66650236", "0.66331357", "0.6614428", "0.65648824", "0.65122503", "0.65082175", "0.6507262", "0.65002286", "0.6484979", "0.6484979", "0.6449136", "0.6417354", "0.63946927", "0.63946927", "0.63920873", "0.6390408", "0.6390174", "0.6389911", "0.6379608", "0.6371694", "0.63588953", "0.6356666", "0.6356666", "0.63530505", "0.6336867", "0.63227296", "0.6319455", "0.6319455", "0.6319455", "0.6310752", "0.63037306", "0.63037306", "0.6299212", "0.6294896", "0.6294896", "0.62863666", "0.62863666", "0.62451464", "0.62449336", "0.62409717", "0.6204941", "0.61941016", "0.61846066", "0.6180807", "0.61765933", "0.61639917", "0.61622477", "0.615129", "0.6151011", "0.6149303", "0.6149303", "0.6149303", "0.6149303", "0.6149303", "0.61417186", "0.61417186", "0.61417186", "0.6141252", "0.6130975", "0.6099212", "0.60979766", "0.6074373", "0.6070549", "0.60628986", "0.60289776", "0.6028819", "0.60286015", "0.5971174", "0.5965882", "0.5963092", "0.5962953", "0.5962856", "0.5960254", "0.59505934", "0.5942618", "0.5940877", "0.59398305", "0.5939316", "0.5927088", "0.5925919", "0.59190327", "0.5910416", "0.5910416", "0.59104043", "0.59090245", "0.5905605", "0.5904087", "0.5901504", "0.5890012", "0.5888926", "0.5877301", "0.5872879" ]
0.66290426
10
Enables the ping command in your guild.
async def enable_ping( client, event, allow: ('bool', 'Enable?') = True, ): guild = event.guild if guild is None: abort('Guild only command.') if not event.user_permissions.can_administrator: abort('You must have administrator permission to invoke this command.') application_commands = await client.application_command_guild_get_all(guild) for application_command in application_commands: # If you are not working with overlapping names, a name check should be enough. if application_command.name == ping.name: command_present = True break else: command_present = False if allow: if command_present: content = 'The command is already present.' else: await client.application_command_guild_create(guild, ping.get_schema()) content = 'The command has been added.' else: if command_present: await client.application_command_guild_delete(guild, application_command) content = 'The command has been disabled.' else: content = 'The command is not present.' return Embed('Success', content)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def ping_command(self, ctx):\n ping = int(self.client.latency * 1000)\n embed = Embed(\n title=\"Pong!\", description=f\"My ping is {ping}ms.\", color=Color.green()\n )\n await ctx.send(embed=embed)", "async def ping(self, ctx):\r\n embed = discord.Embed(\r\n title = \"Ping\",\r\n description = \"Pinging...\",\r\n color = Config.MAINCOLOR\r\n )\r\n t1 = time.perf_counter()\r\n msg = await ctx.send(embed = embed)\r\n t2 = time.perf_counter()\r\n embed = discord.Embed(\r\n title = \"๐Ÿ“ Pong!\",\r\n description = f\"API latency is {round((t2 - t1) * 1000)}ms\\nHost latency is {round(self.bot.latency * 1000, 2)}ms\",\r\n color = Config.MAINCOLOR\r\n )\r\n await msg.edit(embed = embed)", "async def ping(self, ctx):\n botlatency = round(self.bot.latency * 1000, 3)\n embed = discord.Embed(title = \"Pong!\", description = f\":ping_pong: `{botlatency}ms`\", color = discord.Color.blurple())\n await ctx.send(embed = embed)", "async def ping(self, ctx : commands.Context) -> None:\n\n embed = Embed(\n title = \"๐Ÿ“ Pong!\",\n description = f\"Gateway latency is {int(round(self.bot.latency * 1000, 2))}ms.\",\n color = maincolor\n )\n await ctx.send(embed = embed)", "async def ping(self, ctx):\n self.log_command_call(\"ping\", ctx.message)\n embed_output = create_embed(description=\"pong\")\n await ctx.send(embed=embed_output)", "async def ping(self, ctx):\n await ctx.send(\"Pong\")", "async def ping(self, ctx: commands.Context):\n latency = str(round(self.bot.latency * 1000, 1))\n await ctx.send(\n embed=Embed(title=\"Pong!\", description=f\"{latency}ms\", color=Color.blue())\n )", "def ping(bot, sender, sendmsg, label, args):\n\n sendmsg(\"Pong!\")", "async def ping(self, ctx):\n await ctx.send('pong')", "def ping(self, *args):\n return self._command(b'PING', *args, handler=\"PONG\")", "async def ping(ctx):\n phrase = ['I am alive...',\n 'I was definitely not sleeping...',\n 'I was definitely not laughing...',\n 'I am still here',\n 'You are using a ping command? 
Why?',\n 'At your service.']\n ph = random.choice(phrase)\n lsm = round((client.latency) * 100)\n embed = discord.Embed(title='**pong...!**',\n description=f\"_{ph}_ \\n**~{lsm} ms taken**......\",\n color=discord.Color.gold())\n embed.set_footer(text='๐Ÿ˜ญ')\n await ctx.send(embed=embed)", "async def ping(self, ctx:utils.Context):\r\n\r\n await ctx.send(\"Pong!\")", "async def ping(self, ctx: commands.Context) -> None:\n # datetime.datetime objects do not have the \"milliseconds\" attribute.\n # It must be converted to seconds before converting to milliseconds.\n bot_ping = (datetime.utcnow() - ctx.message.created_at).total_seconds() * 1000\n if bot_ping <= 0:\n bot_ping = \"Your clock is out of sync, could not calculate ping.\"\n else:\n bot_ping = f\"{bot_ping:.{ROUND_LATENCY}f} ms\"\n\n # Discord Protocol latency return value is in seconds, must be multiplied by 1000 to get milliseconds.\n discord_ping = f\"{self.bot.latency * 1000:.{ROUND_LATENCY}f} ms\"\n\n embed = Embed(title=\"Pong!\")\n\n for desc, latency in zip(DESCRIPTIONS, [bot_ping, discord_ping]):\n embed.add_field(name=desc, value=latency, inline=False)\n\n await ctx.send(embed=embed)", "async def ping(ctx):\n t1 = time.perf_counter()\n await bot.send_typing(ctx.message.channel)\n t2 = time.perf_counter()\n thedata = (\":ping_pong: **Pong.**\\nTime: \" + str(round((t2 - t1) * 1000)) + \"ms\")\n color = ''.join([random.choice('0123456789ABCDEF') for x in range(6)])\n color = int(color, 16)\n data = discord.Embed(description = thedata, colour=discord.Colour(value = color))\n data.set_footer(text=\"Mod Bot v0.1 | Requested by: {}\".format(ctx.message.author))\n await bot.say(embed = data)", "async def ping(self, ctx):\n await ctx.message.add_reaction(\"๐Ÿ“\")\n embed = discord.Embed(title=\"Pong!\", description=f\"{1000*round(bot.latency, 3)} ms\")\n embed.set_footer(text=f\"Requested by {str(ctx.message.author)}\")\n await ctx.reply(embed=embed)", "async def ping(self, ctx):\n\n t_1 = time.perf_counter()\n await ctx.trigger_typing()\n t_2 = time.perf_counter()\n ping = round((t_2 - t_1) * 1000)\n embed = discord.Embed(color=self.bot.embed_color)\n embed.title = 'Pong! :ping_pong:'\n embed.description = f'That took {ping}ms!'\n await ctx.send(embed=embed)", "async def ping(self, ctx):\n ms = round(self.bot.latency * 1000)\n await self.send_message(ctx, 'Pong! ({} ms)'.format(ms))", "async def enable(self, ctx: Context, *, guild: int = None):\n\n if guild is None:\n guild = ctx.guild\n else:\n guild = self.bot.get_guild(guild)\n\n if not guild:\n return await ctx.message.add_reaction(\"โš \")\n\n self._create_guild_config(guild)\n\n await ctx.message.add_reaction(\"โœ…\")", "async def ping_command(ctx):\n await ctx.send(f\"Current ping is: **{round(ctx.bot.latency, 2)} seconds**\")", "async def ping(self, ctx):\n await ctx.send(f'Pong! {round(self.client.latency * 1000)}ms')", "async def ping(ctx):\n latency = bot.latency\n await ctx.send(\"Pong! 
\" + str(latency))", "async def ping(self, ctx):\n start = time.time()\n msg = await ctx.send(embed=\n discord.Embed(\n title=\"**Pong!**\",\n colour=discord.Color.green(),\n description=\"Pinging...\"\n )\n )\n end = time.time()\n between = int((end - start)*1000)\n await msg.edit(embed=\n discord.Embed(\n title=\"**Pong!**\",\n colour=discord.Color.green(),\n description=f\"*{between} ms*\"\n )\n )", "async def ping(self, context: Context) -> None:\n embed = discord.Embed(\n title=\"๐Ÿ“ Pong!\",\n description=f\"The bot latency is {round(self.bot.latency * 1000)}ms.\",\n color=0x9C84EF,\n )\n await context.send(embed=embed)", "def ping(self):\n return True", "def ping(self):\n return True", "def ping(self):\n pass", "async def admin_enable(self, ctx: commands.Context):\n if ctx.guild.id in self.guilds:\n await ctx.send('Team management is already enabled in this guild.')\n return\n await self._enable_guild(guild=ctx.guild)\n await ctx.send('Team management enabled.')", "async def _ping(self, ctx):\n latency = self.bot.latency * 1000\n e = discord.Embed(title=\"Pong.\", color=discord.Color.red())\n e.add_field(name=\"Discord API\", value=f\"```{str(round(latency))} ms```\")\n e.add_field(name=\"Typing\", value=\"```calculating ms```\")\n\n before = time.monotonic()\n message = await ctx.send(embed=e)\n typlatency = (time.monotonic() - before) * 1000\n\n e = discord.Embed(title=\"Pong.\", color=discord.Color.green())\n e.add_field(name=\"Discord API\", value=f\"```py\\n{str(round(latency))} ms```\")\n e.add_field(name=\"Typing\", value=f\"```py\\n{str(round(typlatency))} ms```\")\n\n await message.edit(embed=e)", "def ping(self):\n return 'ping'", "async def ping(ctx):\n if ctx.channel.name.lower() in channels:\n await ctx.send(\"pong\")", "def ping(self, message, args):\n self._telegram_api.send_text_message(message.chat_id, 'Pong!', reply_to=message.message_id)", "def ping(self):\n\t\treturn self._send_command_to_entity_server(us.SERVER_COMMAND_PING)", "async def toggle(self, ctx):\n guild = ctx.message.guild\n\n enabled = await self.config.guild(guild).enabled()\n\n enabled = not enabled\n await self.config.guild(guild).enabled.set(enabled)\n\n if enabled is True:\n await ctx.send(\"AntiSpam has been enabled\")\n else:\n await ctx.send(\"AntiSpam has been disabled\")", "async def __ping(self, e: Message):\n if self.__on_ping:\n await self.__on_ping(e)\n else:\n await e.reply(\"Pong!\")", "async def enable(self, ctx):\n self.bot.db.execute(\"UPDATE starboards SET enabled = 1 WHERE channel_id = ?\", (ctx.channel.id,))\n await ctx.say(\"star.enabled\")", "def ping(self):\n packet = Packet()\n packet.message = MessageType.CLIENT_PING\n packet.data = \"PING\"\n try:\n self.send(packet.encode())\n self.last_ping_time = time.time()\n except socket.error, e:\n self.console.error(repr(e))", "async def ping(self, ctx: commands.Context):\r\n await ctx.send(f'Bot\\' latency is {self.bot.latency*1000:.3f}ms')", "def enable(ctx):\n\n config_db = ConfigDBConnector()\n config_db.connect()\n config_db.mod_entry(\"NAT_GLOBAL\", \"Values\", {\"admin_mode\": \"enabled\"})", "async def ping(self, ctx):\n await ctx.send(f'Pong! 
{round(self.bot.latency * 1000)}ms')\n await ctx.message.delete(delay=3)", "def ping(self):\n self._write(f'PING :{self.server.name}')\n self.awaiting_pong_since = datetime.datetime.now()", "def ping(self):\n return", "async def ping(self, ctx):\n pong_msg = await ctx.send(\":ping_pong:\")\n sr_lat = (pong_msg.created_at - ctx.message.created_at).total_seconds() * 1000\n await pong_msg.edit(content=f\"Command latency = `{sr_lat}ms`\\n\"\n f\"API heartbeat = `{self.client.latency * 1000:.1f}ms`\")\n self.logger.info(misolog.format_log(ctx, f\"\"))", "def setup(bot):\n bot.add_cog(AntiGhostPing(bot))", "async def ping(self, ctx: MyContext):\n _ = await ctx.get_translate_function()\n\n t_1 = time.perf_counter()\n await ctx.trigger_typing() # tell Discord that the bot is \"typing\", which is a very simple request\n t_2 = time.perf_counter()\n time_delta = round((t_2 - t_1) * 1000) # calculate the time needed to trigger typing\n await ctx.send(_(\"Pong. โ€” Time taken: {milliseconds}ms\",\n milliseconds=time_delta)) # send a message telling the user the calculated ping time", "def ping(self) -> None:\n ...", "async def _cmdf_pmenable(self, substr, msg, privilege_level):\n enabled_str = None\n if utils.str_says_true(substr) or (len(substr) == 0):\n self._pm_msg_isenabled = True\n enabled_str = \"enabled.\"\n else:\n self._pm_msg_isenabled = False\n enabled_str = \"disabled.\"\n self._save_settings()\n\n buf = \"PM greetings is now \" + enabled_str\n await self._client.send_msg(msg, buf)\n return", "def ping(self, irc, msg, args):\n irc.reply('pong', prefixNick=False)", "async def ping(ctx):\n em = discord.Embed()\n em.title ='Pong! Websocket Latency:'\n em.description = f\"{bot.ws.latency * 1000:.4f} ms\"\n await ctx.send(embed=em)", "def help(self, message, args):\n self._telegram_api.send_text_message(message.chat_id,\n 'This module just replies \"Pong!\" to /ping command',\n reply_to=message.message_id)", "def server_ping(self):\n \n if not hasattr(self,\"ping_data\"):\n #need to include a list called ping_data - which is updated as needed. by \"ping_data fnctions in objects of the server.\n #Nmaely this includes a list of instruments that are attached to the server.\n self.ping_data={\"server_id\":self.id,\n \"server_name\":self.name,\n \"server_ip\":self.ip,\n \"server_port\":str(wsport),\n \"server_id_node\":self.id_node,\n \"server_ping\":\"ping!\"}\n self.ping_data.update({\"server_time\":time.time()})\n self.multicast.protocol.send(simplejson.dumps(self.ping_data))\n server_command = commands.ServerCommand(self.server, self.server_ping)\n reactor.callLater(self.server_ping_period,\n self.command_queue.add,\n server_command)", "def sendPing(self, payload=None):", "async def ping(ctx):\n await ctx.send(\"pong\")", "def handle_ping(self, host):\n self.send(\"PONG :{}\".format(host))", "async def ping():\n await bot.say(\"Pong\")", "async def admin_local_ping(self, ctx: commands.Context,\n team_identifier: str, *message):\n\n pass", "async def ping(self) -> APIReturn:\n return await self._request(\"GET\", \"/ping\")", "def rpc_ping(self):\n\t\treturn True", "def ping():\n # TODO: this ought to live in ISlaveControl, maybe with disconnect()\n # or something. 
However the event that is emitted is most useful in\n # the Builder column, so it kinda fits here too.", "def ping(self, caller):\n if not hasattr(self, \"_ping_callers\"):\n self._ping_callers = []\n self._ping_callers.append(caller)\n super(ServerBot, self).msg(ping=\"\")", "def enable(self):\n self._logger.debug(\"%s: request to enable monitoring\",\n self.ping_address)\n if self._monitor_enabled:\n return\n self._is_stale = False\n self._enable_monitor()", "async def ping(self, *args, **kwargs):\n\n return await self._makeApiCall(self.funcinfo[\"ping\"], *args, **kwargs)", "def ping(self):\n\n rlog(1, self.name, 'sending ping')\n try:\n self.putonqueue(1, 'PING :%s' % self.server)\n return 1\n except Exception, ex:\n rlog(10, self.name, \"can't send ping: %s\" % str(ex))\n return 0", "async def enable(self, ctx):\n await self.config.guild(ctx.guild).auto.set(True)\n await ctx.send(_(\"Automatic voicechannel creation enabled.\"))", "def ping(self,\n *opts, # type: PingOptions\n **kwargs # type: Any\n ) -> PingResult:\n return super().ping(*opts, **kwargs)", "async def ping(ctx, command=None, *args):\n if command is None:\n return await ctx.send(\"Uh, I need a command you want to run.\")\n member = ctx.message.author.id\n if len(args) > 8:\n return await ctx.send(\"You are giving me too many pings at once! Please separate your requests over multiple commands.\")\n if command.lower() in [\"add\", \"new\", \"addregex\", \"newregex\", \"addregexp\", \"newregexp\", \"delete\", \"remove\", \"test\", \"try\"] and len(args) < 1:\n return await ctx.send(f\"In order to {command} a ping, you must supply a regular expression or word.\")\n if command.lower() in [\"add\", \"new\", \"addregex\", \"newregex\", \"addregexp\", \"newregexp\"]:\n # Check to see if author in ping info already\n ignored_list = []\n if any([True for u in PING_INFO if u['id'] == member]):\n #yes\n user = next((u for u in PING_INFO if u['id'] == member), None)\n pings = user['pings']\n for arg in args:\n try:\n re.findall(arg, \"test phrase\")\n except:\n await ctx.send(f\"Ignoring adding the `{arg}` ping because it uses illegal characters.\")\n ignored_list.append(arg)\n continue\n if f\"({arg})\" in pings or f\"\\\\b({arg})\\\\b\" in pings or arg in pings:\n await ctx.send(f\"Ignoring adding the `{arg}` ping because you already have a ping currently set as that.\")\n ignored_list.append(arg)\n else:\n if command.lower() in [\"add\", \"new\"]:\n print(f\"adding word: {re.escape(arg)}\")\n pings.append(fr\"\\b({re.escape(arg)})\\b\")\n else:\n print(f\"adding regexp: {arg}\")\n pings.append(fr\"({arg})\")\n else:\n # nope\n if command.lower() in [\"add\", \"new\"]:\n PING_INFO.append({\n \"id\": member,\n \"pings\": [fr\"\\b({re.escape(arg)})\\b\" for arg in args]\n })\n else:\n PING_INFO.append({\n \"id\": member,\n \"pings\": [fr\"({arg})\" for arg in args]\n })\n return await ctx.send(f\"Alrighty... 
I've got you all set up for the following pings: \" + (\" \".join([f\"`{arg}`\" for arg in args if arg not in ignored_list])))\n elif command.lower() in [\"delete\", \"remove\"]:\n user = next((u for u in PING_INFO if u['id'] == member), None)\n if user == None or len(user['pings']) == 0:\n return await ctx.send(\"You have no registered pings.\")\n for arg in args:\n if arg == \"all\":\n user['pings'] = []\n return await ctx.send(\"I removed all of your pings.\")\n if arg in user['pings']:\n user['pings'].remove(arg)\n await ctx.send(f\"I removed the `{arg}` RegExp ping you were referencing.\")\n elif f\"\\\\b({arg})\\\\b\" in user['pings']:\n user['pings'].remove(f\"\\\\b({arg})\\\\b\")\n await ctx.send(f\"I removed the `{arg}` word ping you were referencing.\")\n elif f\"({arg})\" in user['pings']:\n user['pings'].remove(f\"({arg})\")\n await ctx.send(f\"I removed the `{arg}` RegExp ping you were referencing.\")\n else:\n return await ctx.send(f\"I can't find my phone or the **`{arg}`** ping you are referencing, sorry. Try another ping, or see all of your pings with `!ping list`.\")\n return await ctx.send(\"I removed all pings you requested.\")\n elif command.lower() in [\"list\", \"all\"]:\n user = next((u for u in PING_INFO if u['id'] == member), None)\n if user == None or len(user['pings']) == 0:\n return await ctx.send(\"You have no registered pings.\")\n else:\n pings = user['pings']\n regex_pings = []\n word_pings = []\n for ping in pings:\n if ping[:2] == \"\\\\b\":\n word_pings.append(ping)\n else:\n regex_pings.append(ping)\n if len(regex_pings) > 0:\n await ctx.send(\"Your RegEx pings are: \" + \", \".join([f\"`{regex}`\" for regex in regex_pings]))\n if len(word_pings) > 0:\n await ctx.send(\"Your word pings are: \" + \", \".join([f\"`{word[3:-3]}`\" for word in word_pings]))\n elif command.lower() in [\"test\", \"try\"]:\n user = next((u for u in PING_INFO if u['id'] == member), None)\n user_pings = user['pings']\n matched = False\n for arg in args:\n for ping in user_pings:\n if len(re.findall(ping, arg, re.I)) > 0:\n await ctx.send(f\"Your ping `{ping}` matches `{arg}`.\")\n matched = True\n if not matched:\n await ctx.send(\"Your test matched no pings of yours.\")\n else:\n return await ctx.send(\"Sorry, I can't find that command.\")", "async def ping(self, ctx):\n embed = Embed(\n title=\"Pong! Websocket Latency:\",\n description=f\"{self.bot.ws.latency * 1000:.4f} ms\",\n color=self.bot.main_color,\n )\n return await ctx.send(embed=embed)", "def ping():\n return ping_response()", "def ping():\n return ping_response()", "def ping():\n return ping_response()", "def ping():\n return ping_response()", "async def ping(self, ctx):\n msg_time = ctx.message.created_at\n cur_time = datetime.utcnow()\n delay = (cur_time - msg_time) / timedelta(milliseconds=1)\n await ctx.send(f\"Pong! 
({str(delay)} ms)\")", "def enable(self, message):\n self.Enable()", "async def enable(self, ctx, *, channel: discord.Channel=None):\n\n server = ctx.message.server\n\n temp = self.bot.dota_ticker_settings.get(server.id)\n\n if temp is not None and temp['enabled']:\n await self.bot.say('The match ticker has already been enabled on this server.')\n return\n\n if channel is None:\n channel = server.default_channel\n\n settings = {'enabled': True, 'channel_id': channel.id}\n\n await self.bot.dota_ticker_settings.put(server.id, settings)\n await self.bot.say('The match ticker has been enabled on {0.mention}.'.format(channel))", "async def ping(self, ctx):\n current_time = int(round(time.time() * 1000))\n m = await say(ctx, \"Pong! ---ms\")\n last_time = int(round(time.time() * 1000))\n last_time -= current_time\n await m.edit(content=\"Pong! {}ms\".format(last_time))", "async def do_ping(self):\n return self._result(\"pong\")", "def testPing(self, minion_id):\n self.console_manager.printRed(''.join([\"Ping minion id: \", minion_id]))\n command = ['sudo', 'salt', minion_id, 'test.ping']\n self.console_manager.runCommandFromShell(command)\n return", "async def power_on(self):\n ...", "async def ping(self, ctx):\n\n msg = f\"{(self.bot.ws.latency * 1000):.2f} ms\"\n await ctx.info(f\"Bot Latency: {msg}\")", "def test_ping():\n mock = MagicMock(return_value=True)\n with patch.dict(win_network.__salt__, {\"cmd.run\": mock}):\n assert win_network.ping(\"127.0.0.1\")", "def pinger(var, wrapper, message):\n wrapper.reply(messages[\"ping\"].format(nick=wrapper.source, bot_nick=users.Bot))", "def lnet_ping(self):\n try:\n nodes = Conf.get(self._index, 'cluster>server_nodes')\n except:\n raise MotrError(errno.EINVAL, \"Server nodes not found\")\n\n check_type(nodes, dict, \"server_nodes\")\n\n nids = get_nids(self, nodes)\n\n sys.stdout.write(\"lnet pinging on all nodes in cluster\\n\")\n sys.stdout.write(\"motr_setup init MUST be performed on all nodes before \"\n \"executing this\\n\")\n for nid in nids:\n cmd = f\"lctl ping {nid}\"\n sys.stdout.write(f\"lctl ping on: {nid}\\n\")\n execute_command(self, cmd)", "async def ping(self):\n uri = \"/fapi/v1/ping\"\n success, error = await self.request(\"GET\", uri)\n return success, error", "def ping() -> str:\n return \"Server is here\"", "async def admin_global_ping(self, ctx: commands.Context,\n team_identifier: str, *message):\n pass", "def ping(self) -> Response:\n raise NotImplementedError", "def enable(self):\r\n self.update(enabled=True)", "async def send_heartbeat_message(self, *args, **kwargs):\n msg = \"ping\"\n await self._ws.send(msg)", "async def async_turn_on(self, **kwargs: Any) -> None:\n await self.entity_description.set_command(self, True)", "def Ping(self): # real signature unknown; restored from __doc__\n pass", "async def ping_pm(user_id, pinger, ping_exp, channel, content, jump_url):\n user_to_send = bot.get_user(user_id)\n try:\n content = re.sub(rf'{ping_exp}', r'**\\1**', content, flags=re.I)\n except Exception as e:\n print(f\"Could not bold ping due to unfavored RegEx. 
Error: {e}\")\n ping_exp = ping_exp.replace(r\"\\b(\", \"\").replace(r\")\\b\", \"\")\n warning = f\"\\n\\nIf you don't want this ping anymore, in `#bot-spam` on the server, send `!ping remove {ping_exp}`\"\n embed = assemble_embed(\n title=\":bellhop: Ping Alert!\",\n desc=(f\"Looks like `{pinger}` pinged a ping expression of yours in the Scioly.org Discord Server!\" + warning),\n fields=[\n {\"name\": \"Expression Matched\", \"value\": f\" `{ping_exp}`\", \"inline\": \"True\"},\n {\"name\": \"Jump To Message\", \"value\": f\"[Click here!]({jump_url})\", \"inline\": \"True\"},\n {\"name\": \"Channel\", \"value\": f\"`#{channel}`\", \"inline\": \"True\"},\n {\"name\": \"Content\", \"value\": content, \"inline\": \"False\"}\n ],\n hexcolor=\"#2E66B6\"\n )\n await user_to_send.send(embed=embed)", "def ping_daemon(self):\n s = self.ping_interval\n while True:\n p = domintell.messages.Ping()\n self.send(p)\n time.sleep(s)", "def turn_on(self, **kwargs):\n set_sonoff_state(self._host, \"on\")\n self._state = True", "def cmd_enable(self, app_name=None):\n rc = self.socket_command_with_project('enable', app_name)\n return rc", "def ping(self) -> bool:\n # consider 200 to be successful\n response = self.shards_response(\"ping\")\n return response.status_code == 200", "def _ping(self):\n\n self.last_ping = time.time()\n try:\n logger.debug(\"(%s) PING\", self.device[\"ip\"])\n _send_request(self.device, tf.HEART_BEAT)\n except socket.error:\n self.force_reconnect = True", "async def ping(self, ctx):\n m = await ctx.send(\"One moment...\")\n t1 = ctx.message.created_at\n t2 = m.created_at\n rc = (t2 - t1).total_seconds()\n emoji = 'โ˜ ๏ธ' if rc > 50 else ('๐Ÿ˜ญ' if rc > 5 else ('๐Ÿ˜จ' if rc > 1 else '๐Ÿ‘Œ'))\n await m.edit(content=\"Pong! `{0:.3f}s` {1}\\n\".format(rc, emoji))", "async def ping(ctx):\n latencies = {\n \"websocket\": bot.latency,\n }\n\n def comp_message():\n msgs = []\n for title in latencies:\n msgs.append(f\"{title.title()}: {(latencies[title] * 1000):.0f}ms\")\n return '\\n'.join(msgs)\n\n start = time.perf_counter()\n await ctx.respond(comp_message())\n end = time.perf_counter()\n\n latencies[\"round trip\"] = end - start\n\n await ctx.edit(content=comp_message())", "def ping(event, context):\n logger.info(\"Ping requested.\")\n return _get_response(200, \"PONG!\")", "def enable():\n ret = {}\n result = __salt__[\"cmd.run_all\"](\n \"pfctl -e\", output_loglevel=\"trace\", python_shell=False\n )\n\n if result[\"retcode\"] == 0:\n ret = {\"comment\": \"pf enabled\", \"changes\": True}\n else:\n # If pf was already enabled the return code is also non-zero.\n # Don't raise an exception in that case.\n if result[\"stderr\"] == \"pfctl: pf already enabled\":\n ret = {\"comment\": \"pf already enabled\", \"changes\": False}\n else:\n raise CommandExecutionError(\n \"Could not enable pf\",\n info={\"errors\": [result[\"stderr\"]], \"changes\": False},\n )\n\n return ret", "def enable(self):\n self.enabled = True" ]
[ "0.66952026", "0.6496358", "0.64724076", "0.64568186", "0.6376278", "0.6348216", "0.6326083", "0.6313369", "0.62750304", "0.62690145", "0.6257727", "0.6251653", "0.62509406", "0.62384945", "0.6231913", "0.62314135", "0.62132555", "0.61965", "0.61832154", "0.61657685", "0.6158019", "0.615301", "0.6104694", "0.6103379", "0.6103379", "0.60839504", "0.60764945", "0.6057231", "0.6040985", "0.6023026", "0.6021492", "0.60194176", "0.60188866", "0.5982721", "0.5972089", "0.59649664", "0.5949103", "0.5945979", "0.5945655", "0.59260315", "0.59259015", "0.59112346", "0.5879573", "0.5858311", "0.5853191", "0.583531", "0.5831366", "0.58192486", "0.58083826", "0.5804771", "0.5792619", "0.57869625", "0.57824826", "0.573903", "0.57271516", "0.57035863", "0.5679901", "0.5673377", "0.5665121", "0.56561506", "0.56460583", "0.5645145", "0.5632026", "0.56084347", "0.5588203", "0.5578542", "0.5578096", "0.5578096", "0.5578096", "0.5578096", "0.5571942", "0.55628496", "0.55584276", "0.5553052", "0.55511445", "0.55429566", "0.55363894", "0.54690737", "0.5456947", "0.54472375", "0.5445563", "0.5445307", "0.5428253", "0.5416575", "0.5408098", "0.54031205", "0.53998786", "0.5393392", "0.5391446", "0.53740126", "0.5368714", "0.53666073", "0.53604716", "0.53582346", "0.5350666", "0.5348222", "0.5344922", "0.5335219", "0.53335816", "0.5311327" ]
0.7750108
0
Returns the selected thread channel's name's length.
async def thread_channel_name_length( channel: ('channel_group_thread', 'Select a thread channel.') ): return len(channel.name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def text_channel_name_length(\n channel: Channel\n):\n return len(channel.name)", "async def voice_channel_name_length(\n channel: P('channel', 'Select a voice channel', channel_types = [ChannelType.guild_voice])\n):\n return len(channel.name)", "def channel_size(self):\n if self.channels is None:\n return 0\n return self.channels.size", "def get_length(self):\n return self.run_command('get_length')[0]", "def getconenamelen(self,i_): # 3\n res,resargs = self.__obj.getconenamelen(i_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _len_return_value = resargs\n return _len_return_value", "def namelength(self):\n return self[\"namelength\"]", "def getconenamelen(self,i_):\n len_ = ctypes.c_int32()\n res = __library__.MSK_XX_getconenamelen(self.__nativep,i_,ctypes.byref(len_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n len_ = len_.value\n _len_return_value = len_\n return (_len_return_value)", "def __len__(self) -> int:\n\n return len(self._space.CHANNELS) + 1", "def length_of_name(self, name):\n length = len(name)\n if length > 10:\n self.show_message_when_name_very_long()\n return length", "def n_channels(self):\n return len(self.channels)", "def gettasknamelen(self):\n len_ = ctypes.c_int32()\n res = __library__.MSK_XX_gettasknamelen(self.__nativep,ctypes.byref(len_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n len_ = len_.value\n _len_return_value = len_\n return (_len_return_value)", "def getNchan(self):\n return self.shape(squeeze=False)[2]", "def message_length(self):\n return self._message_length", "def test_support_CHANNELLEN(self):\n default = irc.ServerSupportedFeatures()._features[\"CHANNELLEN\"]\n self._testIntOrDefaultFeature(\"CHANNELLEN\", default)", "def length_name(self):\n return self._src_decoder.length_tensor_name", "def get_length(self):\n return self._select_interface(self._rc_get_length,\n self._http_get_length)", "def _get_length(self):\n return self._length", "def getLength(self):\n return self.n", "def n_channels(self):\n return self._n_channels", "def getLength(self):\n return self.count", "def sent_len(self) -> int:\n raise NotImplementedError(\"must be implemented by subclasses\")", "def length(self):\n\t\treturn self.n", "def __len__(self):\n # TODO: Is this method used?\n return self._info['length']", "def getLen(self):\n return self.len", "def getLength(msg):\n return len(msg)", "def get_length(self):\n return self._length", "def get_length(self):\n return self._length", "def length(self):\n return self.counter", "def gettasknamelen(self): # 3\n res,resargs = self.__obj.gettasknamelen()\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _len_return_value = resargs\n return _len_return_value", "def get_number_of_channels(tgt_l, model_graph):\n return int(model_graph.get_tensor_by_name(tgt_l + ':0').get_shape()[-1])", "def get_length(self):\n\n return self.length", "def get_context_length(self):\n return self.context_length", "def length(self):\n return self._info.length # pylint: disable=E1101", "def get_message_length(self):\n return len(self._payload)", "def len(self):\n return self.n", "def length(self) -> 'int':\n return self._info.len", "def getconnamelen(self,i_):\n len_ = ctypes.c_int32()\n res = __library__.MSK_XX_getconnamelen(self.__nativep,i_,ctypes.byref(len_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n len_ = len_.value\n _len_return_value = 
len_\n return (_len_return_value)", "def Lof(channel):\n return FileLen(VBFiles.getFile(channel).name)", "def getLength(self):\n return self.length", "def get_num_channels():\r\n check_mixer()\r\n return sdl.Mix_GroupCount(-1)", "def get_num_channels(self):\n return _uhd_swig.tx_streamer_get_num_channels(self)", "def __len__(self) -> int:\n return self._len", "def get_length(self):\n\n return self._length", "def __len__(self) -> int:\n return self._length", "def __len__(self) -> int:\n return self.length", "def getvarnamelen(self,i_):\n len_ = ctypes.c_int32()\n res = __library__.MSK_XX_getvarnamelen(self.__nativep,i_,ctypes.byref(len_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n len_ = len_.value\n _len_return_value = len_\n return (_len_return_value)", "def length(self):\n return self.count", "def __len__(self) -> int:\n return len(self.length)", "def get_num_channels(self):\r\n check_mixer()\r\n return sdl.Mix_GroupCount(self._chunk_tag)", "def getvarnamelen(self,i_): # 3\n res,resargs = self.__obj.getvarnamelen(i_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _len_return_value = resargs\n return _len_return_value", "def length(self) -> int:\r\n\r\n return self.__length", "def length(self):\n return self.length", "def getLength( self, sbjct_token ):\n if not self.mIsLoaded: self.__loadIndex()\n return self.mIndex[sbjct_token][2]", "def length(self) -> int:\n return self.size", "def length(self) -> int:\n return self.size", "def get_string_length(self):\n return int(self.read('H')[0])", "def getLength(self):\n return None", "def getLength(self):\n return None", "def GetLen(*args, **kwargs):\n return _gdi_.PseudoDC_GetLen(*args, **kwargs)", "def size(self):\n return len(self.chars)", "def length(self):\n return self._length", "def length(self):\n return self._length", "def length(self):\n return self.__length", "def length(self):\n return self.__length", "def length(self):\n ...", "def __len__(self):\n return self._length", "def __len__(self):\n return self._length", "def __len__(self):\n return self._length", "def get_size(channels):\n\n if channels not in (1, 2):\n raise ValueError('Wrong channels value. 
Must be equal to 1 or 2')\n\n return _get_size(channels)", "def get_length(self):\n return self.resource.get_size()", "def max_length(self):\n\t\treturn self._max_length", "def length(self):\n return len(self._commands)", "def __len__(self):\n return self._fa.faidx.index[self.name].rlen", "def channel_count(self):\n index = self._ordered_input_names.index('channel_count')\n return self._inputs[index]", "def get_size(self):\n return len(self.board)", "def Length(self) -> int:", "def Length(self) -> int:", "def __len__(self):\n return self._length # pylint: disable = E1101", "def len(self):\n # print(self.processed_file_names)\n return self.len_", "def llen(self, name):\n self.connect()\n self._write('LLEN %s\\r\\n' % name)\n return self._get_numeric_response()", "def get_length(self):\n return len(self.target) + len(self.action)", "def __len__(self):\n return self._n", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def length(self):\n pass", "def __len__(self):\n return self.__length", "def getobjnamelen(self):\n len_ = ctypes.c_int32()\n res = __library__.MSK_XX_getobjnamelen(self.__nativep,ctypes.byref(len_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n len_ = len_.value\n _len_return_value = len_\n return (_len_return_value)", "def target_length_name(self):\n name = dsutils._connect_name(\n self._data_spec.name_prefix[1],\n self._tgt_decoder.length_tensor_name)\n return name", "def length(self):\n return self.size", "def tester(name):\n return len(name)", "def get_length(self):\r\n return len(self.tweets)", "def get_length(self):\n return len(self.cards)", "def __len__(self):\n\t\treturn self.n", "def __len__(self):\n return self.n", "def __len__(self):\n return self.n", "def __len__(self):\n return self.n" ]
[ "0.79556036", "0.7465818", "0.67134553", "0.6301943", "0.62970394", "0.6221772", "0.61826706", "0.6180919", "0.61410934", "0.6127651", "0.61114144", "0.60849816", "0.6030827", "0.6022877", "0.6015919", "0.60124755", "0.5999161", "0.59950984", "0.5956299", "0.59463084", "0.5944298", "0.5928128", "0.5918257", "0.5889253", "0.5882715", "0.5874517", "0.5874517", "0.5873128", "0.5865276", "0.58158845", "0.5799346", "0.5795728", "0.57822734", "0.57751", "0.5758126", "0.5749913", "0.5748671", "0.574762", "0.57415307", "0.5733018", "0.5732402", "0.5724497", "0.57196206", "0.57183504", "0.5696746", "0.56915224", "0.56845826", "0.567289", "0.5668339", "0.5666983", "0.56658983", "0.56519026", "0.5650973", "0.56496125", "0.56496125", "0.5645285", "0.5641048", "0.5641048", "0.56259614", "0.56231695", "0.5621125", "0.5621125", "0.56159055", "0.56159055", "0.56145847", "0.55936563", "0.55936563", "0.55936563", "0.55893564", "0.55883545", "0.5584895", "0.5582623", "0.5572272", "0.5569956", "0.55667585", "0.55646753", "0.55646753", "0.5559565", "0.5559219", "0.55558324", "0.55541617", "0.55513954", "0.55510384", "0.55510384", "0.55510384", "0.55510384", "0.55510384", "0.55510384", "0.5547265", "0.55394864", "0.5539063", "0.5538064", "0.5533983", "0.5529087", "0.55286634", "0.55281746", "0.5527771", "0.55119276", "0.55119276", "0.55119276" ]
0.89964545
0
Returns the selected voice channel's name's length.
async def voice_channel_name_length( channel: P('channel', 'Select a voice channel', channel_types = [ChannelType.guild_voice]) ): return len(channel.name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def text_channel_name_length(\n channel: Channel\n):\n return len(channel.name)", "async def thread_channel_name_length(\n channel: ('channel_group_thread', 'Select a thread channel.')\n):\n return len(channel.name)", "def namelength(self):\n return self[\"namelength\"]", "def __len__(self) -> int:\n\n return len(self._space.CHANNELS) + 1", "def channel_size(self):\n if self.channels is None:\n return 0\n return self.channels.size", "def get_length(self):\n return self.run_command('get_length')[0]", "def get_length(self):\n return self._select_interface(self._rc_get_length,\n self._http_get_length)", "def length_of_name(self, name):\n length = len(name)\n if length > 10:\n self.show_message_when_name_very_long()\n return length", "def get_num_channels():\r\n check_mixer()\r\n return sdl.Mix_GroupCount(-1)", "def get_length(self):\r\n check_mixer()\r\n frequency, format, channels = (ffi.new('int*'), ffi.new('uint16_t*'),\r\n ffi.new('int*'))\r\n sdl.Mix_QuerySpec(frequency, format, channels)\r\n if format == sdl.AUDIO_S8 or format == sdl.AUDIO_U8:\r\n mixerbytes = 1.0\r\n else:\r\n mixerbytes = 2.0\r\n numsamples = self.chunk.alen / mixerbytes / channels[0]\r\n return numsamples / frequency[0]", "def _get_length(self):\n return self._length", "def get_length(self):\r\n return len(self.deck)", "def getLength(self):\n return self.count", "def getLength(self):\n return self.length", "def get_length(self):\n return len(self.cards)", "def get_length(self):\n return self._length", "def get_length(self):\n return self._length", "def get_length(self):\n\n return self.length", "def get_length(self):\r\n return len(self.hand)", "def __len__(self):\n # TODO: Is this method used?\n return self._info['length']", "def num_channels(self):\n with audioread.audio_open(self.path) as f:\n return f.channels", "def length(self):\n return self._info.length # pylint: disable=E1101", "def n_channels(self):\n return len(self.channels)", "def getLen(self):\n return self.len", "def get_num_channels(self):\r\n check_mixer()\r\n return sdl.Mix_GroupCount(self._chunk_tag)", "def getLength(self):\n return self.n", "def GetLen(*args, **kwargs):\n return _gdi_.PseudoDC_GetLen(*args, **kwargs)", "def get_length(self):\n\n return self._length", "def Lof(channel):\n return FileLen(VBFiles.getFile(channel).name)", "def length_name(self):\n return self._src_decoder.length_tensor_name", "def length(self) -> 'int':\n return self._info.len", "def getconenamelen(self,i_): # 3\n res,resargs = self.__obj.getconenamelen(i_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _len_return_value = resargs\n return _len_return_value", "def length(self):\n return self.length", "def get_len(song, album):\r\n length = 0\r\n words = dbase()[album][0][song]\r\n words = words[2]\r\n words = words.split()\r\n for word in words:\r\n length += 1\r\n return str(length)", "def length(self):\n\t\treturn self.n", "def test_support_CHANNELLEN(self):\n default = irc.ServerSupportedFeatures()._features[\"CHANNELLEN\"]\n self._testIntOrDefaultFeature(\"CHANNELLEN\", default)", "def llen(self, name):\n self.connect()\n self._write('LLEN %s\\r\\n' % name)\n return self._get_numeric_response()", "def get_wavelength(self, channel):\n\n wavelength = self.device.query(f':WAVEL{channel}:VAL?')\n return int(float(wavelength))", "def Length(self) -> int:", "def Length(self) -> int:", "def length(self):\n return self.counter", "def size(self):\n return len(self.chars)", "def length(self) -> int:\r\n\r\n return 
self.__length", "def length(self):\n return self._length", "def length(self):\n return self._length", "def n_channels(self):\n return self._n_channels", "def getLength(self) -> float:\n return self.length", "def getLength(self):\n return None", "def getLength(self):\n return None", "def length(self):\n return self.__length", "def length(self):\n return self.__length", "def length(self):\n ...", "def get_string_length(self):\n return int(self.read('H')[0])", "def length(self):\n return self.count", "def __len__(self) -> int:\n return len(self.length)", "def __len__(self) -> int:\n return self._length", "def __len__(self) -> int:\n return self.length", "def length(self) -> int:\n return self.size", "def length(self) -> int:\n return self.size", "def length(self):\n pass", "def getconenamelen(self,i_):\n len_ = ctypes.c_int32()\n res = __library__.MSK_XX_getconenamelen(self.__nativep,i_,ctypes.byref(len_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n len_ = len_.value\n _len_return_value = len_\n return (_len_return_value)", "def __len__(self):\n\n try:\n return len(self.counts)\n except SpectrumError:\n return len(self.cps)", "def get_length(self):\n return self.resource.get_size()", "def sent_len(self) -> int:\n raise NotImplementedError(\"must be implemented by subclasses\")", "def size(self):\n return len(self.sentence)", "def get_playlist_length():\n \n return len(_mpd_get_playlist())", "def getLength(self):\n return self.sideLength", "def size(self) -> int:\r\n return self.da.length()", "def length(self) -> int:\n pass", "def length(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'length'\")", "def getLength( self, sbjct_token ):\n if not self.mIsLoaded: self.__loadIndex()\n return self.mIndex[sbjct_token][2]", "def length(self):\n return self.size", "def get_num_channels(self):\n return _uhd_swig.tx_streamer_get_num_channels(self)", "def size(self) -> int:\n size = self.da.length()\n return size", "def __len__(self):\n return self._length # pylint: disable = E1101", "def get_num_channels(self):\n return _uhd_swig.rx_streamer_get_num_channels(self)", "def max_length(self):\n\t\treturn self._max_length", "def __len__(self):\n return self._length", "def __len__(self):\n return self._length", "def __len__(self):\n return self._length", "def length(self):\n\n return self._length", "def length(self):\n return len(self._commands)", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def get_size(channels):\n\n if channels not in (1, 2):\n raise ValueError('Wrong channels value. 
Must be equal to 1 or 2')\n\n return _get_size(channels)", "def __len__(self) -> int:\n return self._len", "def size(self) -> int:\n return self.da.length()", "def size(self) -> int:\n return self.da.length()", "def size(self) -> int:\n return self.da.length()", "def size(self) -> int:\n return self.da.length()", "def get_wavelength(self, c, channel=-1):\n if (channel == -1):\n channel = self.guess_channel()\n\n try:\n wavelength = self.binding.get_wavelength_num(channel)\n return wavelength * nm;\n except Exception, e:\n return self.handle_wavemeter_error(e)", "def get_length(self):\n length = 0\n for card in self.decklist:\n length += card.amount\n return length", "def __len__(self):\n\n return self.length", "def tester(name):\n return len(name)", "def total_length():\n return", "def __len__(self):\n return self.__length" ]
[ "0.7998428", "0.72766817", "0.67870444", "0.6776061", "0.6695213", "0.6688253", "0.6598871", "0.65934974", "0.64694244", "0.63770723", "0.6338646", "0.63136065", "0.6306306", "0.63023925", "0.6284565", "0.62841916", "0.62841916", "0.6279189", "0.6251116", "0.62204427", "0.62060505", "0.61950886", "0.61881214", "0.61818653", "0.617914", "0.61507547", "0.6136938", "0.61245686", "0.61174935", "0.61085206", "0.60831183", "0.6071044", "0.606199", "0.60564345", "0.6018589", "0.6004197", "0.59950835", "0.59882975", "0.5985318", "0.5985318", "0.59798056", "0.5979354", "0.597906", "0.5972014", "0.5972014", "0.59617126", "0.5958581", "0.5952286", "0.5952286", "0.595097", "0.595097", "0.5946705", "0.59283495", "0.59043086", "0.5895108", "0.5879788", "0.5877918", "0.5877809", "0.5877809", "0.58769447", "0.58738965", "0.5870218", "0.5856801", "0.58412087", "0.58393115", "0.5838237", "0.58377314", "0.58250344", "0.58238995", "0.5820349", "0.58179194", "0.5816627", "0.5815741", "0.5808272", "0.580061", "0.57966924", "0.5790569", "0.5782874", "0.5782874", "0.5782874", "0.5779172", "0.5778325", "0.57739395", "0.57739395", "0.57739395", "0.57739395", "0.57739395", "0.57739395", "0.5768872", "0.5768539", "0.5762165", "0.5762165", "0.5762165", "0.5762165", "0.57601696", "0.5753174", "0.57520413", "0.5746914", "0.5743063", "0.5739872" ]
0.88903457
0
Returns the name of the touhou character by its popularity position.
async def character_popularity( position: P('number', 'Please select a number between 1 and 20', min_value = 1, max_value = 20) ): return MOST_POPULAR_TOUHOU_CHARACTERS[position - 1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _name_champion(self):\n # TODO BREAK TIES\n return max(self.teams, key=lambda team: len(team.wins))", "def get_name():\n\n return character['Name']", "def name_at_position(self, i: int) -> str:\n return self.names[i]", "def pname(name):\n ranks = list(reversed(name.split(';')))\n for i, rank in enumerate(ranks):\n if rank in ['Others', 'Unassigned']:\n return rank\n if rank == '__':\n continue\n if rank.split('__')[1] is '':\n return ranks[i+1] + ';' + rank\n return rank", "def get_character_name(self):\n return self.name", "def sort_name(sprite):\n return sprite.name", "def name(self) -> str:\n try:\n return self.stats[\"Player name\"]\n except KeyError as ke:\n logger.debug(ke, exc_info=True)\n logger.warn(\"unable to get player name\")\n return \"\"", "def getCaptainName(self):\n if self.currentCaptainName == self.maxCaptainNames-50:\n # used up all captain names, build a new list of names\n self.setCaptainNames()\n # grab the latest name\n name = '%s %s' % (self.captainNames[self.currentCaptainName], self.captainNames[self.currentCaptainName+1])\n self.currentCaptainName += 2\n return name", "def getName(self):\n\n if self.vel == 0:\n return \"Rest\"\n\n order = self.nbr % 12 # Position in octave (Ex: 0 - C, 1 - C#,...)\n return ['C', 'C#/Db', 'D', 'D#/Eb',\n 'E', 'F', 'F#/Gb', 'G',\n 'G#/Ab', 'A', 'A#/Bb', 'B'][order]", "def species_name(self):\n return self.get(self._names[\"species_name\"])", "def get_position():\n\n return character['Position']", "def get_name(self):\n if type(self.chat) == WebElement:\n return self.chat.find_element_by_xpath(\".//span[@dir='auto']\").text", "def species(self):\n return self.name", "def getname(self):\n if 'chtt' in self.data:\n return self.data['chtt']", "def name_at_position(self, i: int) -> str:\n upper = len(self.names) - 1\n if not 0 <= i <= upper:\n raise ValueError(f'Column index must be between 0 and {upper:d}, inclusive')\n return self.names[i]", "def get_name(text):\n names = get_pokemon_names()\n for pokemon_name in names:\n pattern = r\"(\" + pokemon_name + \")\"\n m = re.search(pattern, text, re.IGNORECASE | re.MULTILINE)\n if m:\n name = m.groups(0)[0]\n return name\n return UNKNOWN", "def get_top_char(probas, char_size, top_n=5):\n p = np.squeeze(probas)\n p[np.argsort(p)[:-top_n]] = 0.0\n p = p / np.sum(p)\n ch_id = np.random.choice(char_size, 1, p=p)[0]\n return ch_id", "def get_name(self):\n return self._player_name", "def get_name() -> str:", "def name(self):\n return self.data[\"attributes\"][\"stats\"][\"name\"]", "def display_name(self):\n if len(self.current_hand) == 0:\n return self.nume\n else:\n card_sum = self.get_cards_sum()\n blackjack = ''\n if card_sum == 21:\n blackjack = 'BLACKJACK !!!'\n return ('%s [%s] - %d' %\n (self.nume, self.get_cards_str(), card_sum)) + blackjack", "def word(self):\n return str(self.name.names[-1])", "def parse_name(relation):\n if relation.player:\n char_ob = relation.player.char_ob\n return \"%s %s\" % (char_ob.key, char_ob.item_data.family)\n else:\n return str(relation)", "def characterName(self):\n return self._characterName", "def get_name():", "def position(self, seat):\n index = seat - self.button_seat\n return PokerStarsParser.position_name_list[self.players_number-2][index]", "def get_random_lastname_scottish ():\n name = db.get_database(DB_LAST_GAELIC2).random_pop()\n if not name:\n return get_random_lastname_simple()\n return \"%s%s\" % (random.choice(('Mc', 'Mac')), name)", "def char_name(character_object, verbose_where=False, watch_list=None):\n watch_list = watch_list 
or []\n cname = character_object.name\n if character_object in watch_list:\n cname += \"{c*{n\"\n if character_object.player_ob and character_object.player_ob.db.lookingforrp:\n cname += \"|R+|n\"\n if not verbose_where:\n return cname\n if character_object.db.room_title:\n cname += \"{w(%s){n\" % character_object.db.room_title\n return cname", "def get_character(self):\n\n return self.suggestion_set[2]", "def get_name(ticker_symbol, page=None):\n if page is None:\n page = scrape_page(BASE_URL + ticker_symbol)\n\n sentiment = page.xpath(FULL_NAME_XPATH)\n\n if not sentiment:\n return None\n else:\n return sentiment[0].replace(\"\\n\", \"\")", "def getName(self):\n l = []\n for wt in self.weights:\n l.append(chr( int( 97 + (sum(map(sum,wt)) * 10) % 26 ) ))\n for bs in self.bias:\n #print(\"BS: \"+str(bs[0]))\n l.append(chr( int( 97 + (sum(bs) * 10) % 26 ) ))\n l[0] = chr(ord(l[0]) - 32)\n self.name = ''.join(l)\n return self.name", "def score_name(self) -> str:\n return self._score_name", "def get_name_from_player(player):\r\n return player.name.lower()", "def get_random_lastname_irish ():\n name = db.get_database(DB_LAST_GAELIC1).random_pop()\n if not name:\n return get_random_lastname_simple()\n return \"O'%s\" % name", "def fix_name_nga(artist):\n if \"sculptor\" in artist:\n return artist[:artist.find(\"sculptor\")].strip()\n else:\n return artist.strip()", "def name(self):\n return 'n' + self._name\n # if self.children:\n # return 'fossil_' + self._name\n # else:\n # return 'society_' + self._name", "def get_name(self):\n \n # Return the player's name\n return self._name", "def get_hp():\n\n return character['HP']", "def in_battle_name(self):\n if self.trainer:\n return u\"{0}'s {1}\".format(self.trainer.name, self.nickname)\n else:\n return u\"Wild {0}\".format(self.nickname)", "def getName(self, index) -> Str:\n ...", "def current_name(self):\n return self.name_set.order_by('-vote')[0]", "def displayName(self):\n return self.tr('SE Rimozione Inquinanti')", "def getName(self):\n return _libsbml.Species_getName(self)", "def _name(self, upcase = True):\n s = ''\n if self.Z >= 0:\n if upcase:\n s = Elements[self.Z]\n else:\n s = elements[self.Z]\n if self.F & self.F_GROUP_MASK == self.F_ISOBAR:\n s = 'A:'\n if self.F & self.F_GROUP_MASK == self.F_ISOTONE:\n s = 'N:'\n if self.A != 0 or (self.F & self.F_GROUP_MASK == self.F_ISOTONE):\n s += \"{:d}\".format(self.A)\n if self.A == 1 and self.Z == 0 and (self.F & self.F_GROUP_MASK == self.F_ISOTOPE):\n s = 'n'\n if self.F & self.F_GROUP_MASK == self.F_ISOMER:\n if self.A == 0 and self.Z == 0:\n if self.E == 1:\n s = 'g'\n else:\n s = 'g{:d}'.format(self.E)\n else:\n s += self.isomer_name(self.E)\n if self.F & self.F_GROUP_MASK == self.F_BOSON:\n if self.A == 0 and self.Z == 0:\n if self.E == 1:\n s = 'g'\n else:\n s = 'g{:d}'.format(self.E)\n else:\n raise NotImplementedError()\n s = self._SPECIAL.get(self.idx, s)\n return s", "def display_genre(self):\n return ', '.join(genre.name for genre in self.genre.all()[:3])", "def get_names():\n only_links = SoupStrainer(\"a\")\n names = set()\n doc = requests.get(NAMES_URL).content\n links = BeautifulSoup(doc, \"html.parser\", parse_only=only_links)\n pokemon = links.find_all(title=re.compile(\"(\\w+)(\\s){1}(\\(Pokรฉmon\\))\"))\n for cell in pokemon:\n names.add(str(cell.string))\n \n\n return names", "def get_title(self, obj):\n title = obj.habit.title\n return title", "def display_genre(self):\n\n\t\treturn ', '.join(genre.name for genre in self.genre.all()[:3])", "def getName(self):\n\n return 
self.player", "def get_player_char(self):\n _data_address = self.game_reading.read_int(Lf2AddressTable.DataPointer)\n for i in range(len(Lf2AddressTable.DataFile)):\n Lf2AddressTable.DataFile[i] = self.game_reading.read_int(_data_address + i * 4)\n\n for i, item in enumerate(Char_Name):\n Char_Name[item] = Lf2AddressTable.DataFile[i]\n\n for name, i in Char_Name.items():\n if i == self.DataAddress:\n return name\n return ''", "def get_species_name(fasta):\n name = fasta.description\n if ',' in name:\n name = ','.join(name.split(',')[:-1])\n name = ' '.join(name.split()[1:])\n if name.endswith(' '):\n name = name[:-1]\n if name.endswith(','):\n name = name[:-1]\n return name", "def indexed_name(self):\n return self._json['author-profile'].get('preferred-name', {}).get('indexed-name')", "def get_hero_name(self, i):\n for hero in self.heroes:\n if hero['id'] == i:\n return hero['localized_name']\n return 'Unknown Hero'", "def __str__(self):\n return f\"{self.rank.title()} of {self.suit.title()}\"", "def display_genre(self):\n return ', '.join([ genre.name for genre in self.genre.all()[:3] ])", "def getElementName(self):\n return _libsbml.SpeciesGlyph_getElementName(self)", "def getName():", "def getName():", "def getName():", "def getName():", "def getName():", "def getName():", "def get_display_name(member):\n if member.nick is None:\n name = member.name\n else:\n name = member.nick\n if User.objects.get(id=member.id).is_ironman:\n name += ' (IM)'\n return name", "def get_random_lastname_simple ():\n return db_random_pop_default(DB_LAST_SIMPLE, \"Doe\")", "def get_name(self):\n return self._g.get_name()", "def name(who):\r\n if who == 0:\r\n return 'Player 0'\r\n elif who == 1:\r\n return 'Player 1'\r\n else:\r\n return 'An unknown player'", "def display_get_most_played():\n title_game = reports.get_most_played(filename)\n print(\"Title of most played game in {}: {}\\n\".format(filename, title_game))", "def broj_u_string(self):\n if self.computer_number == 0:\n self.computer_choice_name = \"rock\"\n elif self.computer_number == 1:\n self.computer_choice_name = \"Spock\"\n elif self.computer_number == 2:\n self.computer_choice_name = \"paper\"\n elif self.computer_number == 3:\n self.computer_choice_name = \"lizard\"\n elif self.computer_number == 4:\n self.computer_choice_name = \"scissors\"\n else:\n self.computer_choice_name = None\n raise RpslsError(103)\n return self.computer_choice_name", "def get_name(self):\n return self.soup.find('div', id = 'zh-topic-title').h1\\\n .get_text(strip = True).encode(CODE)", "def get_random_lastname_family ():\n if one_chance_in(3):\n return get_random_lastname_irish ()\n elif coinflip():\n return get_random_lastname_scottish ()\n else:\n return get_random_lastname_nameson()", "def getName(self) -> unicode:\n ...", "def getName(self) -> unicode:\n ...", "def getName(self) -> unicode:\n ...", "def getName(self) -> unicode:\n ...", "def getName(self) -> unicode:\n ...", "def display_genre(self):\n \n # Get first 3 genres and join to a string.\n return ', '.join([ genre.name for genre in self.genre.all()[:3] ])", "def get_name():\n return \"Boss\"", "def charname(self):\n return self._charname", "def pos_to_name(reg):\n l,b = position_region(reg).galactic() \n if numpy.sign(b) == 1:\n pm = \"+\"\n else:\n pm = \"-\"\n text = \"G%4.2f%1s%4.2f\" % (l,pm,abs(b))\n return text", "def BattleOfTheSexes():\n g = CoordinationGame(A=3, a=2, B=0, b=0, C=1, c=1, D=2, d=3)\n g.rename('Battle of the sexes - ' + repr(g))\n return g", "def get(name):\n position = 
Position.objects.get_or_create(name=name.lower())\n if isinstance(position, tuple):\n position = position[0]\n position.auto_title()\n return position", "def name(self):\n name = self.__telegram_info.message.from_user.name\n return name[0].upper() + name[1::]", "def extract_names(filename):\n raw_text = read_html(filename) \n \n #searching for the year\n year = re.search('(<h3 align=\"center\">Popularity in )(\\d\\d\\d\\d)',raw_text).group(2)\n \n #searching for the list of names\n list_of_names = re.findall('<td>(\\d+)</td><td>(\\w+)</td><td>(\\w+)</td>',raw_text)\n \n #pair each name with it's rank\n name_and_rank = [] \n for line in list_of_names:\n name_and_rank.append((line[1], line[0]))\n name_and_rank.append((line[2], line[0]))\n \n # sort the list alphabetically\n name_and_rank = sorted(name_and_rank, key = lambda x:x[0])\n name_and_rank = dict(name_and_rank)\n\n return year, name_and_rank[:20]", "def get_name_popularity():\n \n # Parse HTTP GET parameters\n name = request.args.get('name')\n sex = request.args.get('sex')\n \n # Extract year and rank in year for the given name-sex combination\n name_subset = babynames[(babynames['name'] == name) & (babynames['sex'] == sex)]\n name_years = name_subset['year'].tolist()\n name_ranks = name_subset['rank_in_year'].tolist()\n \n # Some names do not appear in all years\n #\n # Build the return list with a value of None for the years where\n # the given name does not appear\n result = []\n \n for year in range(MIN_YEAR, MAX_YEAR + 1):\n if year not in name_years:\n result.append(None);\n else:\n result.append(name_ranks.pop(0))\n \n # Return as JSON\n # Python None values are automatically parsed to JavaScript null\n return_object = {'data': result}\n return Response(json.dumps(return_object), mimetype='application/json')", "def get_name(self):\n return self.children[0]", "def get_full_label(self):\n return Card.ranks[self.ranks[0]] + '-High' if self.rank == 0 else \\\n self.get_label()", "def get_name(self):", "def get_name(self):", "def get_name(self):\n\n return ri.RhinoInput(self.last).get_name()", "def get_character(self):\n return self.character", "def getElementName(self):\n return _libsbml.ListOfSpeciesGlyphs_getElementName(self)", "def get_sub_name(self):\n return self.sub_name", "def top_emojis(df, name):\n counter = Counter()\n df.loc[df[\"from\"] == name][\"emojis\"].str.split(\",\").apply(counter.update)\n counter = (sorted(counter.items(), key=lambda x: x[1], reverse=True))\n return counter", "def get_name(self):\n\t\treturn self.name", "def get_name(self):\n\t\treturn self.name", "def get_name(self):\n\t\treturn self.name", "def name(self, obj, index=0):\n if hasattr(obj, 'id'):\n uid = obj.id.replace('material', 'm')\n else:\n self._namecount += 1\n uid = 'Untitled.' + str(self._namecount)\n base = '%s-%d' % (uid, index)\n if base not in self._names:\n self._namecount += 1\n self._names[base] = '%s-%.4d' % (base[:MAX_NAME_LENGTH], self._namecount)\n return self._names[base]", "def shotparser(shot: str):\n characters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 't', 'u',\n 'v', 'w', 'x', 'y', 'z']\n # Get index of letter\n return int(shot[1:]) - 1, characters.index(shot[0])", "def name(self, cname: str)->str:\n return self.like(cname, mx=1)[0]['cname']", "def display_genre(self, *args):\n return ', '.join(genre.name for genre in args[0].genre.all()[:3])" ]
[ "0.615823", "0.61189646", "0.60745996", "0.57972014", "0.5788074", "0.5758207", "0.57052803", "0.5668703", "0.55920833", "0.55471593", "0.55327225", "0.55151343", "0.5483696", "0.547799", "0.54644465", "0.5454449", "0.542765", "0.5390605", "0.53518206", "0.532722", "0.5312515", "0.53079027", "0.5295991", "0.52946824", "0.5292416", "0.52893597", "0.52882314", "0.52872854", "0.5269813", "0.5267011", "0.5251083", "0.52045196", "0.5196751", "0.51868266", "0.5182492", "0.51759243", "0.51545155", "0.51543957", "0.51484686", "0.5139736", "0.5135188", "0.5132654", "0.51250964", "0.5113156", "0.5102175", "0.5099599", "0.5095827", "0.509562", "0.5095198", "0.5087273", "0.50827795", "0.5078417", "0.5061948", "0.5061256", "0.5056478", "0.5044506", "0.5031059", "0.5031059", "0.5031059", "0.5031059", "0.5031059", "0.5031059", "0.502954", "0.5028198", "0.50264233", "0.501709", "0.50159234", "0.5011304", "0.5004386", "0.50033975", "0.500271", "0.500271", "0.500271", "0.500271", "0.500271", "0.5000259", "0.49960586", "0.49924883", "0.4988967", "0.49866685", "0.49790385", "0.4973629", "0.49726853", "0.49723423", "0.49663895", "0.4964305", "0.49615318", "0.49615318", "0.49542564", "0.49524888", "0.49495322", "0.49484864", "0.49389306", "0.49382815", "0.49382815", "0.49382815", "0.4936998", "0.49363947", "0.4933506", "0.49322268" ]
0.7177974
0
Edits the selected user's nick.
async def set_nick( client, event, user: ('user', 'Who\'s?'), nick: P(str, 'Their new nick', min_length = 1, max_length = 32) = None, ): yield await client.user_guild_profile_edit(event.guild, user, nick=nick) yield f'{user:f}\'s nick has been updated'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def nick(\n self, context: Context, user: discord.User, *, nickname: str = None\n ) -> None:\n member = context.guild.get_member(user.id) or await context.guild.fetch_member(\n user.id\n )\n try:\n await member.edit(nick=nickname)\n embed = discord.Embed(\n description=f\"**{member}'s** new nickname is **{nickname}**!\",\n color=0x9C84EF,\n )\n await context.send(embed=embed)\n except:\n embed = discord.Embed(\n description=\"An error occurred while trying to change the nickname of the user. Make sure my role is above the role of the user you want to change the nickname.\",\n color=0xE02B2B,\n )\n await context.send(embed=embed)", "async def nick(self, ctx, *, nickname):\n if len(nickname) > 32:\n await ctx.send(\"Nickname must be 32 characters or fewer\")\n return\n await ctx.me.edit(nick=nickname)\n await ctx.send(f\"Nickname changed to {nickname}\")", "async def nick(self, context: SlashContext, user: discord.User, nickname: str = None):\n author = await context.guild.fetch_member(context.author_id)\n if not author.guild_permissions.manage_nicknames:\n embed = discord.Embed(\n title=\"Error!\",\n description=\"You don't have enough permissions to change the nickname of this user.\",\n color=0xE02B2B\n )\n return await context.send(embed=embed)\n member = await context.guild.fetch_member(user.id)\n try:\n await member.edit(nick=nickname)\n embed = discord.Embed(\n title=\"Changed Nickname!\",\n description=f\"**{member}'s** new nickname is **{nickname}**!\",\n color=0x42F56C\n )\n await context.send(embed=embed)\n except:\n embed = discord.Embed(\n title=\"Error!\",\n description=\"An error occurred while trying to change the nickname of the user. Make sure my role is above the role of the user you want to change the nickname.\",\n color=0xE02B2B\n )\n await context.message.channel.send(embed=embed)", "def set_nick(self, nick):\n raise NotImplementedError", "def change_nick(self, before, after):\n userdata = self.users[irc.strings.lower(before)]\n self.del_user(before)\n self.add_user(after, userdata)", "def change_nick_in_room(self, login, room, nick):\n pass", "async def _nick(self, nick: str) -> str:\n\n logger.debug(f\"Setting nick to {nick!r}\")\n\n self._target_nick = nick\n\n reply = await self._connection.send(\"nick\", {\"name\": nick})\n data = self._extract_data(reply)\n\n new_nick = data[\"to\"]\n self._target_nick = new_nick\n\n if self._session is not None:\n self._session = self._session.with_nick(new_nick)\n\n logger.debug(f\"Set nick to {new_nick!r}\")\n\n return new_nick", "def update_nick(self, nick):\n if self.nick == nick:\n return\n\n # Update the nick hashmap\n if self.nick:\n self.users.nick_hashmap[self.nick].remove(self)\n self.users.nick_hashmap[nick].append(self)\n\n LOG.info(\"Updating user nick: {} -> {}\".format(self.nick, nick))\n\n self.nick = nick\n\n self.users.modified_callback()", "async def change_nick(self, member: discord.Member, nick_chosen: str):\n try:\n await self.bot.change_nickname(member, nick_chosen)\n await self.bot.say(\"{0} nickname successfully changed to '{1}'\".format(member, nick_chosen))\n except discord.HTTPException:\n await self.bot.say(\"[ERROR:HTTPException] {0.name} has not enough permissions.\".format(self.bot.user))", "def irc_NICK(self, prefix, params):\n user = re.match(self.user_regex, prefix)\n new_nick = params[0]\n\n self.logger.debug(\n \"%s!%s@%s changed nick to %s\" %\n (user.group(1), user.group(2), user.group(3), new_nick)\n )\n\n self.event_manager.fire(\"irc.nick\", user, new_nick)", "async def 
nickname(self, ctx, *, nickname=\"\"):\n # [p]set nickname <nickname>\n\n nickname = nickname.strip()\n if nickname == \"\":\n nickname = None\n try:\n await self.bot.change_nickname(ctx.message.server.me, nickname)\n await self.bot.say(\"Done.\")\n except discord.Forbidden:\n await self.bot.say(\"I cannot do that, I lack the \"\n \"\\\"Change Nickname\\\" permission.\")", "def _switch_nick(self):\n self.nickname = self.firstnick + str(random.randint(1000, 9999))\n self._log(self.botlog, 'Switching to nick %s' % self.nickname)\n self._send('NICK %s' % self.nickname)", "def sendnick(self):\n self._send(\"NICK %s\" % (CONFIG[\"nick\"]))", "async def update_nickname(guild_id):\n user_id = await token_check()\n await guild_check(user_id, guild_id)\n\n j = validate(await request.get_json(), {\n 'nick': {'type': 'nickname'}\n })\n\n nick = j['nick'] or None\n\n await app.db.execute(\"\"\"\n UPDATE members\n SET nickname = $1\n WHERE user_id = $2 AND guild_id = $3\n \"\"\", nick, user_id, guild_id)\n\n member = await app.storage.get_member_data_one(guild_id, user_id)\n member.pop('joined_at')\n\n # call pres_update for nick changes, etc.\n await app.dispatcher.dispatch(\n 'lazy_guild', guild_id, 'pres_update', user_id, {\n 'nick': j['nick']\n })\n\n await app.dispatcher.dispatch_guild(guild_id, 'GUILD_MEMBER_UPDATE', {**{\n 'guild_id': str(guild_id)\n }, **member})\n\n return j['nick']", "def _edit_user(self):\n users = fileIO.load_json(\"users.json\")\n print(\"The list of users is as follows: \")\n for i in users:\n print(users[i][\"name\"])\n #List specific user's settings and get user id\n userID = self._list_user_settings(users)\n #Loop until valid option given\n option = False\n while not option:\n option = input(\"Please enter the setting you would like to change: \")\n if option not in users[userID]:\n option = False\n print(\"That setting is not valid.\")\n #Get input for new setting\n args = input(\"Please enter what you would like to change that setting to: \")\n #Output\n command = \"edit_user {0} {1} {2}\\r\\n\".format(userID, option, args)\n return(command)", "def nickname(self, new_nickname):\r\n self.set({\"nickname\": new_nickname})", "def change_username(self, name):\n self.username = name", "def edit_username(request):\n user = request.user\n\n form = forms.UsernameChangeForm(instance=user)\n if request.method == 'POST':\n form = forms.UsernameChangeForm(instance=user, data=request.POST)\n\n if form.is_valid():\n form.save()\n messages.success(request, 'Your username has been updated.')\n else:\n user = User.objects.get(id=user.id)\n\n return render(request, 'user/edit_username.html', {'form':form,\n 'user':user})", "def edit_user(self):\n from editWindow import EditPlayer\n self.edit = EditPlayer(self.lang, self.result_table.currentItem().text())\n self.edit.show()", "def home_edituser():\n\tpass", "def update_user():", "async def assign_clan(self, ctx, user : discord.Member, *, clanname=\"\"):\r\n nickname = '[{}] {}'.format(clanname.strip(), user.name)\r\n if clanname == \"\":\r\n nickname = None\r\n try:\r\n await self.bot.change_nickname(user, nickname)\r\n await self.bot.say(\"Done.\")\r\n except discord.Forbidden:\r\n await self.bot.say(\"I cannot do that, I lack the \"\r\n \"\\\"Manage Nicknames\\\" permission.\")", "async def name(self, ctx:utils.Context, *, username:str):\n\n if len(username) > 32:\n await ctx.send('That username is too long.')\n return\n await self.bot.user.edit(username=username)\n await ctx.send('Done.')", "def on_nick(self, raw_msg, source, 
old_nickname, new_nickname, **kwargs):", "def irc_NICK(self, prefix, params):\n old_nick = prefix.split('!')[0]\n new_nick = params[0]\n self.logger.log(\"%s is now known as %s\" % (old_nick, new_nick))", "def _request_pwd_change(self, nick):\n user = nick.split('!')[0]\n if ':new_master' in self.users and user == self.users[':master']:\n self.users[user]['authenticated_at'] = datetime.datetime.now()\n self.bot.client.send('PRIVMSG', user, ':Please change your \\\npassword as soon as possible using the change_password command!')", "def _optionsmenu_changeusername():\n\n self.helpindex = Toplevel(self.master)\n self.helpindex.title(\"Change Username\")\n self.helpindex.geometry(\"300x500\")", "def on_nick_change(bot, trigger):\n\told_nick = trigger.nick\n\tnew_nick = Identifier(trigger.args[0])\n\tfor channel in bot.privileges:\n\t\tif new_nick in bot.privileges[channel]:\n\t\t\tlog(bot, channel, '*** {} is now known as {}', old_nick, new_nick);", "def set_nickname(self, nickname):\n \n if len(nickname) > globals.MAX_NICKNAME_LENGTH:\n nick = nickname[0:globals.MAX_NICKNAME_LENGTH-3]+\"...\"\n else:\n nick = nickname\n \n self._nickname.set_message(nick)", "def nick(self):\n if(not SOCKET_TO_USERID.has_key(self.source)):\n new_userid = _char_list_to_string(random.sample(ALPHABET, USERID_LENGTH))\n while(USERID_TO_SOCKET.has_key(new_userid)):\n new_userid = _char_list_to_string(random.sample(ALPHABET, USERID_LENGTH))\n USERID_TO_SOCKET[new_userid] = self.source\n SOCKET_TO_USERID[self.source] = new_userid\n self.send()", "def test_nickChange(self):\n oldnick = \"foo\"\n newnick = \"bar\"\n self.protocol.register(oldnick)\n self.protocol.irc_RPL_WELCOME(\"prefix\", [\"param\"])\n self.protocol.setNick(newnick)\n self.assertEqual(self.protocol.nickname, oldnick)\n self.protocol.irc_NICK(\"{}!quux@qux\".format(oldnick), [newnick])\n self.assertEqual(self.protocol.nickname, newnick)", "async def name(self, ctx, *, name):\n # [p]set name <name>\n\n name = name.strip()\n if name != \"\":\n try:\n await self.bot.edit_profile(username=name)\n except:\n await self.bot.say(\"Failed to change name. Remember that you\"\n \" can only do it up to 2 times an hour.\"\n \"Use nicknames if you need frequent \"\n \"changes. 
{}set nickname\".format(ctx.prefix))\n else:\n await self.bot.say(\"Done.\")\n else:\n await send_command_help(ctx)", "def update_username(self, new_username, id):\n self.execute(TABELLE['users']['update'], (new_username, id,))", "def update_user(id):\n pass", "def edit_user_name(self, dto):\n user_id = dto[\"user_id\"]\n user_pin = dto[\"pin\"]\n new_user_name = dto[\"new_user_name\"]\n user = self._find_user_by_id_and_pin(user_id, user_pin)\n self.validate_user_name(new_user_name)\n user[\"user_name\"] = new_user_name\n self._user_dao.save_user(user)", "def donick(self, nick, setorig=0, save=0, whois=0):\n\n if not nick:\n return\n\n # disable auto 433 nick changing\n self.noauto433 = 1\n\n # set up wait for NICK command and issue NICK\n queue = Queue.Queue()\n nick = nick[:16]\n self.wait.register('NICK', self.nick[:16], queue, 12)\n self._raw('NICK %s\\n' % nick)\n result = waitforqueue(queue, 5)\n\n # reenable 433 auto nick changing\n self.noauto433 = 0\n if not result:\n return 0\n self.nick = nick\n\n # send whois\n if whois:\n self.whois(nick)\n\n # set original\n if setorig:\n self.orignick = nick\n\n # save nick to state and config file\n if save:\n self.state['nick'] = nick\n self.state.save()\n self.cfg.set('nick', nick)\n self.cfg.save()\n return 1", "def update_user():\n #TODO user update \n pass", "def on_the_edit_acl_select_the_user_name(driver, user_name):\n assert wait_on_element(driver, 5, '//h1[text()=\"Edit ACL\"]')\n assert wait_on_element(driver, 5, '//div[contains(.,\"Owner:\") and contains(@class,\"control\")]//input', 'inputable')\n driver.find_element_by_xpath('//div[contains(.,\"Owner:\") and contains(@class,\"control\")]//input').clear()\n driver.find_element_by_xpath('//div[contains(.,\"Owner:\") and contains(@class,\"control\")]//input').send_keys(user_name)", "def on_nicknameinuse(self, c, e):\n c.nick(c.get_nickname() + \"_\")", "def set_username(self, value):\n self.username = value", "def enter_username(self):", "def setName(self, newName):\n self.__username = newName", "async def edit(self, name=None):\n\t\tsanitized_name = utilities.sanitize_string(str(name))\n\n\t\tif sanitized_name in {'', None}:\n\t\t\traise exceptions.ClientError('INVALID_NAME')\n\n\t\tif sanitized_name == self.name:\n\t\t\traise exceptions.ClientError('INVALID_NAME')\n\n\t\tif not 0 < len(sanitized_name) < 32:\n\t\t\traise exceptions.ClientError('INVALID_NAME')\n\n\t\tself.name = sanitized_name\n\n\t\tif self.group != None:\n\t\t\tfor member in self.group.members:\n\t\t\t\tif member.name == sanitized_name and member.uid != self.uid:\n\t\t\t\t\traise exceptions.ClientError('TAKEN_NAME')\n\n\t\t\tawait self.group.update_user(self)", "def userRenamed(self, oldname, newname):\n # Send messasge to Server bot.\n self.data_in(text=\"\", type=\"renamed\", oldname=oldname, newname=newname)", "def nickname(request):\n FP = request.POST.get('fingerprint')\n Nick = re.sub('^ ', '_', request.POST.get('new-nick').rstrip())\n Nick = re.sub('[^_ a-zA-Z0-9]', '', Nick)\n A = addressbook.address.Address.objects.get(fingerprint=FP)\n A.nickname = Nick\n A.save()\n return HttpResponse(json.dumps({'ok':True,\n 'fp':A.fingerprint,\n 'nick':A.nickname,}),\n content_type='application/json')", "def change_username():\n if request.method == 'POST':\n username = get_username()\n new_username = request.form['change_username']\n user_id = get_id_from_username(username)\n #TODO: Error handling on database writes lol\n change_username_from_id(user_id, new_username )\n return 
redirect(url_for('users.account_page', username=new_username))", "def enter_username(self, user=TestData.USERS[0]):\n self.username.clear()\n self.username.click()\n self.username.send_keys(user)\n return self.username", "def senduser(self):\n self._send(\"USER %s * * :%s\" % (CONFIG[\"nick\"], CONFIG[\"real\"]))", "def change(login):\n try:\n manager = Actions()\n manager.change_user(login)\n except Exception as e:\n print(e)", "def updateUsers(self):\n sel = self.userbox.GetValue()\n usernames = ['None'] + c.getUserNames()\n self.userbox.SetItems(usernames)\n if sel in self.userbox.GetItems():\n self.userbox.SetValue(sel)\n if sel == 'None':\n self.btnUserSettings.Enable(False)\n else:\n self.btnUserSettings.Enable(True)\n else:\n self.userbox.SetSelection(0)\n self.btnUserSettings.Enable(False)", "def do_edit(self, args):\n member = None\n rowid = args.split(' ')[0]\n \n # loop till we get a rowid which matches a member in the database\n while True:\n rowid = self.validateRowid(rowid)\n if rowid is None:\n rowid = input('Enter member id: ')\n continue\n \n member = self.roster.get(rowid)\n if member is None:\n print(\"No member with id of %d\" % rowid)\n # rowid will get validated again, but it's the same value\n # which already passed validation\n continue\n \n break\n \n print('Editing %s %s' % (member.first, member.last))\n print('Type new value, hit enter to keep current value, or enter spaces to clear a value')\n member.first = self.getNewValue('First name', member.first)\n member.last = self.getNewValue('Last name', member.last)\n member.introducedDate = self.getNewValue('introduced date', member.introducedDate) \n \n self.roster.update(member)", "def change_user(\n self,\n username: Optional[str],\n cookie: Optional[str],\n info: Optional[Dict[str, Any]],\n ):\n if cookie is None and info is not None:\n raise ValueError(\"Cannot set login info if there is no login cookie\")\n if username is None:\n if cookie is not None:\n raise ValueError(\n \"Cannot login without providing a user name at the same time\"\n )\n self._reset(False)\n self._save()\n else:\n old_study = self.default_study # try to preserve it if possible\n self._reset(False)\n self._state[\"username\"] = username\n self._state[\"cookie\"] = cookie\n self._state[\"info\"] = info\n self._state[\"study\"] = None\n if old_study is not None:\n inf2 = self.user_info\n if inf2 is not None:\n ids = inf2.matching_study_ids(old_study)\n if len(ids) == 1:\n self._state[\"study\"] = ids[0]\n self._save()", "def on_nicknameinuse(self, conn, event) -> None:\n self._nickname += '_'\n conn.nick(self._nickname)", "def on_userjoin(self, server, channel, nick):\n self._request_pwd_change(nick)", "def send_as_nick(self, command, msg):\n self._write(f':{self.ident.nick} {command} {msg}')", "def op(self,nick):\n self.logger.debug(\"giving ops to %s\" % nick)\n self.connection.mode(self.config[\"IRC/channel\"],\"+o \"+nick)", "def _onSelectUser(self, event):\n c.loadUser(self.userbox.GetValue())\n if self.userbox.GetSelection() == 0:\n self.btnUserSettings.Enable(False)\n else:\n self.btnUserSettings.Enable(True)", "def update_username(self, old_username, new_username):\n raise NotImplementedError()", "def set_username(old_name, new_name):\n if not validate_username(new_name):\n return \"kรคyttรคjรคnimi on vรครคrรครค muotoa\"\n if user_exists(new_name):\n return \"kรคyttรคjรคnimi on jo kรคytรถssรค\"\n sql = \"UPDATE users \" \\\n \"SET username=:new \" \\\n \"WHERE username=:old\"\n db.session.execute(sql, {\"new\": new_name, \"old\": 
old_name})\n db.session.commit()\n return \"ok\"", "def changeName(self, userId, newName):\n\t\turi = \"{}/users/{}\".format(tt_base_uri, userId)\n\t\turi_args = {\"name\":newName}\n\t\tr = requests.put(uri, json=uri_args, cookies={\"PLAY_SESSION\":self.play_session, \"__uvt\":\"\"})\n\t\tprint(\"change name: status code:\", r.status_code)", "def rename(self,newName):\n self.userName = newName", "async def botname(ctx, *, new_name=None):\n if ctx.message.channel.name.lower() not in bot_channels:\n return\n\n member_roles = ctx.message.author.roles\n member_admin = discord.utils.find(lambda r: r.name.lower() in admin_roles, member_roles)\n if member_admin is not None:\n bot_member = discord.utils.find(lambda m: m.id == amor_manager.user.id, ctx.message.server.members)\n await amor_manager.change_nickname(bot_member, new_name)", "def refresh_public_nickname(user: User) -> str:\n nick = _random_nickname()\n while DBDiscussionSession.query(User).filter_by(public_nickname=nick).first():\n nick = _random_nickname()\n\n LOG.debug(\"User %s -> %s\", user.public_nickname, nick)\n user.set_public_nickname(nick)\n\n return nick", "def update_user(BrokerId=None, ConsoleAccess=None, Groups=None, Password=None, Username=None):\n pass", "def donick(self, nick, *args, **kwargs):\n pass", "def addpoint_command(jenni, input):\n nick = input.group(2)\n if nick:\n nick = nick.strip().split()[0]\n scores.editpoints(jenni, input, nick, True)", "def edit_profile(self, name, username, email):\n return self.app.post('/_editProfile', data = dict(\n name = name,\n username = username,\n email = email\n ), follow_redirects = True)", "async def massnick(ctx, nickname: str):\n server = ctx.message.server\n counter = 0\n for user in server.members:\n if user.nick is None:\n nickname = \"{} {}\".format(nickname, user.name)\n else:\n nickname = \"{} {}\".format(nickname, user.nick)\n try:\n await bot.change_nickname(user, nickname)\n except discord.HTTPException:\n counter += 1\n continue\n await bot.say(\"Finished nicknaming server. 
{} nicknames could not be completed.\".format(counter))", "def alterCollidedNick(self, nickname):\n return nickname + '^'", "def set_real_name(user: User, real_name: str=\"\") -> Result:\n current, *rest = user.pw_gecos.split(\",\")\n if current == real_name:\n return Result(State.unchanged)\n command([\"/usr/bin/chfn\", \"--full-name\", real_name, user.pw_name])\n user.pw_gecos = \",\".join([real_name, *rest])\n return Result(State.success)", "def set_username(self, value):\n raise NotImplementedError('set_username')", "def put_userdata(self, server, channame, nick, datapair):\n skey = server.lower()\n ckey = irc.strings.lower(channame)\n if skey in self.serverchans and ckey in self.serverchans[skey]:\n self.serverchans[skey][ckey].put_userdata(nick, *datapair)", "def ClientUserInfoChanged(self, clientnum):\n cl = Client(clientnum)\n \n current_name = cl[\"name\"]\n new_name = current_name[::-1] #reverse the string\n cl[\"name\"] = new_name #update userinfo (effective in game)\n #short version : cl[\"name\"] = cl_[\"name\"][::-1]", "def test_overrideAlterCollidedNick(self):\n nick = \"foo\"\n self.protocol.alterCollidedNick = lambda nick: nick + \"***\"\n self.protocol.register(nick)\n self.protocol.irc_ERR_NICKNAMEINUSE(\"prefix\", [\"param\"])\n lastLine = self.getLastLine(self.transport)\n self.assertEqual(lastLine, \"NICK {}\".format(nick + \"***\"))", "def edit_user():\n if CURR_USER_KEY in session:\n user = g.user\n form = ProfileEditForm(obj=user)\n\n if form.validate_on_submit():\n user.first_name = form.first_name.data\n user.last_name = form.last_name.data\n user.description = form.description.data\n user.email = form.email.data\n user.image_url = form.image_url.data or \"/static/images/default-pic.png\"\n\n db.session.commit()\n\n flash(\"Profile edited.\")\n return redirect(\"/profile\")\n\n return render_template('/profile/edit-form.html', form=form)\n else:\n return redirect('/login')", "def test_040_update_user(self):\n\n testflow.step(\"Updating user %s\", TEST_USER2)\n assert USER_CLI.run(\n 'edit',\n TEST_USER2,\n attribute='firstName=userX2',\n )[0]", "def op(phenny, input):\n if not input.admin or not input.sender.startswith('#'):\n return\n nick = input.group(2)\n verify = auth_check(phenny, input.nick, nick)\n if verify:\n channel = input.sender\n if not nick:\n nick = input.nick\n phenny.write(['MODE', channel, \"+o\", nick])", "def update(self, user: U) -> None:\n ...", "def on_profile_select(self, index):\n\n old_index = self._selidx\n\n if index == old_index:\n # ignore this; it just means that the user clicked cancel\n # in the \"save changes\" dialog and we're resetting the\n # displayed profile name.\n self.LOGGER << \"Resetting profile name\"\n return\n\n if index < 0:\n # we have a problem...\n self.LOGGER.error(\"No profile chosen?!\")\n else:\n # use userRole to get the 'on-disk' name of the profile\n new_profile = self._selector.currentData(\n Qt.UserRole)\n\n # if no active profile, just load the selected one.\n # if somehow selected the same profile, do nothing\n\n if self.Manager.profile and self.Manager.profile.name == new_profile:\n return\n\n # check for unsaved changes to the mod-list\n reply = self._parent.table_prompt_if_unsaved()\n\n # only continue to change profile if user does NOT\n # click cancel (or if there are no changes to save)\n if reply == QtWidgets.QMessageBox.Cancel:\n # reset the text in the profile selector;\n # this SHOULDn't enter an infinite loop because,\n # since we haven't yet changed\n # self.profile_selector_index, now 
'index' will be\n # the same as 'old_index' at the top of this\n # function and nothing else in the program will\n # change (just the name shown in the profile\n # selector)\n self._selector.setCurrentIndex(old_index)\n else:\n self.LOGGER.info(\n f\"Activating profile '{new_profile}'\")\n\n if self.Manager.activate_profile(new_profile):\n\n self.LOGGER << \"Resetting views for new profile\"\n\n # update our variable which tracks the current index\n self._selidx = index\n\n # No => \"Don't save changes, drop them\"\n # if reply == QtWidgets.QMessageBox.No:\n\n # Whether they clicked \"no\" or not, we\n # don't bother reverting, mods list is getting\n # reset; just disable the buttons\n # self.mod_table.undo_stack.clear()\n\n # update name\n self._profile_name = new_profile\n # disable/enable buttons as needed\n self.check_enable_actions()\n\n # tell rest of app about new profile\n self.newProfileLoaded.emit(new_profile)\n else:\n self.LOGGER.error(\"Profile Activation failed.\")\n self._selector.setCurrentIndex(old_index)", "def userFollowers(nick):\n if (len(nick) != 1):\n print \"Has d'introduir nomรฉs un nick\"\n return\n i.userFollow(nick[0])", "def change_server(self, nickname, ip, secret_code):\n secret_code = secret_code or \"pw\"\n cmd = '{}serverRequestPlayerChangeServer \"{}\" {} {}'.format(self.console, Commands.aquote(nickname),\n ip, Commands.aquote(secret_code))\n self.write_command(cmd)", "def change_datta(self):\n column = \"\"\n while column not in ['lastname', 'firstname', 'pseudo', 'email', 'age', 'password']:\n column = input(\"entry champ to change \\n[lastname][firstname], [pseudo], [email], [age], [password]\")\n datta = input(\"enter new datta:\")\n self.user_choice.initialize_connection()\n self.user_choice.cursor.execute(\"UPDATE users set \" + column + \" = %s WHERE pseudo = %s;\", (datta, self.pseudo,))\n self.user_choice.connection.commit()\n self.user_choice.close_connection()", "def onUsernameFocusIn(self,event):\n if self.obj1.get() == \"New Username\":\n self.obj1.delete(0,END)", "async def change_display_name(self, display_name: str):\n re = await self.request.request(url=f'https://users.roblox.com/v1/users/authenticated')\n user_id = re['id']\n data = {\"newDisplayName\": f\"{display_name}\"}\n _ok = await self.request.request(url=f\"https://users.roblox.com/v1/users/{user_id}/display-names\", data=data,\n method=\"patch\")\n return _ok", "async def on_member_update(old, updated):\n if old.nick != updated.nick:\n boterate.update_member(updated)", "def do_user_update():\n targetUsers = User.query.filter_by(id=request.form['id']).all()\n if not any(targetUsers):\n return user_list(\"Unknown user.\")\n\n targetUser = targetUsers[0]\n\n targetUser.first_name = request.form['first_name']\n targetUser.name = request.form['name']\n targetUser.nick = request.form['nick']\n targetUser.mail = request.form['mail']\n targetUser.role = request.form['role']\n targetUser.state = request.form['state']\n targetUser.gender = request.form['gender']\n targetUser.meter_id = request.form['meter_id']\n targetUser.group_id = request.form['group_id']\n\n db.session.commit()\n return user_list(\"Updated user \" + targetUser.name)", "def mod_user(self, username, data):\n headers = {\"user-agent\": self.u_agent}\n req_url = self.normalize_admin_url(u\"users/{}\".format(username))\n res = requests.put(\n req_url,\n headers=headers,\n auth=self.auth,\n data=json.dumps(data),\n verify=False,\n )\n if res.status_code == 200:\n return Response(0, u\"User {} has been 
modified\".format(username))\n else:\n return Response(res.status_code, res)", "async def change_username(self, new_username: str, password: str):\n\n data = {\"username\": f\"{new_username}\", \"password\": f\"{password}\"}\n ee = await self.request.request(url=f'https://auth.roblox.com/v2/username', method='post', data=data)\n return ee", "def change_nickname(_) -> int:\n return 1 << 26", "def change_nickname(_) -> int:\n return 1 << 26", "def change_client_name(self, name, client):\n if self.name_is_unique(name):\n client.set_name(name)\n self.send_message('Usuario actualizado exitosamente.', client.get_socket())\n else:\n self.send_message('Nombre repetido.', client.get_socket())", "def edit_person(self, treeview):\n model, iter_ = treeview.get_selection().get_selected()\n if iter_:\n handle = model.get_value(iter_, 0)\n try:\n person = self.dbstate.db.get_person_from_handle(handle)\n EditPerson(self.dbstate, self.uistate, [], person)\n except WindowActiveError:\n pass", "def edit_user(user_id):\n if request.method == 'GET':\n # init form with current user:\n form = ProfileForm(\n nickname = session[Session.PROFILE][\"nickname\"], \n location = session[Session.PROFILE][\"location\"],\n about_me = session[Session.PROFILE][\"about_me\"]\n )\n if request.method == 'POST': \n # init form with POSTed form:\n form = ProfileForm(request.form)\n\n if form.validate(): \n # update backend:\n response = service_user_management.patch(\n id = f'auth0|{user_id}', \n nickname = form.nickname.data, \n location = form.location.data,\n about_me = form.about_me.data\n )\n\n # success:\n if 'identities' in response: \n try:\n # update db:\n delegated_user = DelegatedUser.query.get_or_404(\n user_id, \n description='There is no user with id={}'.format(user_id)\n )\n delegated_user.nickname = form.nickname.data\n # update:\n db.session.add(delegated_user)\n # write\n db.session.commit()\n\n # update session:\n session[Session.PROFILE][\"nickname\"] = form.nickname.data\n session[Session.PROFILE][\"location\"] = form.location.data\n session[Session.PROFILE][\"about_me\"] = form.about_me.data\n \n # on successful profile update, flash success\n flash('Your profile was successfully updated.')\n\n return redirect(url_for('.show_user', user_id = user_id))\n except:\n db.session.rollback()\n # on unsuccessful registration, flash an error instead.\n flash('An error occurred. 
New account could not be created.')\n finally:\n db.session.close()\n # failure:\n else:\n flash(response['message']) \n else:\n # for debugging only:\n flash(form.errors)\n \n return render_template('users/forms/user.html', form=form, user_id=user_id)", "async def setuserinfo(self, ctx, server: str, user_uuid: str, user_intid: str):\n self.settings.setUserInfo(server, user_uuid, user_intid)\n await ctx.send(inline('Done'))", "def edit_show_user(user_id):\n edited_user = User.query.get_or_404(user_id)\n\n edited_user.first_name = request.form['first_name']\n edited_user.last_name = request.form['last_name']\n edited_user.image_url = request.form['image_url']\n\n db.session.add(edited_user)\n db.session.commit()\n\n return redirect('/')", "async def omar(self, ctx, user):\n user = user.replace(\"<\",\"\").replace(\">\",\"\").replace(\"@\",\"\").replace(\"!\",\"\")\n print(user)\n user_member = await ctx.guild.fetch_member(user)\n if user_member is not None:\n kick_channel = await ctx.guild.create_voice_channel(\"kicked\")\n await user_member.move_to(kick_channel, reason=\"you have been kicked by Omar.\")\n await kick_channel.delete()\n else:\n print(\"user invalid for omar()\")", "def __str__(self):\n return self.nickname", "def _onUserSettings(self, event):\n dialog = sc.UserSettingsDialog(self)\n if dialog.ShowModal() == wx.ID_OK:\n dialog.saveSettings()\n dialog.Destroy()\n \n sel = self.userbox.GetSelection()\n usernames = ['None'] + c.getUserNames()\n self.userbox.SetItems(usernames)\n self.userbox.SetSelection(sel)", "def update_user(username):\n try:\n member = Member.objects.get(username=username)\n except Member.DoesNotExist:\n pass\n else:\n member.save()", "def userJoined(self, user, channel):\n ss = self.findSessions(channel)[0]\n user = user.decode(ss.encoding)\n r = ss.addNick(user)\n self.sendResponse(r)" ]
[ "0.776619", "0.77194846", "0.7687911", "0.7260133", "0.7148028", "0.70996314", "0.7096527", "0.70569235", "0.7005244", "0.7001775", "0.69678086", "0.6683608", "0.6653393", "0.6652248", "0.6580551", "0.64776564", "0.63855416", "0.6336465", "0.6322602", "0.6313675", "0.62873036", "0.62786555", "0.6183239", "0.61766505", "0.6162467", "0.61622643", "0.6114756", "0.61043", "0.609861", "0.6088752", "0.599975", "0.59648776", "0.59624314", "0.59548473", "0.59462065", "0.5942442", "0.5940937", "0.59344345", "0.5906818", "0.588825", "0.5830249", "0.58184093", "0.58156717", "0.57688177", "0.5768417", "0.57676953", "0.5764266", "0.5755436", "0.5743136", "0.5738721", "0.57262135", "0.5725145", "0.5717317", "0.56862354", "0.56852347", "0.5683087", "0.5647597", "0.56392884", "0.5636468", "0.5623809", "0.56068414", "0.5606647", "0.5601927", "0.55871946", "0.5580598", "0.557527", "0.5575175", "0.55702066", "0.5548261", "0.55305445", "0.5519626", "0.5509695", "0.5509302", "0.5504132", "0.54973775", "0.54875916", "0.54836935", "0.5477924", "0.5475466", "0.5474827", "0.5474627", "0.54586285", "0.545262", "0.5447483", "0.5445658", "0.54410875", "0.54358053", "0.5434858", "0.54172945", "0.54172945", "0.5409318", "0.54086375", "0.540242", "0.539921", "0.53958863", "0.5384988", "0.536172", "0.5361072", "0.53559184", "0.5354666" ]
0.78686786
0
Do I love the cake or nah?
async def cake_love( cake_name: ('str', 'Please pick a cake.') ): return f'Hmmm, yes, I love {cake_name} {EMOJI_CAKE} as well.'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hey(self, msg):\n if issilence(msg):\n return \"Fine. Be that way.\"\n elif isshouting(msg):\n return \"Woah, chill out!\"\n elif isquestion(msg):\n return \"Sure.\"\n else:\n return \"Whatever.\"", "def cheer(self, songs):\n if self.favourite_song in songs:\n return \"Whoo!\"", "def isTrueConstant(self, sentence):", "def hey(self, sentence=\"\"):\n if sentence == \"\" or sentence.replace(\" \", \"\") == \"\":\n return \"Fine. Be that way!\"\n if sentence.isupper():\n return \"Woah, chill out!\"\n if sentence[-1] == \"?\":\n return \"Sure.\"\n return \"Whatever.\"", "def hey(what):\n if what == what.upper() and not only_numbers(what):\n return 'Whoa, chill out!'\n if what[-1:] == '?':\n return 'Sure.'\n if only_silence(what):\n return 'Fine. Be that way!'\n return 'Whatever.'", "def hey(request):\n if _is_nothing(request):\n return 'Fine. Be that way!'\n # Yells are more impactful than questions,\n # so a yelled question gets a yell response\n if _is_yell(request):\n return 'Woah, chill out!'\n if _is_question(request):\n return 'Sure.'\n return 'Whatever.'", "def one_v_one(description):\n return \"forebrain\" in description", "def hey(self, string):\n\n if self.nothing(string):\n return \"Fine. Be that way!\"\n\n if self.yelling(string):\n return \"Woah, chill out!\"\n\n if self.question(string):\n return \"Sure.\"\n\n if self.anything(string):\n return \"Whatever.\"", "def life_sentence(x): \n if x == 'Life':\n return 1\n else:\n return 0", "def hey(phrase):\n\n # Bob has spent years building up his knowledge base. He knows\n # four whole things.\n knowledge = (nostatement, anger, question, easteregg)\n\n # Loop through all of his thoughts. Bob isn't much of a multi-tasker\n for thought in knowledge:\n response = thought(phrase)\n\n # if the thought registered, spit it out! Don't think to hard!\n if response:\n return response\n\n # If he didn't understand what you said, he gives up and returns\n # to being dumb.\n return 'Whatever.'", "def is_lyrics_approved():", "def life_or_death_sentence(x): \n if x == 'Life' or x == 'Death':\n return 1\n else:\n return 0", "def customer_wants_condiments(self):\n answer = raw_input(\"Would you like Lemon? (y/n)\").lower()\n if answer.startswith('y'):\n return True\n else:\n return False", "def won():\n # Oops, it's one, not won. Let's deprecate this and get it right.\n return 1", "def _likely_yelling_in(content):\n return (\n # All upper case is yelling\n (content == content.upper())\n \n # But no letters at all means otherwise\n and (content.lower() != content.upper()) \n )", "def check_for_greeting(sentence):\n for word in sentence.words:\n if word.lower() in greetings:\n return True", "async def best():\n await bot.say('Nargacuga is the best Monster. Are you casual?')", "def test_get_opposite_meaning_add(self):\n\t\tobj_ut = sentiment.get_opposite_meaning(\n\t\t\t\"good\")\n\t\tself.assertEqual(obj_ut, \n\t\t\t\"(not|dont|cant|wont|couldnt|shouldnt|never) (\\w+ ){0,2} ?good\")", "def hasConstantForm(self, sentence):", "def is_cool(name):\n if (name == \"Joe\") or (name == \"John\") or (name == \"Stephen\"):\n return True\n else:\n return False", "def healthy(food, is_healthy):\n if not isinstance(is_healthy, bool):\n raise ValueError('is_healthy must be a bool')\n ending = 'because my body is a temple'\n\n if not is_healthy:\n return \"I'm eating {} because YOLO\".format(food)\n return \"I'm eating {} {} \".format(food, ending)", "def hey(sentence):\n if not sentence.strip():\n answer = 'Fine. 
Be that way!'\n elif sentence.isupper():\n answer = 'Woah, chill out!'\n elif sentence.endswith(\"?\"):\n answer = 'Sure.'\n else:\n answer = 'Whatever.'\n return answer", "def substantiate():", "def goal_test(c):\n return c == GOAL_CUBE", "async def wherearemypants():\n await bot.say('justin is a known pants thief. Not saying he took them but he totally probably took them')", "def canned():\n return (next_phrase(\"we proceed as follows\") |\n (next_word('the') + \n first_word('result lemma theorem proposition corollary') +\n next_word('now').possibly() +\n next_word('follows')) |\n next_phrase('the other cases are similar') |\n (next_phrase('the proof is')+ first_word('obvious trivial easy routine'))).nil().expect('canned')", "def test_get_opposite_meaning_subtract(self):\n\t\tobj_ut = sentiment.get_opposite_meaning(\n\t\t\t\"(not|dont|cant|wont|couldnt|shouldnt|never) (\\w+ ){0,2} ?good\")\n\t\tself.assertEqual(obj_ut, \"good\")", "def wife(backpack):\n print(\"\\nYour wife says: \")\n if \"corn\" in backpack:\n if backpack['corn'][0] < 20:\n print(\"-You need to gather 20 corn cob so get back to work! \")\n enter()\n else:\n print(\"-Ahh you are a bastard but I know your dream...\\nNow go to city and buy your ticket my love :* \")\n enter()\n return True # because of this we can change lvl\n if \"corn\" not in backpack:\n print(\"-Where have u been u f...... drunkard, \\nget back to work and collect 20 corn cobs! \")\n enter()", "def hey(phrase):\n\n phrase = phrase.strip()\n question = re.compile(r'.*\\?$')\n letters = ''.join([l for l in phrase if re.match(r'\\w', l)])\n yelling = False\n\n if (letters.isupper() is True):\n yelling = True\n\n if (yelling is True):\n if (question.match(phrase)):\n return \"Calm down, I know what I'm doing!\"\n else:\n return 'Whoa, chill out!'\n\n if (question.match(phrase)):\n return 'Sure.'\n\n if (letters.strip() == ''):\n return 'Fine. Be that way!'\n\n return 'Whatever.'", "def test_get_good_evening():\n assert get_greetings.get_good_evening().upper() == \"GOOD EVENING!\"", "def hey(what):\n\tif len(what) == 0 or what.isspace():\n\t\treturn \"Fine. Be that way!\"\n\t\"\"\"Checks if string is in upper case(Yelling)\"\"\"\n\tif what.isupper():\n\t\treturn \"Whoa, chill out!\"\n\t\"\"\"Iterates through string backwards looking for a ?, stopping if a non-\n\twhitespace character is found(Question)\"\"\"\n\tfor character in reversed(what):\n\t\tif character == '?':\n\t\t\treturn \"Sure.\"\n\t\tif character != \" \":\n\t\t\tbreak\n\t\"\"\"Catch all response for any other input\"\"\"\n\treturn \"Whatever.\"", "def think(s):", "def yes_straw_warts():\n check50.run(\"python3 palindrome.py\"\n ).stdout(\"Word? 
\", regex=False\n ).stdin(\"straw warts\", prompt=False\n ).stdout(\"YES\", regex=False\n ).exit()", "def opinion():\n pass", "def isspeech(phone):\n return phone not in OTHERS", "def happiness(self):\n return ( self.girl.happiness())\n # self.boy.happiness(self.girl) +", "def decide():", "def yes():\n return 42", "def death_sentence(x): \n if x == 'Death':\n return 1\n else:\n return 0", "def basic_check(word):\n if word[-1] == \"b\" or word[-1] == \"g\":\n return False\n consonant_counter = 0\n for char in word:\n if char in VOWELS:\n consonant_counter = 0\n else:\n consonant_counter += 1\n if consonant_counter >= 3:\n return False\n return True", "def can_cool(self) -> bool:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"can_cool\"))\r\n return True", "def yes_tenet():\n check50.run(\"python3 palindrome.py\"\n ).stdout(\"Word? \", regex=False\n ).stdin(\"tenet\", prompt=False\n ).stdout(\"YES\", regex=False\n ).exit()", "def test_theft_and_stealing(self):", "def is_hungry(self) -> bool:\n if self.eat_count <= 3:\n return True\n else:\n return False", "def are_ere_future_quiz(verb, pronoun):\n return functions.conjugate_future_are_ere_verb(verb, pronoun, \"futuro\")", "def yell():\n ground_description_int = GROUND_FEATURES_LIST[ZERO_BASE_PLYR_POS]\n if ground_description_int != 12:\n printmessage(\"You yell, but nobody hears you.\", 5, CYAN, 1)\n else:\n printmessage(\"You have found the ranger, amd won the game!\", 5, GREEN, 3)\n die(\"ranger\")", "def checkFood(self, food):\n pass", "def is_cooling(action_data):\n return (action_data == COOLING_ACTION) | (action_data == TWO_STAGE_COOLING_ACTION)", "def is_offensive(drug_name, bad_words):\n\n for bad_word in bad_words:\n if bad_word in drug_name:\n return True\n return False", "def is_equivalence(self) -> bool:", "def is_trained(self) -> bool:", "def verb_ending_good(verb):\n try:\n is_verb = verb[-3:]\n if is_verb in [\"are\", \"ere\", \"ire\"]:\n return True\n else:\n return False\n except TypeError:\n raise TypeError", "def starve_checker(hunger):\n death_chance = -30\n hunger -= 1\n\n if (death_chance * (hunger-1)) > random.randint(1,100):\n death = True\n else:\n color.write(\"Somehow, through divine intervention, you manage to survive though the pain, although you know that the end is near. 
You should definitely eat something.\\n\",\"ERROR\")\n death = False\n return death", "def answer(self) -> bool:", "def areYouPlayingBanjo(name):\n if name[:1] == 'R' or name[:1] == 'r':\n return name + \" plays banjo\"\n return name + \" does not play banjo\"", "def getStatusMessage():\n\n now = datetime.datetime.now()\n hour = now.hour\n mood = Sentience.getPrimaryMood()\n exp_mood = Sentience.getExposedPositivity()\n\n random.seed((time.time()//86400*86400))\n\n #sleeping\n if not (9 <= hour < 21) and (mood <= 0.5 or not 7 <= hour < 23):\n if exp_mood < -0.1:\n return random.choice([\n \"bleh\",\n \"not sleeping well\",\n \"why's chat so noisy\",\n \"can't sleep\",\n \"do not disturb pls thx\",\n ])\n\n if mood < 0:\n return random.choice([\n \"crying myself to sleep rn\",\n \":(\",\n \"had a nightmare\",\n \"can't sleep\",\n \"._.\"\n ])\n\n return random.choice([\n \"zzz...\",\n \"sweet dreams\",\n \"good night\",\n \"sleeping...\",\n \"having some rest\"\n ])\n\n if Sentience.isExposedPositivityOverloaded():\n return random.choice([\n \"i'm done\",\n \"too much\"\n \"goodbye\",\n \"tired\",\n \"need rest\",\n ])\n\n #happy\n if mood >= 0.7:\n return random.choice([\n \":D\",\n \"great day\",\n \"happy happy\",\n \"hehe\",\n \"good times\",\n \"yay\",\n \"what's up\",\n \"happiness\",\n \"nice day\",\n ])\n #moody-ish\n if mood >= 0.4:\n return random.choice([\n \"hmm\",\n \"yeet\",\n \"bleh\",\n \"oh\",\n \"moody rn\",\n \"nothing\"\n ])\n #more moody\n if mood >= -0.3:\n return random.choice([\n \"moody rn\",\n \"not happy\",\n \"i'm fine.\",\n \"bleh\",\n \"._.\",\n \":(\",\n ])\n #very unhappy\n return random.choice([\n \"sad\",\n \"cries\",\n \"roar\",\n \":_(\",\n \">:(\",\n \"mad\",\n \"angry\",\n \"I'M FINE.\",\n \"bleh\",\n \"no\",\n ])", "def check_for_comment_about_bot(pronoun, noun, adjective):\n resp = None\n if pronoun == 'I' and (noun or adjective):\n pprint(\"WORRRRRRRRRRKING\")\n if noun:\n if random.choice((True, False)):\n resp = random.choice(SELF_VERBS_WITH_NOUN_CAPS_PLURAL).format(**{'noun': noun.pluralize().capitalize()})\n else:\n resp = random.choice(SELF_VERBS_WITH_NOUN_LOWER).format(**{'noun': noun})\n else:\n resp = random.choice(SELF_VERBS_WITH_ADJECTIVE).format(**{'adjective': adjective})\n return resp", "def isTrueOrDoesSentence(self, sentence):\n name = sentence.__name__\n return name == GdlPool.TRUE or name == GdlPool.DOES", "def know_what_i_mean(wink, numwink=2):\n winks = (wink * numwink).strip()\n nudges = ('nudge ' * numwink).strip()\n retstr = 'Know what I mean? 
{}, {}'.format(winks, nudges)\n return retstr", "def is_consonant(text):\n return text.lower() in AVRO_CONSONANTS", "def test_effect(self):\n self.check_search(\n dict(similar_to=u'icy wind'),\n [ u'Bubble', u'BubbleBeam', u'Constrict',\n u'Icy Wind', u'Mud Shot', u'Rock Tomb' ],\n 'searching by effect',\n exact=True,\n )\n self.check_search(\n dict(similar_to=u'splash'),\n [u'Splash'],\n 'searching by unique effect',\n exact=True,\n )", "def is_rainbow(msg: str = 'I guess you are not my little pog champ :3'):\n\n async def check(ctx):\n rainbow = ctx.author.id == ctx.bot.owner_id\n if not rainbow:\n await ctx.send(msg)\n return rainbow\n\n return commands.check(check)", "def is_artificial(self):\n\t\treturn 0", "def test_third_equal(self):\n self.assertEqual(heaviest_word(\"take me to semynak\"), \"semynak\")", "def isIceAct(string, pos):\n return string == 0 and pos == 1", "def _is_happy(cake_stack):\n if re.search('^\\++$', cake_stack) is not None:\n return True\n else:\n return False", "def is_an_oak(name):\n if 'quercus' in name.lower():\n return True\n else:\n return False", "def est_fruit(self): \n if self.age > 20 and self.age <31 and (self.fecondee==True):\n return True\n else:\n return False", "def is_virtual(entry):\n\n if entry.get('text', '') == '':\n return 'No'\n\n # search for Invasion split cards\n regex = search('\\[This is half of the split card (.+)\\]', entry['text'])\n if regex is not None:\n return 'Yes: ' + regex.group(1)\n\n # search for Kamigawa flip cards\n regex = search('\\[Flips from (.+)\\]', entry['text'])\n if regex is not None:\n return 'Yes: ' + regex.group(1)\n\n # search for Innistrad shapeshifters\n regex = search('\\[(|.+)Back face. Transforms into (.+)\\.\\]', entry['text'])\n if regex is not None:\n return 'Yes: ' + regex.group(2)\n\n return 'No'", "def is_food_in_set(sentence, possible_foods):\n for token in sentence.split(\" \"):\n if token in possible_foods:\n return token\n return None", "def __isVerb__(self, word):\n self.verbs = ('go', 'stop', 'kill', 'eat')\n for verb in self.verbs:\n if verb == word:\n return ('verb', word), True\n return None, False", "def won(s,n):", "def is_voiced(self, kana):\n return self.to_consonant_line(kana) in self.voiced_consonants", "def hey(content):\n pending_response = _RESPONSE['dismissive']\n if not content or not content.strip():\n # For empty content, return the expected string\n pending_response = _RESPONSE['meh']\n elif content[-1] == '?' 
and not _likely_yelling_in(content):\n # If you end with a ?, interpret it as a question.\n pending_response = _RESPONSE['non-committal']\n elif _likely_yelling_in(content):\n # If you provide a !, interpret it as yelling.\n pending_response = _RESPONSE['defensive']\n return pending_response", "def negation_check(self,sentence):", "def dead(self):\n if self.__scramble.score == 0: # if the score is 0 it's a lose\n return \"You Lose!\"\n elif self.__scramble.sentance == self.__scramble.scramble: # if the scramble sentence is solved it's a win\n return \"You Win!\"\n else:\n return True # otherwise we cant decide yet", "def test_it_can_be_old():\n larry = Hobbit(\"Larry\")\n larry.age = 100\n assert larry.is_adult() is True\n assert larry.is_old() is False\n larry.celebrate_birthday()\n assert larry.is_old() is True", "def asking(self):\n return 'Sure.'", "def doNotTrack(self):\n # return False\n return 'lol'", "def willShrinkThisTurn(city):\n\treturn city.getPopulation() > 1 and city.getFood() + city.foodDifference(True) < 0", "def test_second_equal(self):\n self.assertEqual(heaviest_word(\"what time are we climbing up to the volcano\"), \"volcano\")", "def give_names(x): \n if x == 0:\n return 'Lost'\n else:\n return 'Won/Broke Even'", "def ask_to_continue():\n\n bored = raw_input(\"Do you want another suggestion?(yes/no) \").lower()\n\n if bored == 'no':\n print\n print \"Great have fun!\"\n return False\n\n return True", "def is_hot(self):\n return 'Hot' in self.name", "def tt_entails(knowledge_base, sentence):\n return False", "def check_happiness(content):\n baseline = 100\n happiness = [\"Smile\"]\n label = 5\n # print content\n emotion_time = content[0][1]\n # print 'emotion_time',emotion_time\n for c in content:\n for h in happiness:\n # print h\n if c[0] == h and c[1] >= baseline:\n print 'emotion & label',emotion_time, label\n return emotion_time, label", "def is_pronounced(word):\n pronounced = [\"pronounced\", \"pronunciation\", \"pronounsed\", \"pronouced\", \"pronouned\", \\\n \"pronounciated\", \"prenounced\", \"prounouced\", \"pernounced\", \"purnounced\", \\\n \"pronoused\", \"pronuced\", \"pronunced\", \"pronnounced\", \"pronanced\", \\\n \"prononced\", \"prounounced\", \"prononsed\", \"prononuced\", \"pernunciation\", \\\n \"prononciation\", \"prounciation\", \"pronouciation\", \"pronounciated\", \\\n \"pronounciation\", \"pronanciation\", \"prononcation\", \"pernounciation\", \\\n \"prononceation\", \"prenunciation\", \"prononseation\", \"prounouciation\", \\\n \"pronuniation\", \"pronunication\", \"prenounciation\", \"pronuntiation\", \\\n \"pronuncition\", \"pronociation\", \"prenunsiation\", \"pronounsation\", \\\n \"pronounceation\", \"pronounication\", \"pronauciation\", \"pronounciacion\", \\\n \"pronounsiation\"]\n for p in pronounced:\n if word.lower() == p:\n return True\n return False", "def is_it_ingredient(word):\n reject_synsets = ['meal.n.01', 'meal.n.02', 'dish.n.02', 'vitamin.n.01']\n reject_synsets = set(wordnet.synset(w) for w in reject_synsets)\n accept_synsets = ['food.n.01', 'food.n.02']\n accept_synsets = set(wordnet.synset(w) for w in accept_synsets)\n for word_synset in wordnet.synsets(word, wordnet.NOUN):\n all_synsets = set(word_synset.closure(lambda s: s.hypernyms()))\n all_synsets.add(word_synset)\n for synset in reject_synsets:\n if synset in all_synsets:\n return False\n for synset in accept_synsets:\n if synset in all_synsets:\n return True", "def check_happiness(content):\n baseline = 50\n happiness = [\"Smile\"]\n label = 5\n # print 
content\n emotion_time = content[0][1]\n # print 'emotion_time',emotion_time\n for c in content:\n for h in happiness:\n # print h\n if c[0] == h and c[1] >= baseline:\n print 'emotion & label',emotion_time, label\n return emotion_time, label", "def match(self, sentence) -> bool:\r\n pass", "def brain_status(self):\r\n return 'thinking...'", "async def favor(self, ctx):\n east = ctx.guild.get_member(339119069066297355)\n if not east or east.status != discord.Status.online:\n await ctx.send(f\"I'm afraid I can't do that, {ctx.author.display_name}.\")\n return\n await ctx.send(\"&East, could I ask you for a favor? I need someone to verify my code.\")\n await asyncio.sleep(2)\n async with ctx.typing():\n await asyncio.sleep(1)\n await ctx.send(\"Oh my. Well, if you insist ;)\")", "def conjugate_present_are_verb(verb, pronoun, tense):\n\n are_endings = {\"io\": \"o\", \"tu\": \"i\", \"lui\": \"a\", \"lei\": \"a\", \"noi\": \"iamo\", \"voi\": \"ate\", \"loro\": \"ano\"}\n giare_endings = {\"io\": \"io\", \"tu\": \"i\", \"lui\": \"ia\", \"lei\": \"ia\", \"noi\": \"iamo\", \"voi\": \"iate\", \"loro\": \"iano\"}\n ciare_endings = {\"io\": \"o\", \"tu\": \"\", \"lui\": \"a\", \"lei\": \"a\", \"noi\": \"amo\", \"voi\": \"ate\", \"loro\": \"ano\"}\n add_h = {\"io\": \"o\", \"tu\": \"hi\", \"lui\": \"a\", \"lei\": \"a\", \"noi\": \"hiamo\", \"voi\": \"ate\", \"loro\": \"ano\"}\n irregular_are = [\"fare\", \"andare\"]\n fare = {\"io\": \"faccio\", \"tu\": \"fai\", \"lei\": \"fa\", \"lui\": \"fa\", \"noi\": \"facciamo\", \"voi\": \"fate\", \"loro\": \"fanno\"}\n andare = {\"io\": \"vado\", \"tu\": \"vai\", \"lui\": \"va\", \"lei\": \"va\", \"noi\": \"andiamo\", \"voi\": \"andate\", \"loro\": \"vanno\"}\n\n # this section checks for the irregular verbs fare, andare\n if verb in irregular_are:\n if verb == \"fare\":\n return fare[pronoun]\n else:\n return andare[pronoun]\n\n # this section checks for spelling issues like with mancare in order to preserve hard \"k\" sound of infinitive\n # if it's a verb like mancare then the if section adds an \"h\" for the spelling to preserve hard \"k\" sound\n # if it's a normal -are verb, then the else section conjugates it normally\n if verb[-5:] == \"giare\":\n stripped_verb = strip_off_ending(verb, tense)\n new_verb = stripped_verb + giare_endings[pronoun]\n return new_verb\n if verb[-5:] == \"ciare\":\n stripped_verb = strip_off_ending(verb, tense)\n new_verb = stripped_verb + ciare_endings[pronoun]\n return new_verb\n if verb[-4:] == \"care\":\n stripped_verb = strip_off_ending(verb, tense)\n new_verb = stripped_verb + add_h[pronoun]\n return new_verb\n if verb[-4:] == \"gare\":\n stripped_verb = strip_off_ending(verb, tense)\n new_verb = stripped_verb + add_h[pronoun]\n return new_verb\n else:\n stripped_verb = strip_off_ending(verb, tense)\n new_verb = stripped_verb + are_endings[pronoun]\n return new_verb", "def goodVsEvil(good, evil):\n good_list = [int(num) for num in good.split(' ')]\n evil_list = [int(num) for num in evil.split(' ')]\n good_dict = {'Hobbits': 1, 'Men': 2, 'Elves': 3, 'Dwarves': 3, 'Eagles': 4, 'Wizards': 10}\n evil_dict = {'Orcs': 1, 'Men': 2, 'Wargs': 2, 'Goblins': 2, 'Uruk Hai': 3, 'Trolls': 5, 'Wizards': 10}\n good_sum = sum([num1 * num2 for num1, num2 in zip(list(good_dict.values()), good_list)])\n evil_sum = sum([num1 * num2 for num1, num2 in zip(list(evil_dict.values()), evil_list)])\n if good_sum > evil_sum:\n return 'Battle Result: Good triumphs over Evil'\n elif good_sum < evil_sum:\n return 'Battle Result: Evil eradicates all 
trace of Good'\n else:\n return 'Battle Result: No victor on this battle field'", "def closet():\n\tprint \"\"\"\nYou approach the closet and notice that the door handle is broken.\n\"\"\"\n\t\n\topen_closet = raw_input(\"> \")\n\t\n\tif \"open\" in open_closet:\n\t\tprint \"\"\"\nYou see an open coffin with a vampire inside.\n\"\"\"\n\t\tvampire()\n\n\telif vampire == \"back\":\n\t\tgreat_hall_return()\n\n\telse:\n\t\tprint \"\"\"\nYou can't get up the nerves to open the door.\n\"\"\"\n\t\tgreat_hall_return()", "def verse_2():\n print(\"Old MacDonald had a farm\")\n print(\"E-I-E-I-O\")", "def testNSESanityChecks(self):\n self.assertEqual(100, self.c.get_species_richness())\n self.assertEqual(67, self.c2.get_species_richness())", "def mystery_solved():\n print(\"\\nThe butler: The mystery is solved! I knew it was someone in the family. Well done!\")", "def test_genius(self):\n bad_res = lw.get_lyrics('genius', 'eminem', 'los yourself')\n good_res = lw.get_lyrics('genius', 'eminem', 'lose yourself')\n self.assertEqual(bad_res, 404)\n self.assertTrue(good_res)", "def beats(self, one, two):\n return ((one == 'rock' and two == 'scissors') or\n (one == 'scissors' and two == 'paper') or\n (one == 'paper' and two == 'rock'))" ]
[ "0.6295088", "0.6273097", "0.6258943", "0.6227304", "0.6204726", "0.61615497", "0.6159582", "0.59585387", "0.5934847", "0.59218514", "0.59041053", "0.58519816", "0.58450353", "0.5841008", "0.5827716", "0.5822664", "0.58197415", "0.58001065", "0.5781325", "0.57723594", "0.5735134", "0.5733278", "0.57068485", "0.570291", "0.5670335", "0.56467193", "0.56408805", "0.5638273", "0.5632236", "0.5597965", "0.55828196", "0.5569869", "0.5563169", "0.55515724", "0.5549378", "0.5545371", "0.55040663", "0.55023575", "0.5500308", "0.5489275", "0.5485628", "0.54836845", "0.5483559", "0.5480827", "0.5480582", "0.5478674", "0.5465665", "0.546175", "0.546136", "0.5446877", "0.54445183", "0.5428593", "0.5424444", "0.5415794", "0.54072964", "0.5401482", "0.5395601", "0.53796214", "0.5372426", "0.53699493", "0.53661484", "0.5351161", "0.5348616", "0.5334582", "0.53275836", "0.53252614", "0.532345", "0.531368", "0.5303506", "0.530219", "0.5300351", "0.52853984", "0.52777773", "0.52670527", "0.5255893", "0.5251062", "0.52452207", "0.52407897", "0.52321625", "0.52320486", "0.52317995", "0.52224857", "0.52169347", "0.5216667", "0.5215622", "0.5214909", "0.52147806", "0.52117765", "0.52010167", "0.5200069", "0.5197969", "0.51895225", "0.5186653", "0.5186231", "0.51844823", "0.5182867", "0.5178183", "0.51658344", "0.516171", "0.51591283" ]
0.5283887
72
Uses the selected spell
async def cast( event, spell: ('str', 'select a spell'), ): spell = get_spell_or_abort(spell) return f'{event.user:f} just used {spell}; It is super effective!'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def func(self):\n spell_list = sorted(SPELLS.keys())\n args = self.args.lower()\n args = args.strip(\" \")\n caller = self.caller\n spell_to_learn = []\n\n if not args or len(args) < 3: # No spell given\n caller.msg(\"Usage: learnspell <spell name>\")\n return\n\n for spell in spell_list: # Match inputs to spells\n if args in spell.lower():\n spell_to_learn.append(spell)\n\n if spell_to_learn == []: # No spells matched\n caller.msg(\"There is no spell with that name.\")\n return\n if len(spell_to_learn) > 1: # More than one match\n matched_spells = \", \".join(spell_to_learn)\n caller.msg(\"Which spell do you mean: %s?\" % matched_spells)\n return\n\n if len(spell_to_learn) == 1: # If one match, extract the string\n spell_to_learn = spell_to_learn[0]\n\n if spell_to_learn not in self.caller.db.spells_known: # If the spell isn't known...\n caller.db.spells_known.append(spell_to_learn) # ...then add the spell to the character\n caller.msg(\"You learn the spell '%s'!\" % spell_to_learn)\n return\n if spell_to_learn in self.caller.db.spells_known: # Already has the spell specified\n caller.msg(\"You already know the spell '%s'!\" % spell_to_learn)\n \"\"\"\n You will almost definitely want to replace this with your own system\n for learning spells, perhaps tied to character advancement or finding\n items in the game world that spells can be learned from.\n \"\"\"", "def cast_spell(self, magic):\n title = 'Select type of spell to cast'\n options = self.stats['powers'].keys()\n option_display = []\n for power in options:\n option_display.append(\n \"%s x %d\" % (\n power,\n self.stats['powers'][power]\n )\n )\n choice = self.present_menu(option_display, title)\n self.stats['powers'][options[choice]] -= 1\n if self.stats['powers'][options[choice]] == 0:\n self.stats['powers'].pop(options[choice], None)\n if options[choice] in self.stats['opponent']['powers']:\n print(\n 'A %s spell increases your magic during this trial' % (\n options[choice]\n )\n )\n magic += 1\n print('Casting a spell...')\n self.stats['active'].append(self.stats['hand'].pop())\n return magic", "def __spellCheck(self):\n aw = self.activeWindow()\n if aw:\n aw.checkSpelling()", "def choose_word():\n pass", "def cast_spell(self, mp_needed: int, spell: str) -> str:\n if self.mp >= mp_needed:\n new_mp = self.mp - mp_needed\n self.mp = new_mp\n return f'{self.name} has successfully cast {spell} and now has {self.mp} MP!'\n return f'{self.name} does not have enough MP to cast {spell}!'", "def food_selected(self, arg):\n\t\tfood = fooditemdao.retrieve_food(self.selected_food.get())\n\t\tself.lbl_unit.config(text=food.info['unit'])", "def option_changed(self,*args):\r\n self.word = self.var.get()\r\n self.find_word(self.word)", "def unlock(self, with_spell=False):\n\n if not with_spell:\n print(\"You need to cast a spell first.\")\n else:\n print(with_spell)\n super(DoorNeedingSpell, self).unlock()", "def openSpellTab(self, event: Event = None) -> None:\n if g.unitTesting:\n return\n c = self.c\n log = c.frame.log\n tabName = 'Spell'\n if log.frameDict.get(tabName):\n log.selectTab(tabName)\n else:\n log.selectTab(tabName)\n self.handler = SpellTabHandler(c, tabName)\n # Bug fix: 2013/05/22.\n if not self.handler.loaded:\n log.deleteTab(tabName)\n # spell as you type stuff\n self.suggestions: list[str] = []\n self.suggestions_idx: int = None\n self.word: str = None\n self.spell_as_you_type = False\n self.wrap_as_you_type = False", "def my_spell(word):\r\n from autocorrect import spell\r\n \r\n corrected_word = ''\r\n rescued_typo = 
0\r\n \r\n if len(word)>1: # one letter word are not considered \r\n \r\n # try to correct typo\r\n if is_typo(word): \r\n print('typo: ' + word)\r\n word = spell(word)\r\n print('autocorrected typo: ' + word)\r\n\r\n if not is_typo(word): \r\n rescued_typo = 1\r\n corrected_word = word\r\n else:\r\n corrected_word = word\r\n\r\n return corrected_word, rescued_typo", "def spell_correction(self, tweet):\n return self.spell_correct.correct(tweet)", "def func(self):\n caller = self.caller\n\n if not self.lhs or len(self.lhs) < 3: # No spell name given\n caller.msg(\"Usage: cast <spell name> = <target>, <target2>, ...\")\n if not caller.db.spells_known:\n caller.msg(\"You don't know any spells.\")\n return\n else:\n caller.db.spells_known = sorted(caller.db.spells_known)\n spells_known_msg = \"You know the following spells:|/\" + \"|/\".join(\n caller.db.spells_known\n )\n caller.msg(spells_known_msg) # List the spells the player knows\n return\n\n spellname = self.lhs.lower() # noqa - not used but potentially useful\n spell_to_cast = []\n spell_targets = []\n\n if not self.rhs:\n spell_targets = []\n elif self.rhs.lower() in [\"me\", \"self\", \"myself\"]:\n spell_targets = [caller]\n elif len(self.rhs) > 2:\n spell_targets = self.rhslist\n\n for spell in caller.db.spells_known: # Match inputs to spells\n if self.lhs in spell.lower():\n spell_to_cast.append(spell)\n\n if spell_to_cast == []: # No spells matched\n caller.msg(\"You don't know a spell of that name.\")\n return\n if len(spell_to_cast) > 1: # More than one match\n matched_spells = \", \".join(spell_to_cast)\n caller.msg(\"Which spell do you mean: %s?\" % matched_spells)\n return\n\n if len(spell_to_cast) == 1: # If one match, extract the string\n spell_to_cast = spell_to_cast[0]\n\n if spell_to_cast not in SPELLS: # Spell isn't defined\n caller.msg(\"ERROR: Spell %s is undefined\" % spell_to_cast)\n return\n\n # Time to extract some info from the chosen spell!\n spelldata = SPELLS[spell_to_cast]\n\n # Add in some default data if optional parameters aren't specified\n if \"combat_spell\" not in spelldata:\n spelldata.update({\"combat_spell\": True})\n if \"noncombat_spell\" not in spelldata:\n spelldata.update({\"noncombat_spell\": True})\n if \"max_targets\" not in spelldata:\n spelldata.update({\"max_targets\": 1})\n\n # Store any superfluous options as kwargs to pass to the spell function\n kwargs = {}\n spelldata_opts = [\n \"spellfunc\",\n \"target\",\n \"cost\",\n \"combat_spell\",\n \"noncombat_spell\",\n \"max_targets\",\n ]\n for key in spelldata:\n if key not in spelldata_opts:\n kwargs.update({key: spelldata[key]})\n\n # If caster doesn't have enough MP to cover the spell's cost, give error and return\n if spelldata[\"cost\"] > caller.db.mp:\n caller.msg(\"You don't have enough MP to cast '%s'.\" % spell_to_cast)\n return\n\n # If in combat and the spell isn't a combat spell, give error message and return\n if spelldata[\"combat_spell\"] is False and self.rules.is_in_combat(caller):\n caller.msg(\"You can't use the spell '%s' in combat.\" % spell_to_cast)\n return\n\n # If not in combat and the spell isn't a non-combat spell, error ms and return.\n if spelldata[\"noncombat_spell\"] is False and self.rules.is_in_combat(caller) is False:\n caller.msg(\"You can't use the spell '%s' outside of combat.\" % spell_to_cast)\n return\n\n # If spell takes no targets and one is given, give error message and return\n if len(spell_targets) > 0 and spelldata[\"target\"] == \"none\":\n caller.msg(\"The spell '%s' isn't cast on a 
target.\" % spell_to_cast)\n return\n\n # If no target is given and spell requires a target, give error message\n if spelldata[\"target\"] not in [\"self\", \"none\"]:\n if len(spell_targets) == 0:\n caller.msg(\"The spell '%s' requires a target.\" % spell_to_cast)\n return\n\n # If more targets given than maximum, give error message\n if len(spell_targets) > spelldata[\"max_targets\"]:\n targplural = \"target\"\n if spelldata[\"max_targets\"] > 1:\n targplural = \"targets\"\n caller.msg(\n \"The spell '%s' can only be cast on %i %s.\"\n % (spell_to_cast, spelldata[\"max_targets\"], targplural)\n )\n return\n\n # Set up our candidates for targets\n target_candidates = []\n\n # If spell targets 'any' or 'other', any object in caster's inventory or location\n # can be targeted by the spell.\n if spelldata[\"target\"] in [\"any\", \"other\"]:\n target_candidates = caller.location.contents + caller.contents\n\n # If spell targets 'anyobj', only non-character objects can be targeted.\n if spelldata[\"target\"] == \"anyobj\":\n prefilter_candidates = caller.location.contents + caller.contents\n for thing in prefilter_candidates:\n if not thing.attributes.has(\"max_hp\"): # Has no max HP, isn't a fighter\n target_candidates.append(thing)\n\n # If spell targets 'anychar' or 'otherchar', only characters can be targeted.\n if spelldata[\"target\"] in [\"anychar\", \"otherchar\"]:\n prefilter_candidates = caller.location.contents\n for thing in prefilter_candidates:\n if thing.attributes.has(\"max_hp\"): # Has max HP, is a fighter\n target_candidates.append(thing)\n\n # Now, match each entry in spell_targets to an object in the search candidates\n matched_targets = []\n for target in spell_targets:\n match = caller.search(target, candidates=target_candidates)\n matched_targets.append(match)\n spell_targets = matched_targets\n\n # If no target is given and the spell's target is 'self', set target to self\n if len(spell_targets) == 0 and spelldata[\"target\"] == \"self\":\n spell_targets = [caller]\n\n # Give error message if trying to cast an \"other\" target spell on yourself\n if spelldata[\"target\"] in [\"other\", \"otherchar\"]:\n if caller in spell_targets:\n caller.msg(\"You can't cast '%s' on yourself.\" % spell_to_cast)\n return\n\n # Return if \"None\" in target list, indicating failed match\n if None in spell_targets:\n # No need to give an error message, as 'search' gives one by default.\n return\n\n # Give error message if repeats in target list\n if len(spell_targets) != len(set(spell_targets)):\n caller.msg(\"You can't specify the same target more than once!\")\n return\n\n # Finally, we can cast the spell itself. 
Note that MP is not deducted here!\n try:\n spelldata[\"spellfunc\"](\n caller, spell_to_cast, spell_targets, spelldata[\"cost\"], **kwargs\n )\n except Exception:\n log_trace(\"Error in callback for spell: %s.\" % spell_to_cast)", "def pick_word(self):\n self.chosen_word = random.choice(self.words_list)\n return self.chosen_word", "def spell_a_word(cls, voice_transcript, skill, **kwargs):\n tags = cls._extract_tags(voice_transcript, skill['tags'])\n for tag in tags:\n reg_ex = re.search(tag + ' ([a-zA-Z]+)', voice_transcript)\n try:\n if reg_ex:\n search_text = reg_ex.group(1)\n for letter in search_text:\n cls.response(letter)\n time.sleep(2)\n except Exception as e:\n logging.debug(e)\n cls.response(\"I can't spell the word\")", "def changeThenFind(self, event: Event = None) -> None:\n if self.handler:\n self.openSpellTab()\n f = self.handler.changeThenFind\n f()\n else:\n self.openSpellTab()", "def _select_translation(self, ref, current, entered):\n s = entered\n if entered.lower() == \"y\":\n s = ref\n elif current and entered == \"\":\n s = current\n return s", "async def upgrade(\n event,\n spell: ('str', 'select a spell'),\n):\n spell = get_spell_or_abort(spell)\n \n return f'{event.user:f} just upgraded their {spell}; It was a *next* level move!'", "def as_you_type_onkey(self, tag: str, kwargs: Any) -> None:\n if kwargs['c'] != self.c:\n return\n if kwargs['ch'] not in '\\'\",.:) \\n\\t':\n return\n c = self.c\n spell_ok = True\n if self.spell_as_you_type: # might just be for wrapping\n w = c.frame.body.wrapper\n txt = w.getAllText()\n i = w.getInsertPoint()\n word = txt[:i].rsplit(None, 1)[-1]\n word = ''.join(i if i.isalpha() else ' ' for i in word).split()\n if word:\n word = word[-1]\n ec = c.spellCommands.handler.spellController\n suggests = ec.process_word(word)\n if suggests:\n spell_ok = False\n g.es(' '.join(suggests[:5]) +\n ('...' 
if len(suggests) > 5 else ''),\n color='red')\n elif suggests is not None:\n spell_ok = False\n g.es('[no suggestions]')\n self.suggestions = suggests\n self.suggestion_idx = 0\n self.word = word\n if spell_ok and self.wrap_as_you_type and kwargs['ch'] == ' ':\n w = c.frame.body.wrapper\n txt = w.getAllText()\n i = w.getInsertPoint()\n # calculate the current column\n parts = txt.split('\\n')\n popped = 0 # chars on previous lines\n while len(parts[0]) + popped < i:\n popped += len(parts.pop(0)) + 1 # +1 for the \\n that's gone\n col = i - popped\n if col > self.page_width:\n txt = txt[:i] + '\\n' + txt[i:] # replace space with \\n\n w.setAllText(txt)\n c.p.b = txt\n w.setInsertPoint(i + 1) # must come after c.p.b assignment", "def as_you_type_toggle(self, event: Event) -> None:\n if self.spell_as_you_type:\n self.spell_as_you_type = False\n if not self.wrap_as_you_type:\n g.unregisterHandler('bodykey2', self.as_you_type_onkey)\n g.es(\"Spell as you type disabled\")\n return\n self.spell_as_you_type = True\n if not self.wrap_as_you_type:\n g.registerHandler('bodykey2', self.as_you_type_onkey)\n g.es(\"Spell as you type enabled\")", "def secondary_effect(self, saved, caster, spell_effect):\n if saved:\n # print(\"Shoke off effect\")\n self.cleanup_effect(caster, spell_effect)\n else:\n pass", "def __setAutoSpellChecking(self):\n enabled = self.autoSpellCheckAct.isChecked()\n Preferences.setEditor(\"AutoSpellCheckingEnabled\", enabled)\n for editor in self.editors:\n editor.setAutoSpellChecking()", "def testPlaySpell(self):\n\n spell = self._prepare_card(C30007) # ็ซ็ƒๆœฏ\n\n self.assertEqual(spell.id, C30007)\n self.game.run_player_action(pa.PlaySpell(self.game, spell, self.p1.hero))\n\n self._assertEventType(GSE + [\n std_e.OnPlaySpell, std_e.SpellBenderPhase, std_e.SpellText, std_e.Damage, std_e.AfterSpell,\n ])", "def focusToSpell(self, event: Event = None) -> None:\n self.openSpellTab() # Makes Spell tab visible.\n # This is not a great idea. There is no indication of focus.\n # if self.handler and self.handler.tab:\n # self.handler.tab.setFocus()", "def use(target, name):\n out = target.damage() + \"\\n\"\n return out + \"You swing the \" + name + \" at \" + target.name", "def do_use(self, arg):\r\n itemToUse = arg.lower()\r\n \r\n if itemToUse == '':\r\n print('Use what? Type \"inv\" to see the items in your invetory.')\r\n return\r\n \r\n cantUse = False\r\n \r\n #look up the item the player describes\r\n invDescWords = getAllDescWords(inventory)\r\n \r\n if itemToUse not in invDescWords:\r\n print('You do not have that item to use it')\r\n return\r\n \r\n for item in getAllItemsMatchingDesc(itemToUse, inventory):\r\n if worldItems[item].get(USEABLE, True) == False:\r\n cantUse = True\r\n continue\r\n print('%s' % (worldItems[item][USEDESCTRUE]))\r\n #print('You use %s' % (worldItems[item][SHORTDESC]))\r\n #inventory.remove(item) \r\n return\r\n \r\n if cantUse:\r\n print('You cannot use \"%s\".' 
% (itemToUse))\r\n else:\r\n print('You do not have that item to use.')", "def find(self, event: Event = None) -> None:\n if not self.loaded:\n return\n c, n, p = self.c, 0, self.c.p\n sc = self.spellController\n w = c.frame.body.wrapper\n c.selectPosition(p)\n s = w.getAllText().rstrip()\n ins = w.getInsertPoint()\n # New in Leo 5.3: use regex to find words.\n last_p = p.copy()\n while True:\n for m in self.re_word.finditer(s[ins:]):\n start, word = m.start(0), m.group(0)\n if word in self.seen:\n continue\n n += 1\n # Ignore the word if numbers precede or follow it.\n # Seems difficult to do this in the regex itself.\n k1 = ins + start - 1\n if k1 >= 0 and s[k1].isdigit():\n continue\n k2 = ins + start + len(word)\n if k2 < len(s) and s[k2].isdigit():\n continue\n alts: list[str] = sc.process_word(word)\n if alts:\n self.currentWord = word\n i = ins + start\n j = i + len(word)\n self.showMisspelled(p)\n self.tab.fillbox(alts, word)\n c.invalidateFocus()\n c.bodyWantsFocus()\n w.setSelectionRange(i, j, insert=j)\n k = g.see_more_lines(s, j, 4)\n w.see(k)\n return\n self.seen.add(word)\n # No more misspellings in p\n p.moveToThreadNext()\n if p:\n ins = 0\n s = p.b\n else:\n g.es(\"no more misspellings\")\n c.selectPosition(last_p)\n self.tab.fillbox([])\n c.invalidateFocus()\n c.bodyWantsFocus()\n return", "def use(self):\n return_string = ''\n item = input(f\"What do you want to use?\\n>\")\n if item in self.backpack:\n if self.backpack[item].type is \"Food\":\n if (self.health + self.backpack[item].heal_amount) > standard_health:\n self.health = standard_health\n else:\n self.health += self.backpack[item].heal_amount\n self.backpack[item].charges -= 1\n return_string = f\"You ate {self.backpack[item].name}. {self.backpack[item].heal_amount} health restored\"\n if self.backpack[item].charges == 0:\n del self.backpack[item]\n return return_string\n else:\n return \"You cant eat this\"\n else:\n return \"You dont have this\"", "def fixWord(self,phrase):\n if(\"spellCheck\" in self._classes):\n return self._spellChecker.fixWord(phrase)", "def __editUserPEL(self):\n from QScintilla.SpellChecker import SpellChecker\n pel = SpellChecker.getUserDictionaryPath(True)\n self.__editSpellingDictionary(pel)", "def __enableSpellingActions(self):\n from QScintilla.SpellChecker import SpellChecker\n spellingAvailable = SpellChecker.isAvailable()\n \n self.spellCheckAct.setEnabled(\n len(self.editors) != 0 and spellingAvailable)\n self.autoSpellCheckAct.setEnabled(spellingAvailable)", "def DoAction(self,event):\r\n selections = self.list.GetSelections()\r\n if not selections: return bell()\r\n itemDex = selections[0]\r\n item = self.items[itemDex]\r\n self.data.action(item)", "def add(self, event: Event = None) -> None:\n if self.loaded:\n w = self.currentWord\n if w:\n self.spellController.add(w)\n self.tab.onFindButton()", "def _onWord(self, name, location, length):\n logging.debug(\"onWord...\")", "def __showEditSpellingMenu(self):\n proj = e5App().getObject(\"Project\")\n projetOpen = proj.isOpen()\n pwl = e5App().getObject(\"Project\").getProjectDictionaries()[0]\n self.__editProjectPwlAct.setEnabled(projetOpen and bool(pwl))\n pel = e5App().getObject(\"Project\").getProjectDictionaries()[1]\n self.__editProjectPelAct.setEnabled(projetOpen and bool(pel))\n \n from QScintilla.SpellChecker import SpellChecker\n pwl = SpellChecker.getUserDictionaryPath()\n self.__editUserPwlAct.setEnabled(bool(pwl))\n pel = SpellChecker.getUserDictionaryPath(True)\n self.__editUserPelAct.setEnabled(bool(pel))", "def 
change(self, event: Event = None) -> None:\n if self.handler:\n self.openSpellTab()\n self.handler.change()\n else:\n self.openSpellTab()", "def spell_attack(self, caster, spell_name, targets, cost, **kwargs):\n spell_msg = \"%s casts %s!\" % (caster, spell_name)\n\n atkname_single = \"The spell\"\n atkname_plural = \"spells\"\n min_damage = 10\n max_damage = 20\n accuracy = 0\n attack_count = 1\n\n # Retrieve some variables from kwargs, if present\n if \"attack_name\" in kwargs:\n atkname_single = kwargs[\"attack_name\"][0]\n atkname_plural = kwargs[\"attack_name\"][1]\n if \"damage_range\" in kwargs:\n min_damage = kwargs[\"damage_range\"][0]\n max_damage = kwargs[\"damage_range\"][1]\n if \"accuracy\" in kwargs:\n accuracy = kwargs[\"accuracy\"]\n if \"attack_count\" in kwargs:\n attack_count = kwargs[\"attack_count\"]\n\n to_attack = []\n # If there are more attacks than targets given, attack first target multiple times\n if len(targets) < attack_count:\n to_attack = to_attack + targets\n extra_attacks = attack_count - len(targets)\n for n in range(extra_attacks):\n to_attack.insert(0, targets[0])\n else:\n to_attack = to_attack + targets\n\n # Set up dictionaries to track number of hits and total damage\n total_hits = {}\n total_damage = {}\n for fighter in targets:\n total_hits.update({fighter: 0})\n total_damage.update({fighter: 0})\n\n # Resolve attack for each target\n for fighter in to_attack:\n attack_value = randint(1, 100) + accuracy # Spell attack roll\n defense_value = self.get_defense(caster, fighter)\n if attack_value >= defense_value:\n spell_dmg = randint(min_damage, max_damage) # Get spell damage\n total_hits[fighter] += 1\n total_damage[fighter] += spell_dmg\n\n for fighter in targets:\n # Construct combat message\n if total_hits[fighter] == 0:\n spell_msg += \" The spell misses %s!\" % fighter\n elif total_hits[fighter] > 0:\n attack_count_str = atkname_single + \" hits\"\n if total_hits[fighter] > 1:\n attack_count_str = \"%i %s hit\" % (total_hits[fighter], atkname_plural)\n spell_msg += \" %s %s for %i damage!\" % (\n attack_count_str,\n fighter,\n total_damage[fighter],\n )\n\n caster.db.mp -= cost # Deduct MP cost\n\n caster.location.msg_contents(spell_msg) # Message the room with spell results\n\n for fighter in targets:\n # Apply damage\n self.apply_damage(fighter, total_damage[fighter])\n # If fighter HP is reduced to 0 or less, call at_defeat.\n if fighter.db.hp <= 0:\n self.at_defeat(fighter)\n\n if self.is_in_combat(caster): # Spend action if in combat\n self.spend_action(caster, 1, action_name=\"cast\")", "def spell_file(fn, wordcost, maxword):\n\n def infer_spaces(s):\n \"\"\"Uses dynamic programming to infer the location of spaces in a string\n without spaces.\"\"\"\n global unfolded\n if s in unfolded:\n return unfolded[s]\n\n # Find the best match for the i first characters, assuming cost has\n # been built for the i-1 first characters.\n # Returns a pair (match_cost, match_length).\n def best_match(i):\n candidates = enumerate(reversed(cost[max(0, i-maxword):i]))\n return min((c + wordcost.get(s[i-k-1:i], 9e999), k+1) for k,c in candidates)\n\n # Build the cost array.\n cost = [0]\n for i in range(1,len(s)+1):\n c,k = best_match(i)\n cost.append(c)\n\n # Backtrack to recover the minimal-cost string.\n out = []\n i = len(s)\n while i>0:\n c,k = best_match(i)\n assert c == cost[i]\n out.append(s[i-k:i])\n i -= k\n \n unfolded[s] = ' '.join(reversed(out))\n return ' '.join(reversed(out))\n\n\n\n speller = aspell.Speller('lang', 'en')\n for w in slang:\n 
speller.addtoSession(w)\n \n with open(tweet_tmp1_dir + fn, 'r') as fin:\n with open(tweet_tmp2_dir + fn, 'w') as fout:\n res = []\n for l in fin:\n prefix = ''\n if 'test' in fn:\n comma = l.find(',')\n prefix = l[:comma].strip()\n l = l[comma+1:]\n try:\n assert(prefix.isdigit())\n except:\n print(prefix, l)\n prefix += ','\n \n ll = ''\n \n ws = [w for w in l.strip().split(' ') if len(w) > 0]\n for w in ws:\n if w in correct_word:\n nw = correct_word[w]\n elif (w.startswith('<') and w.endswith('>')) or w in whitelist or speller.check(w):\n nw = w\n else:\n try:\n nw1, nw2 = speller.suggest(w)[:2]\n nwdist1 = jellyfish.levenshtein_distance(w,nw1)\n nwdist2 = jellyfish.levenshtein_distance(w,nw2)\n \n if nw2.count(' ') < nw1.count(' ') or (nwdist1 > MAX_DIST_CORRECTION and nwdist2 < nwdist1) :\n nw1 = nw2\n nwdist1 = nwdist2\n if nwdist1 <= MAX_DIST_CORRECTION:\n nw = nw1.lower()\n else:\n nw = w.lower()\n except:\n nw = infer_spaces(w)\n if nw.count('.') >= nw.count(' ')/3:\n nw = nw.replace('.', '')\n elif nw.count('-') >= nw.count(' ')/3:\n nw = nw.replace('-', '')\n nw = nw.replace(' ', ' ').lower()\n ll += nw + ' '\n correct_word[w] = nw\n res.append(prefix+ll.strip())\n# fout.write(prefix+ll.strip()+'\\n')\n fout.write('\\n'.join(res))", "def on_correct_answer_select(self, spinner, text):\n\n self.answer = text\n self.multiple_choice_answer = text", "def __editUserPWL(self):\n from QScintilla.SpellChecker import SpellChecker\n pwl = SpellChecker.getUserDictionaryPath()\n self.__editSpellingDictionary(pwl)", "def correct_spell(tweet):\n\n\n tweet = tweet.split()\n for i in range(len(tweet)):\n if tweet[i] in downloaded_dictionary.keys():\n tweet[i] = downloaded_dictionary[tweet[i]]\n tweet = ' '.join(tweet)\n return tweet", "def get_weapon(self):\n\n return self.suggestion_set[1]", "def execute_for_command(self, skill_input: SkillInput, services: AssistantServicesBase):\n voice = skill_input.adjective.lower()\n if voice in (\"female\", \"male\"):\n services.settings_service.voice = voice\n services.settings_service.save_settings()\n services.user_interaction_service.speak('Okay, I will use a %s voice from now on.' 
% (voice), True)\n else:\n services.user_interaction_service.speak('I don\\'t understand what voice you want')", "def setSpellchecking(self, color=QtCore.Qt.blue):\n self.format.setUnderlineStyle(\n QtGui.QTextCharFormat.SpellCheckUnderline)\n self.format.setUnderlineColor(color)", "def cursor_changed(self, column_side, bypass_selection=\"\"):\n\n column = None\n aligned_column = None\n if column_side == LEFT_TEXT:\n column = self._window.column1\n aligned_column = self._window.column2\n else:\n column = self._window.column2\n aligned_column = self._window.column1\n\n w = None\n if bypass_selection != \"\":\n # bypass the selection and process, used by search_highlight\n w = bypass_selection\n else:\n # else, just select the clicked word\n w = column.align_disp.editor.get_clicked_word()\n\n if w and w != \"\" and w != column.align_disp.currentWord:\n try:\n word, aligned_word, goldsmith_rslt, goldsmith_rslt_2 = self.controller.process_word(w, column_side)\n\n # Highlighting\n column.align_disp.editor.clean_highlight(first_pos=column.align_disp.editor.first_highlighted_block,\n last_pos=column.align_disp.editor.last_highlighted_block)\n column.align_disp.editor.refresh_highlight(word.str)\n aligned_column.align_disp.editor.clean_highlight(first_pos=aligned_column.align_disp.editor.first_highlighted_block,\n last_pos=aligned_column.align_disp.editor.last_highlighted_block)\n aligned_column.align_disp.editor.refresh_highlight(aligned_word.str, color=QtGui.QColor(255, 255, 100))\n\n align_rslt = \"{} : <b>{}</b>\".format(self.model.dist_words[word.str][aligned_word.str], aligned_word.str)\n\n column.info_word.set_word(word.str)\n column.info_word.set_text(align_rslt)\n column.see_also.set_text(goldsmith_rslt)\n column.align_disp.currentWord = word.str\n column.align_disp.sidebar.currentVect = word.pos\n column.align_disp.sidebar.draw_vector()\n\n aligned_column.info_word.set_word(aligned_word.str)\n aligned_column.info_word.set_text(\"See also\")\n # TODO : goldsmith on the second column, maybe paste the code or add eternal function\n aligned_column.see_also.set_text(goldsmith_rslt_2)\n aligned_column.align_disp.currentWord = aligned_word.str\n aligned_column.align_disp.sidebar.currentVect = aligned_word.pos\n aligned_column.align_disp.sidebar.draw_vector()\n\n except WordNotInDatabase:\n column.align_disp.editor.clean_highlight(first_pos=column.align_disp.editor.first_highlighted_block,\n last_pos=column.align_disp.editor.last_highlighted_block)\n aligned_column.align_disp.editor.clean_highlight(first_pos=aligned_column.align_disp.editor.first_highlighted_block,\n last_pos=aligned_column.align_disp.editor.last_highlighted_block)\n column.info_word.set_word(\"Not found\")\n column.info_word.set_text(\"Alignment results\")\n column.see_also.set_text(\"Goldsmith algorithm results\")\n column.align_disp.currentWord = None\n column.align_disp.sidebar.currentVect = [0, 1]\n column.align_disp.sidebar.draw_vector()\n\n aligned_column.info_word.set_word(\"Not found\")\n aligned_column.info_word.set_text(\"See also\")\n aligned_column.see_also.set_text(\"Goldsmith algorithm results\")\n aligned_column.align_disp.currentWord = None\n aligned_column.align_disp.sidebar.currentVect = [0, 1]\n aligned_column.align_disp.sidebar.draw_vector()\n\n except DataNotProcessed:\n column.align_disp.editor.clean_highlight(first_pos=column.align_disp.editor.first_highlighted_block,\n last_pos=column.align_disp.editor.last_highlighted_block)\n 
aligned_column.align_disp.editor.clean_highlight(first_pos=aligned_column.align_disp.editor.first_highlighted_block,\n last_pos=aligned_column.align_disp.editor.last_highlighted_block)", "def spell(self, word):\n if not self.__isValidInput(word):\n return False\n\n result = self.__lib.voikkoSpellUcs4(self.__handle, word)\n if result == 0:\n return False\n elif result == 1:\n return True\n else:\n raise VoikkoException(\"Internal error returned from libvoikko\")", "def _sense_and_act(self):\n pass", "def ignore(self, event: Event = None) -> None:\n if self.loaded:\n w = self.currentWord\n if w:\n self.spellController.ignore(w)\n self.tab.onFindButton()", "def test_spelling(self) -> None:\n misstakes: Dict[Word, List[str]] = self.report.spellcheck(\n self.rules.spelling_skip_wordclasses\n )\n for word, corrections in misstakes.items():\n if word.text.lower() in self.rules.forbidden_words:\n continue\n if word.text.lower() in [\n ab[\"word\"] for ab in self.rules.police_abbreviations\n ]:\n continue\n error_text: str = f\"Ordet {word.text} är felstavat.\"\n if corrections:\n error_text += \" Rättningsförslag: \" + \", \".join(corrections) + \".\"\n self.add_error(error_text, word=word)", "def use(target, name):\n return \"You find no use of this item\"", "def onWordRecognised(self, *_args):\n # Unsubscribe to the event when talking,\n # to avoid repetitions\n memory.unsubscribeToEvent(\"WordRecognized\",\"AudioRecognition\")\n\n # We access to the word recognised in the memory\n word = memory.getData(\"WordRecognized\")\n\n # Debug : Print the word recognised\n print(\"Mot :\")\n print(word[0])\n print(\"Indice de confiance :\")\n print(word[1])\n print\n\n\n # We acknoledge a word if the trust is high enough\n if (word[1] > 0.28):\n self.mot = word[0]\n #self.tts.say(\"Le mot reconnu est :\"+self.mot)\n StateManager(self)\n \n\n # Subscribe again to the event\n memory.subscribeToEvent(\"WordRecognized\",\n \"AudioRecognition\",\n \"onWordRecognised\")", "def func(self):\n if not self.args:\n self.msg(\n \"{wYou are currently speaking:{n %s\"\n % self.caller.languages.current_language.capitalize()\n )\n self.list_languages()\n return\n if \"translate\" in self.switches:\n obj = self.caller.search(self.args)\n if not obj:\n return\n translation = obj.item_data.translation\n matches = False\n for lang in self.caller.languages.known_languages:\n if lang in translation:\n self.msg(\n \"You translate the following from %s:\\n%s\"\n % (lang.capitalize(), translation[lang])\n )\n matches = True\n if not matches:\n self.msg(\n \"%s does not seem to contain any foreign tongue you can read.\" % obj\n )\n return\n if not self.switches:\n args = self.args.lower()\n if args == \"arvani\" or args == \"common\":\n self.caller.attributes.remove(\"currently_speaking\")\n self.msg(\"{wYou are now speaking Arvani.{n\")\n return\n if args not in self.caller.languages.known_languages:\n self.msg(\"You cannot speak %s.\" % self.args)\n self.list_languages()\n return\n self.caller.db.currently_speaking = args\n self.msg(\"{wYou are now speaking %s.{n\" % self.args)\n return\n player = self.caller.player.search(self.lhs)\n if not player:\n return\n targ = player.char_ob\n if not targ:\n self.msg(\"Not found.\")\n return\n if \"teachme\" in self.switches:\n if self.caller.languages.additional_languages <= 0:\n self.msg(\n \"You need a higher rank of linguistics before you can learn anything else.\"\n )\n return\n req = targ.ndb.language_requests or {}\n req[self.caller] = self.rhs\n targ.ndb.language_requests = 
req\n self.msg(\"You request that %s teach you %s.\" % (targ, self.rhs))\n targ.msg(\n \"{w%s has requested that you teach them %s.{n\" % (self.caller, self.rhs)\n )\n return\n if \"teach\" in self.switches:\n req = self.caller.ndb.language_requests or {}\n if targ not in req:\n self.msg(\"You do not have a request from %s.\" % targ)\n return\n lang = req[targ].lower()\n if lang not in self.caller.languages.known_languages:\n self.msg(\"You do not know %s.\" % lang)\n self.list_languages()\n return\n if targ.languages.max_languages <= len(targ.languages.known_languages):\n self.msg(\"They know as many languages as they can learn.\")\n return\n targ.languages.add_language(lang)\n self.msg(\"You have taught %s to %s.\" % (lang, targ))\n targ.msg(\"%s has taught you %s.\" % (self.caller, lang))\n return", "def cmd(name: str) -> Callable:\n return g.new_cmd_decorator(name, ['c', 'spellCommands',])", "def test_selection_name(self):\n skill = create_skill()\n skill.speak = mock.Mock()\n skill.get_response = mock.Mock()\n\n skill.get_response.return_value = 'octopus'\n\n options = ['a balloon', 'an octopus', 'a piano']\n response = skill.ask_selection(options, 'which is better')\n self.assertEqual(options[1], response)\n\n # Assert that the spoken sentence contains all options.\n spoken_sentence = skill.speak.call_args[0][0]\n for opt in options:\n self.assertTrue(opt in spoken_sentence)", "def use_skill(self, g, i, x, y):\n # @ param g a reference to the game engine\n # @ param i the index of the skill (basically what skill)\n # @ param x the x target coordinate in game pixels\n # @ param y the y target coordinate in game pixels\n if self.attackTimer < self.attackDelay:\n print(\"attack on CD\")\n return\n \n if self.skill[i].skillAttr == 0:\n g.fire_skill_sound.play()\n elif self.skill[i].skillAttr == 1:\n g.ice_skill_sound.play()\n elif self.skill[i].skillAttr == 2:\n g.lightning_skill_sound.play()\n elif self.skill[i].skillAttr == 3:\n g.poison_skill_sound.play()\n \n \n if self.skill[i].skillKey == 0: #Aura\n #turn the aura on/off\n if self.skill[i].active == False:\n #print(\"aura on\")\n self.skill[i].active = True\n else:\n self.skill[i].active = False\n #print(\"aura off\")\n \n elif self.skill[i].skillKey == 1: #Missile\n if self.mana[0] > self.skill[i].skillCost:\n self.mana[0] -= self.skill[i].skillCost\n self.attackTimer = 0\n target = Target(x, y)\n center_x = self.rect.x + (self.rect.width / 2)\n center_y = self.rect.y + (self.rect.height / 2)\n #bullet types: fire 5, ice 6, lightning 7\n #skill types: fire 0, ice 1, lightning 2\n g.bullets.append(self.bulletFactory.createBullet(g, self.skill[i].skillAttr + 5, 0, self.attack, 1024, target, center_x, center_y))\n #print(\"missile\")\n\n elif self.skill[i].skillKey == 2: #Breath\n #for each creep in the AoE cone, do damage.\n if self.mana[0] > self.skill[i].skillCost:\n self.mana[0] -= self.skill[i].skillCost\n self.attackTimer = 0\n #get low and high angle (-45 degrees and +45 degrees from player -> point angle)\n lowAngle = math.atan2(y - self.rect.centery, x - self.rect.centerx) - 3.1415 / 2.0\n highAngle = math.atan2(y - self.rect.centery, x - self.rect.centerx) + 3.1415 / 2.0\n for creep in g.creeps:\n #get angle to creep\n creepAngle = math.atan2(creep.rect.centery - self.rect.centery, creep.rect.centerx - self.rect.centerx)\n \n #if angle to the creep is between the two angles\n if creepAngle > lowAngle and creepAngle < highAngle:\n #and the distance to the creep is below the skill's range\n if ( (creep.rect.centerx - 
self.rect.centerx) ** 2 + (creep.rect.centery - self.rect.centery) ** 2 ) ** 0.5 < 4 * 24:\n creep.take_damage( self.attack )\n #print(\"breath\")\n #apply debuffs, based on type\n if self.skill[i].skillAttr == 0: #fire\n creep.applyBurning()\n elif self.skill[i].skillAttr == 1: #frost\n creep.applyChilled()\n elif self.skill[i].skillAttr == 2: #lightning\n creep.applyShocked()", "def choice1(choice, ghost):\n if choice == \"1\":\n if \"flashlight\" not in items:\n print_pause(\"The Railway station is really foggy\", 2)\n print_pause(\"You can see absolute nothing\", 2)\n print_pause(\"You walk on and find a flashlight\", 2)\n light_choice(ghost)\n else:\n print_pause(\n \"You already pasted this way\\nPlease choose another way!\", 2)\n logic(ghost)", "def handle_suggest():\n return 0", "def google_suggest(self, callback, who, arg, store=True):\n\t\t\n sugs = self.get_xml('http://google.com/complete/search', {'output':'toolbar', 'q': arg})\n\n if sugs is not None:\n try:\n sugs = [x[0].get('data') for x in sugs]\n except Exception, e:\n print \"XML error with Google Suggest: %s\" % e\n\t\t\t\n suggestions = self.remove_lyrics(sugs)\n random_sug = choice(suggestions)\n\t\t\t\n # Same string as we started with - roll again\n if random_sug == arg:\n try:\n suggestions.pop(suggestions.index(random_sug))\n except:\n pass\n random_sug = choice(suggestions)\n\t\t\t\t\n if random_sug is not None:\n if store:\n self.store_suggestion(who, arg)\n random_sug.strip('')\n random_sug.strip('\\r')\n w = random_sug.split()\n if w[0].lower() in ('what', 'why', 'was', 'where', 'who', 'which', 'whom', 'when', 'how', 'is', 'are', 'did'):\n if '?' not in w[-1:]:\n random_sug = random_sug + '?'\n return random_sug", "def choose_word(word_list):\n word = random.choice(word_list)\n word = word.lower()\n return word", "def check(self, word):\n spellings = []\n # word in lexicon\n if word in self._lexicon:\n spellings.append(word)\n # word not in lexicon\n else:\n for item in self._lexicon:\n dist = self.min_edit_distance(word, item)\n if dist == 1:\n spellings.append(item)\n\n return spellings", "def select(self, coord: Coord) -> None:\n active_team = self.units_manager.active_team\n self.prev_sel = self.curr_sel\n self.curr_sel = coord\n\n if self.prev_sel is None:\n # Nothing has been previously selected\n sel_unit = self.get_unit(coord)\n if sel_unit is None or sel_unit.played:\n self.move_area = []\n self.attack_area = []\n self.update_highlight()\n else:\n # Show the currently selected unit's move and attack area\n self.update_move_area()\n self.move_attack_area()\n self.update_highlight()\n else:\n # Something has been previously selected\n if self.prev_unit is not None and self.curr_unit is not None:\n # Selected a unit two times\n if self.prev_sel == self.curr_sel and not self.prev_unit.played and active_team.is_mine(self.prev_unit):\n # Two times on the same playable unit. 
Show the action menu.\n self.action_menu()\n elif self.curr_sel in self.attack_area:\n # Two different units: prev_unit can attack curr_unit\n # This results in a combined action: move the unit next to the enemy and propose the user to attack\n target_unit = self.curr_unit\n nearest = self.arrow.path[-1] if self.arrow.path else self.prev_sel\n if self.nearby_enemies(self.prev_unit, nearest):\n animation = self.make_move_unit_animation(self.prev_unit, nearest, self.arrow.path)\n self.add_move_unit_animation(animation)\n self.move_unit(self.prev_unit, nearest)\n self.curr_sel = nearest # otherwise move_undo will move back the defending unit!\n self.still_attack_area()\n self.update_highlight()\n self.action_menu(attacking=self.curr_unit, defending=target_unit)\n else:\n self.reset_selection()\n else:\n # Two different units: prev_unit can't attack curr_unit\n # show the current unit's move and attack area\n self.update_move_area()\n self.move_attack_area()\n self.update_highlight()\n elif self.can_selection_move():\n # Move the previously selected unit to the currently selected coordinate.\n animation = self.make_move_unit_animation(self.prev_unit, self.curr_sel, self.arrow.path)\n self.add_move_unit_animation(animation)\n self.move_unit(self.prev_unit, self.curr_sel)\n self.still_attack_area()\n self.update_highlight()\n self.action_menu()\n else:\n # Previously something irrelevant was chosen\n self.reset_selection()\n self.curr_sel = coord\n\n if self.curr_unit is not None and not self.curr_unit.played:\n # Selected a unit: show its move and attack area\n self.update_move_area()\n self.move_attack_area()\n\n self.update_highlight()\n\n self.arrow.set_path([])", "def pull_suggestion(self, callback, who, arg):\n\t\t\n random_sug = self.dong.db.get_random_row('suggest')\n res = self.google_suggest(callback, who, random_sug[2], False)\n\t\t\n w = res.split()\n if w[0].lower() in ('what', 'why', 'was', 'where', 'who', 'which', 'whom', 'when', 'how', 'is', 'are', 'did'):\n if w[-1:] != '?':\n res = res + '?'\n return res.capitalize()", "def check_target(event):\n target_text.set(target_text.get().lower())", "def update_word(self, word):\n self.word = word", "def radioButtonWeapon_Clicked( self, event ):\n\t\tself.activateTreasureBox(1)", "def use(self, target, name):\n return self.usable.use(target, name)", "def wrapped(widget):\n if r:\n obj.player_choice = random.choice(list(evilrps.Throws))\n else:\n # print(widget, 'throw', throw)\n obj.player_choice = throw\n obj.advance()", "def get_stopwords(choice = 0):\n low_acc_words = [u'orange', u'game', u'wafe', u'gold', u'gas pump', u'dock', u'magnetic disk', u'beard', u'splash', u'stethoscope', u'clock', u'modem', u'spring', u'dribble', u'scale', u'thing', u'parachute', u'screw', u'haired', u'hair spray', u'stick', u'projectile', u'surface', u'scarf', u'boat', u'lantern', u'weapon', u'fire screen', u'maypole', u'Old World buffalo', u'backpack', u'velvet', u'pistol', u'duplicator', u'tissue', u'holding', u'eel', u'iron', u'zoo', u'toilet seat', u'eye', u'telephone', u'drum', u'pepper', u'church', u'pillow', u'body', u'mink', u'prison', u'color', u'jewelry', u'elephant', u'mug', u'cargo ship', u'football', u'llama', u'wombat', u'ax', u'giant panda', u'bison', u'climber', u'tractor', u'hamster', u'beetle', u'sidewalk', u'oilseed', u'shore', u'feet', u'vending machine', u'nail', u'lock', u'licking', u'crowded', u'pudding', u'library', u'sliding', u'steel drum', u'cutter', u'trench coat', u'plate rack', u'fancy', u'barbershop', u'switch', u'hip', 
u'petting', u'keyboard', u'drilling platform', u'denim', u'old', u'sewing machine', u'dancing', u'lawn mower', u'jaguar', u'cauliflower', u'bubble', u'tray', u'printer', u'hillside', u'heater', u'store', u'stove', u'hook', u'bed', u'book jacket', u'rain barrel', u'dinosaur', u'rowing', u'surf', u'worm', u'garbage truck', u'laptop', u'mouth', u'flute', u'tape player', u'gym', u'large', u'birdhouse', u'covered', u'groom', u'swan', u'lampshade', u'snowplow', u'ramp', u'bathing cap', u'strainer', u'hard', u'mortarboard', u'penguin', u'wooden spoon', u'loaf of bread', u'window', u\"potter's wheel\", u'branch', u'fly', u'greyhound', u'walk', u'starfish', u'kitchen', u'parking meter', u'cassette', u'work', u'cash machine', u'custard apple', u'play', u'ice cream', u'mosque', u'market', u'swing', u'hay', u'fan', u'surfer', u'number', u'climb', u'golfcart', u'burrito', u'feather boa', u'resting', u'neck brace', u'glove', u'remote control', u'lotion', u'lamp', u'perched', u'jeep', u'necklace', u'shopping basket', u'sea urchin', u'pajama', u'pinwheel', u'foot', u'maze', u'squash', u'dishrag', u'bib', u'ant', u'dumbbell', u'dragonfly', u'bakery', u'lighter', u'salamander', u'sandglass', u'apron', u'cannon', u'palm', u'tent', u'spacecraft', u'oil filter', u'beer bottle', u'throne', u'stretcher', u'bedroom', u'pan', u'camera', u'kiddie', u'mashed potato', u'railing', u'tongue', u'sky', u'event', u'bright', u'curb', u'sundial', u'screwdriver', u'hand blower', u'joystick', u'flower', u'tv', u'back', u'smile', u'mortar', u'bee', u'bath', u'spatula', u'lawn', u'object', u'barrier', u'mailbox', u'fallen', u'crayfish', u'kid', u'metal', u'shot', u'quill', u'snowboarding', u'mud', u'vacuum', u'water tower', u'sleeping bag', u'altar', u'bassoon', u'family', u'shovel', u'leather', u'maillot', u'soap dispenser', u'blurry', u'racetrack', u'dish', u'gondola', u'chewing', u'badger', u'spindle', u'door', u'shaker', u'purse', u'apiary', u'bus', u'wreck', u'cell', u'balance beam', u'lip', u'animal', u'baby', u'toilet', u'armor plate', u'jigsaw puzzle', u'piggy bank', u'leafhopper', u'torch', u'ashcan', u'talking', u'traveling', u'handrail', u'area', u'raft', u'can opener', u'missile', u'syringe', u'pen', u'beacon', u'croquet ball', u'trail', u'snowboard', u'light', u'owl', u'lift', u'acorn', u'pencil box', u'hermit crab', u'binder', u'ladle', u'fire engine', u'tan', u'volcano', u'chocolate sauce', u'crossword puzzle', u'whistle', u'floating', u'forklift', u'hotdog', u'monotreme', u'eggnog', u'traffic', u'envelope', u'surfboard', u'face', u'polecat', u'tiled', u'camel', u'refrigerator', u'carousel', u'parking', u'spider web', u'stream', u'train', u'square', u'candle', u'thimble', u'jellyfish', u'teddy', u'leash', u'wild', u'shopping cart', u'jackfruit', u'office', u'alligator', u'ready', u'end', u'power drill', u'lens cap', u'looking', u'hand', u'fountain', u'radiator', u'French horn', u'graze', u'female', u'koala', u'paper towel', u'artichoke', u'passenger', u'airship', u'cow', u'slug', u'home', u'tug', u'weasel', u'including', u'crutch', u'submarine', u'chime', u'pretty', u'phone', u'barrow', u'purple', u'pulling', u'wing', u'mongoose', u'washer', u'slide', u'Band Aid', u'splashing', u'obstacle', u'flying', u'restaurant', u'pencil sharpener', u'control', u'something', u'tricycle', u'motor', u'watching', u'grey', u'balcony', u'surrounded', u'statue', u'rotisserie', u'puck', u'assorted', u'umbrella', u'measuring cup', u'hanging', u'ride', u'scuba', u'perform', u'tusker', u'desk', u'puddle', u'sea slug', u'team', 
u'beaker', u'held', u'safe', u'shower curtain', u'isopod', u'tire', u'beaver', u'tower', u'stump', u'dinner', u'conch', u'playground', u'marmot', u'fruit', u'golf ball', u'read', u'tile', u'watch', u'mosquito net', u'goggle', u'swab', u'cricket', u'wheelie', u'guacamole', u'bush', u'cockroach', u'intersection', u'letter opener', u'station', u'plow', u'course', u'aeroplane', u'view', u'racing', u'broom', u'sunny', u'corn', u'matchstick', u'variety', u'messy', u'playpen', u'ambulance', u'perfume', u'brush', u'go', u'shelf', u'look', u'blowing', u'lobster', u'lettuce', u'busy', u'digging', u'trampoline', u'track', u'glass', u'ox', u'handstand', u'assortment', u'vase', u'aircraft carrier', u'microwave', u'high', u'mousetrap', u'bathroom', u'shower cap', u'counter', u'Christmas stocking', u'safety pin', u'plastic', u'garden', u'transit', u'knife', u'docked', u'cluttered', u'serving', u'toddler', u'ledge', u'formation', u'snorkel', u'lying', u'lemon', u'ladybug', u'carry', u'solar dish', u'hammer', u'sleeping', u'saltshaker', u'cowboy', u'unicycle', u'single', u'rule', u'shoji', u'business', u'cup', u'antique', u'catch', u'open', u'carnival', u'cooking', u'rural', u'small', u'wine', u'top', u'flat', u'yurt', u'grasshopper', u'hoop', u'wallet', u'hold', u'someone', u'necked', u'salad', u'leafe', u'paddlewheel', u'porcupine', u'radio telescope', u'preparing', u'canopy', u'pointing', u'honeycomb', u'older', u'hair slide', u'plunger', u'mirror', u'landscape', u'bow', u'cart', u'skateboard', u'device', u'urban', u'sunset', u'attached', u'toward', u'right', u'town', u'four', u'beach wagon', u'close', u'lone', u'chew', u'pile', u'working', u'bottlecap', u'corner', u'swinging', u'behind', u'slot machine', u'food', u'mushroom', u'around', u'tall', u'oxygen mask', u'together', u'veggy', u'skating', u'concrete', u'subway', u'seen', u'head', u'armadillo', u'ly', u'kitten', u'cap', u'painted', u'mustache', u'moving', u'lit', u'sliced', u'sticking', u'milk can', u'roller', u'stainless', u'teeth', u'seated', u'serve', u'lady', u'carriage', u'stand', u'apple', u'paper', u'apartment', u'video', u'eating', u'stadium', u'turn', u'racket', u'stunt', u'plate', u'drinking', u'slice', u'warplane', u'cheese', u'onion', u'backyard', u'coffee', u'peach', u'staring', u'outfit', u'engine', u'coaster', u'striped', u'stacked', u'decorated', u'throwing', u'dirty', u'hula', u'mid', u'catching', u'closed', u'item', u'otter', u'rail', u'tenni', u'sink', u'toaster', u'meal', u'skate', u'fridge', u'pitch', u'kite', u'desktop', u'meat', u'military', u'fireplace', u'show', u'rider', u'rodeo', u'graffiti', u'bunch', u'coming', u'reading', u'walkway', u'another', u'mouse', u'soup', u'hole', u'steel', u'container', u'past', u'carrying', u'equipment', u'farm', u'dressed', u'scooter', u'cellphone', u'stuffed', u'commercial', u'platform', u'full', u'one', u'electronic', u'sprinkler', u'stop', u'along', u'blanket', u'residential', u'kneeling', u'blender', u'oven', u'cattle', u'skateboarder', u'produce', u'book', u'cement', u'bag', u'carrot', u'board', u'round', u'many', u'giant', u'shower', u'asian', u'picnic', u'dining', u'wedding', u'desert', u'huge', u'narrow', u'outside', u'deck', u'three', u'display', u'filled', u'cutting', u'colored', u'ear', u'feeding', u'across', u'eat', u'skateboarding', u'fighter', u'sun', u'darkened', u'brushing', u'ty', u'party', u'pedestrian', u'wet', u'structure', u'different', u'crossbone', u'jet', u'public', u'cooked', u'airplane', u'bread', u'clothe', u'tunnel', u'fishing', u'drife', u'gear', u'birthday', 
u'frisbee', u'piece', u'row', u'hydrant', u'drawn', u'meter', u'vegetable', u'broccoli', u'country', u'half', u'sandwich', u'doorway', u'lot', u'pair', u'luggage', u'long', u'christma', u'wii', u'guy', u'side', u'leap', u'plane', u'silver', u'post', u'bar', u'reaching', u'drink', u'reflection', u'wand', u'airport', u'photograph', u'type', u'lay', u'lap', u'waterfall', u'banana', u'next', u'baseball', u'hot', u'making', u'gray', u'using', u'batter', u'empty', u'bat', u'clear', u'hospital', u'scissor', u'neck', u'cake', u'alone', u'rope', u'winter', u'runway', u'broken', u'fire', u'getting', u'variou', u'distance', u'beer', u'outstretched', u'chocolate', u'match', u'stopped', u'vintage', u'clean', u'fork', u'cut', u'eaten', u'waiting', u'going', u'onto', u'nintendo', u'time', u'several', u'lined', u'railroad', u'case', u'mother', u'suitcase', u'taking', u'doughnut', u'smoke', u'controller', u'crossing', u'friend', u'closeup', u'couple', u'showing', u'made', u'big', u'trying', u'putting', u'hit', u'male', u'', u'pickelhaube', u'suburban', u'costume', u'enjoy', u'new', u'studio', u'mantis', u'pastum', u'gymnast', u'rafting', u'golden', u'waffle iron', u'watering', u'overhead', u'shoot', u'feature', u'machine', u'attempt', u'third', u'tulip', u'jungle', u'wind', u'fig', u'band', u'bone', u'free', u'cucumber', u'bouncing', u'boarding', u'tackled', u'__background__', u'gymnastic apparatus', u'pineapple', u'folded', u'rice', u'sunglasses', u'cushion', u'net', u'covering', u'pretzel', u'steam', u'santum', u'fair', u'sail', u'score', u'toothbrush', u'loaded', u'fry', u'life', u'glider', u'bounce', u'balance', u'cone', u'containing', u'beside', u'wheel', u'rain', u'spaghetti squash', u'thi', u'left', u'photographer', u'forested', u'vanity', u'shoulder', u'pavement', u'officer', u'creek', u'dead', u'ice', u'slide rule', u'dunking', u'horizon', u'raised', u'fabric', u'fight', u'way', u'war', u'landing', u'umpire', u'fashioned', u'dimly', u'topped', u'setting', u'sling', u'potato', u'painting', u'bottom', u'dance', u'crocodile', u'string', u'dig', u'gun', u'chicken', u'tarmac', u'falling', u'french', u'wait', u'pony', u'decker', u'plaza', u'earphone', u'chip', u'get', u'staircase', u'wakeboarder', u'wheelchair', u'pulled', u'polouse', u'still', u'curly', u'scaling', u'lunch', u'base', u'pizza', u'meat loaf', u'shown', u'opened', u'space', u'mess', u'headband', u'place', u'pelican', u'ring', u'sheet', u'bite', u'frame', u'hug', u'wide', u'lick', u'pastry', u'breakfast', u'take', u'topping', u'multiple', u'knee', u'tackling', u'sale', u'professional', u'german', u'crane', u'snack', u'stair', u'ping-pong ball', u'snowsuit', u'sport', u'bicyclist', u'skyscraper', u'checkered', u'restroom', u'tour', u'nearby', u'foggy', u'bmx', u'newspaper', u'mound', u'foam', u'driven', u'mohawk', u'rest', u'instrument', u'chainsaw', u'towel', u'facing', u'audience', u'served', u'clau', u'go-kart', u'tube', u'throw', u'muddy', u'harness', u'strip', u'racquet', u'prepare', u'low', u'pitcher', u'cardoon', u'gymnasium', u'pull', u'arranged', u'strawberry', u'deep', u'cream', u'rubber', u'trash', u'midair', u'peak', u'remote', u'disc', u'follow', u'potpie', u'enjoying', u'stool', u'leaping', u'action', u'taken', u'chopstick', u'flag', u'mounted', u'grill', u'wrestler', u'marble', u'backpacking', u'breaking', u'fungus', u'shade', u'egg', u'muzzled', u'style', u'carpeted', u'sauce', u'snowball', u'abacus', u'foreground', u'circuit', u'leading', u'airborne', u'hotel', u'leotard', u'kind', u'double', u'scabbard', u'bride', 
u'stall', u'blond', u'cave', u'electric', u'cigarette', u'sponsored', u'shepherd', u'dandelion', u'catcher', u'movie', u'recently', u'floaty', u'chambered nautilus', u'hitting', u'racer', u'passing', u'leaning', u'kissing', u'chase', u'funny', u'used', u'snail', u'pomegranate', u'stack', u'center', u'grind', u'bin', u'formal', u'shaped', u'signal', u'zucchini', u'parade', u'limb', u'laughing', u'step', u'range', u'slouse', u'block', u'downhill', u'jockey', u'retrieving', u'atop', u'cloth', u'skull', u'diving', u'rainy', u'tarp', u'black-footed ferret', u'nice', u'prepared', u'hot pot', u'land', u'fresh', u'hello', u'wrestle', u'kitty', u'spoon', u'rack', u'smaller', u'hose', u'giving', u'attire', u'leaving', u'chiton', u'singing', u'frog', u'crab', u'porch', u'saddle', u'donut', u'crossed', u'tied', u'tomato', u'chasing', u'scenic', u'beneath', u'boarder', u'hippopotamus', u'wading', u'sea_anemone', u'wrapped', u'shallow', u'steep', u'bagel', u'gather', u'pipe', u'hi', u'ha', u'jar', u'bug', u'finger', u'handle', u'beam', u'bean', u'whilst', u'contain', u'shake', u'attempting', u'merry', u'yawning', u'sniff', u'swimmer', u'commuter', u'bull', u'smoking', u'plain', u'cross', u'member', u'binoculars', u'underneath', u'well', u'fighting', u'bandanna', u'rocket', u'pay-phone', u'five', u'puppy', u'like', u'campfire', u'shaking', u'construction', u'bun', u'partially', u'flip', u'placed', u'bearing', u'pinatum', u'pie', u'boardwalk', u'pit', u'star', u'baked']\n\n STOPWORDS = ['none','inside', 'near', 'one', 'two', 'three', 'day', 'front', u'i', u'me', u'my', u'myself', u'we', u'our', u'ours', u'ourselves', u'you', u'your', u'yours', u'yourself', u'yourselves', u'he', u'him', u'his', u'himself', u'she', u'her', u'hers', u'herself', u'it', u'its', u'itself', u'they', u'them', u'their', u'theirs', u'themselves', u'what', u'which', u'who', u'whom', u'this', u'that', u'these', u'those', u'am', u'is', u'are', u'was', u'were', u'be', u'been', u'being', u'have', u'has', u'had', u'having', u'do', u'does', u'did', u'doing', u'a', u'an', u'the', u'and', u'but', u'if', u'or', u'because', u'as', u'until', u'while', u'of', u'at', u'by', u'for', u'with', u'about', u'against', u'between', u'into', u'through', u'during', u'before', u'after', u'above', u'below', u'to', u'from', u'up', u'down', u'in', u'out', u'on', u'off', u'over', u'under', u'again', u'further', u'then', u'once', u'here', u'there', u'when', u'where', u'why', u'how', u'all', u'any', u'both', u'each', u'few', u'more', u'most', u'other', u'some', u'such', u'no', u'nor', u'not', u'only', u'own', u'same', u'so', u'than', u'too', u'very', u's', u't', u'can', u'will', u'just', u'don', u'should', u'now', 'background', '__background__', '']\n \n\n unselected_words = [u'', u'pickelhaube', u'enjoy', u'new', u'studio', u'kissing', u'mantis', u'pastum', u'rafting', u'golden', u'waffle iron', u'watering', u'overhead', u'shoot', u'feature', u'machine', u'pizza', u'attempt', u'third', u'tulip', u'jungle', u'wind', u'fig', u'band', u'bone', u'free', u'bouncing', u'boarding', u'tackled', u'__background__', u'gymnasium', u'gymnastic apparatus', u'pineapple', u'folded', u'rice', u'sunglasses', u'cushion', u'net', u'covering', u'pretzel', u'steam', u'santum', u'fair', u'sail', u'score', u'toothbrush', u'loaded', u'fry', u'life', u'glider', u'balance', u'cone', u'containing', u'beside', u'wheel', u'rain', u'spaghetti squash', u'thi', u'left', u'photographer', u'forested', u'vanity', u'shoulder', u'pavement', u'officer', u'creek', u'dead', u'slide rule', u'dunking', 
u'horizon', u'raised', u'fabric', u'fight', u'way', u'war', u'landing', u'umpire', u'fashioned', u'dimly', u'topped', u'setting', u'sling', u'potato', u'bottom', u'dance', u'crocodile', u'ice', u'string', u'dig', u'gun', u'tarmac', u'falling', u'french', u'wait', u'decker', u'earphone', u'chip', u'get', u'staircase', u'wakeboarder', u'wheelchair', u'pulled', u'polouse', u'still', u'curly', u'scaling', u'lunch', u'meat loaf', u'shown', u'opened', u'space', u'mess', u'headband', u'place', u'pelican', u'ring', u'sheet', u'bite', u'hug', u'wide', u'lick', u'pastry', u'breakfast', u'take', u'topping', u'multiple', u'knee', u'bicyclist', u'sale', u'professional', u'german', u'snack', u'stair', u'ping-pong ball', u'snowsuit', u'sport', u'tackling', u'skyscraper', u'checkered', u'restroom', u'tour', u'nearby', u'foggy', u'bmx', u'newspaper', u'mound', u'chopstick', u'foam', u'driven', u'passing', u'mohawk', u'rest', u'instrument', u'chainsaw', u'towel', u'facing', u'audience', u'laughing', u'served', u'clau', u'diving', u'go-kart', u'tube', u'throw', u'harness', u'strip', u'racquet', u'prepare', u'low', u'pitcher', u'cardoon', u'pull', u'arranged', u'strawberry', u'deep', u'cream', u'rubber', u'trash', u'midair', u'peak', u'remote', u'suburban', u'disc', u'follow', u'potpie', u'gymnast', u'enjoying', u'stool', u'leaping', u'action', u'taken', u'flag', u'mounted', u'grill', u'wrestler', u'marble', u'pony', u'backpacking', u'breaking', u'fungus', u'shade', u'egg', u'style', u'carpeted', u'sauce', u'snowball', u'abacus', u'foreground', u'base', u'circuit', u'leading', u'airborne', u'hotel', u'leotard', u'kind', u'double', u'scabbard', u'bride', u'stall', u'blond', u'cave', u'zucchini', u'electric', u'cigarette', u'sponsored', u'shepherd', u'dandelion', u'catcher', u'movie', u'recently', u'floaty', u'chambered nautilus', u'hitting', u'racer', u'leaning', u'chase', u'funny', u'used', u'snail', u'pomegranate', u'cucumber', u'stack', u'center', u'grind', u'bin', u'formal', u'shaped', u'signal', u'parade', u'bounce', u'step', u'plaza', u'range', u'slouse', u'block', u'downhill', u'jockey', u'retrieving', u'atop', u'cloth', u'crane', u'skull', u'rainy', u'tarp', u'black-footed ferret', u'nice', u'prepared', u'hot pot', u'land', u'fresh', u'hello', u'wrestle', u'kitty', u'spoon', u'muzzled', u'rack', u'smaller', u'hose', u'giving', u'attire', u'leaving', u'chiton', u'limb', u'singing', u'frog', u'crab', u'porch', u'donut', u'crossed', u'tied', u'tomato', u'chasing', u'scenic', u'beneath', u'shaking', u'boarder', u'hippopotamus', u'wading', u'sea_anemone', u'wrapped', u'shallow', u'steep', u'bagel', u'gather', u'pipe', u'construction', u'painting', u'chicken', u'jar', u'bug', u'finger', u'handle', u'beam', u'bean', u'whilst', u'contain', u'costume', u'frame', u'shake', u'attempting', u'merry', u'yawning', u'sniff', u'swimmer', u'muddy', u'commuter', u'bull', u'smoking', u'plain', u'cross', u'member', u'binoculars', u'underneath', u'well', u'fighting', u'bandanna', u'rocket', u'pay-phone', u'five', u'puppy', u'like', u'campfire', u'saddle', u'hi', u'bun', u'ha', u'partially', u'flip', u'placed', u'bearing', u'pinatum', u'pie', u'boardwalk', u'pit', u'star', u'baked', u'smoke', u'hospital', u'type', u'hole', u'wand', u'chocolate sauce', u'haired', u'onto', u'drawn', u'wear', u'loaf of bread', u'beer', u'mushroom', u'lift', u'make', u'mother', u'cowboy', u'fork', u'otter', u'playpen', u'alone', u'hamburger', u'bottlecap', u'soup', u'cutter', u'square', u'friend', u'scuba', u'hockey', u'wheelie', u'picnic', 
u'tug', u'squash', u'case', u'inflatable', u'railroad', u'competition', u'slice', u'broken', u'jeep', u'trying', u'apartment', u'chewing', u'grasshopper', u'guacamole', u'splash', u'male', u'dishrag', u'kayaking', u'acorn', u'snowbank', u'clean', u'hit', u'batter', u'kick', u'jewelry', u'fighter', u'cooked', u'putting', u'try', u'wallet', u'mustache', u'artichoke', u'spaghetti sauce', u'crossing', u'retriever', u'veggy', u'produce', u'darkened', u'kiddie', u'mashed potato', u'closed', u'canopy', u'runway', u'vintage', u'fishing', u'doughnut', u'onion', u'leap', u'rodeo', u'cricket', u'made', u'closeup', u'chew', u'sliced', u'hot', u'deck', u'French horn', u'clothe', u'goggle', u'rowing', u'milk can', u'post', u'outstretched', u'chocolate', u'making', u'course', u'hula', u'carry', u'upside', u'desktop', u'lobster', u'suitcase', u'crossbone', u'ty', u'sea slug', u'polecat', u'sandwich', u'racetrack', u'lettuce', u'cockroach', u'toward', u'eaten', u'blender', u'giant', u'atv', u'big', u'holster', u'splashing', u'commercial', u'tunnel', u'bend', u'meter', u'including', u'badger', u'beach wagon', u'beard', u'beak', u'controller', u'match', u'buckle', u'hiker', u'barometer', u'bread', u'serve', u'object', u'stadium', u'tank', u'waterfall', u'stream', u'neck', u'serving', u'manhole cover', u'pitch', u'pistol', u'dribble', u'isopod', u'transit', u'dragonfly', u'huge', u'backyard', u'foot', u'jet', u'dancing', u'custard apple', u'porcupine', u'assorted', u'rope', u'cut', u'showing', u'lemon', u'armadillo', u'salad', u'carrot', u'biting', u'bee', u'hammer', u'lens cap', u'cauliflower', u'kicking', u'denim', u'marmot', u'nintendo', u'fireplace', u'landscape', u'turn', u'hoop', u'wedding', u'eggnog', u'antique', u'bow', u'winter', u'stacked', u'purse', u'beaver', u'kneeling', u'island', u'slot machine', u'Christmas stocking', u'public', u'narrow', u'ladybug', u'stopped', u'burrito', u'necked', u'cheese', u'crayfish', u'single', u'getting', u'tan', u'lined', u'handstand', u'letter opener', u'pencil box', u'doorway', u'leafhopper', u'residential', u'slug', u'eat', u'carriage', u'end', u'lap', u'distance', u'mink', u'sleeping bag', u'time', u'container', u'stunt', u'drife', u'broccoli', u'docked', u'structure', u'cooker', u'go', u'aircraft carrier', u'pudding', u'tape player', u'outfit', u'coaster', u'reaching', u'meat', u'splashed', u'hair slide', u'roller', u'submarine', u'toaster', u'dining', u'rotisserie', u'football', u'spindle', u'christma', u'thimble', u'giant panda', u'pedestrian', u'compass', u'squirrel', u'sea urchin', u'hotdog', u'peach', u'warplane', u'oil filter', u'waiting', u'hip', u'jaguar', u'mortar', u'gear', u'sprinkler', u'beer bottle', u'gondola', u'half', u'stainless', u'military', u'electronic', u'bat', u'handrail', u'perform', u'coffee maker', u'flat', u'round', u'meal', u'telephone', u'pool table', u'seagull', u'hermit crab', u'fancy', u'obstacle', u'honeycomb', u'gravel', u'ladle', u'farm', u'crossword puzzle', u'steel', u'drink', u'pepper', u'tongue', u'owl', u'rule', u'gym', u'seated', u'monotreme', u'cattle', u'water tower', u'vegetable', u'eel', u'variou', u'messy', u'raft', u'castle', u'fire', u'bib', u'skunk', u'gray', u\"carpenter's kit\", u'wombat', u'carnival', u'equipment', u'mousetrap', u'joystick', u'golf ball', u'shoji', u'banana', u'clear', u'sloth', u'glove', u'reel', u'desert', u'necklace', u'ear', u'digging', u'rural', u'asian', u'school', u'wreck', u'coffee', u'hydrant', u'mouse', u'mid', u'row', u'puddle', u'engine', u'mongoose', u'stopwatch', u'walkway', 
u'past', u'beacon', u'koala', u'lip', u'gold', u'scooter', u'puck', u\"potter's wheel\", u'ly', u'oilseed', u'tire', u'drum', u'party', u'radio telescope', u'worm', u'lay', u'magnetic disk', u'bar', u'butterfly', u'dinner', u'birthday', u'power drill', u'saltshaker', u'thing', u'ant', u'lantern', u'hard', u'weasel', u'ridden', u'paddlewheel', u'drilling platform', u'climber', u'safe', u'shower', u'airship', u'cassette player', u'printer', u'wooden spoon', u'bassoon', u'reflection', u'scissor', u'apiary', u'ice cream', u'rider', u'boathouse', u'mud', u'corn', u'guinea pig', u'snow leopard', u'mailbox', u'cement', u'bakery', u'taking', u'variety', u'swan', u'velvet', u'couple', u'fridge', u'strainer', u'dirty', u'screwdriver', u'jigsaw puzzle', u'device', u'alligator', u'oven', u'silver', u'urban', u'country', u'opener', u'leather', u'barrel', u'duck', u'drumstick', u'cake', u'ambulance', u'pencil sharpener', u'barrier', u'safety pin', u'right', u'baseball', u'beetle', u'ax', u'cassette', u'assortment', u'entree', u'armor plate', u'going', u'cart', u'can opener', u'curve', u'pointing', u'dribbling', u'sock', u'home', u'catching', u'church', u'mosque', u'measuring cup', u'striped', u'throne', u'skating', u'sundial', u'CD player', u'grille', u'brushing', u'jersey', u'plunger', u'conch', u'several', u'shaker', u'tile', u'stretcher', u'tower', u'plane', u'salamander', u'lock', u'platform', u'airport', u'hamster', u'graffiti', u'jackfruit', u'cabbage', u'blowing', u'kitten', u'yurt', u'cannon', u'powder', u'sea cucumber', u'sea cow', u'dinosaur', u'racing', u'primate', u'wii', u'skateboarding', u'blanket', u'mug', u'cap', u'challenging', u'throwing', u'library', u'quill', u'trench coat', u'microwave', u'tusker', u'cluttered', u'apple', u'duplicator', u'broom', u'wet', u'altar', u'show', u'heater', u'radiator', u'cargo ship', u'spatula', u'screw', u'neck brace', u'flute', u'peacock', u'sewing machine', u'reading', u'dough', u'rifle', u'long', u'penguin', u'playground', u'photograph', u'luggage', u'plow', u'item', u'factory', u'starfish', u'fire engine', u'locomotive', u'piggy bank', u'empty', u'scale', u'plate rack', u'graze', u'cutting', u'feeding', u'cooking', u'rapid', u'ledge', u'business', u'colored', u'forklift', u'boot', u'wing', u'remote control', u'trampoline', u'gas pump', u'space bar', u'snorkel', u'book', u'microscope', u'rain barrel', u'pair', u'Old World buffalo', u'airplane', u'creature', u'knee pad', u'whale', u'birdhouse', u'oxygen mask', u'bag', u'sailboat', u'mat', u'town', u'using', u'rugby ball', u'staring', u'shopping basket', u'binder', u'team', u'sailing vessel', u'ox', u'leopard', u'shield', u'full', u'Band Aid', u'mountaintop', u'crate', u'modem', u'family', u'tennis ball', u'barn', u'work', u'formation', u'barrow', u'goose', u'syringe', u'soap dispenser', u'kite', u'appliance', u'solar dish', u'lizard', u'paddling', u'cardigan', u'sink', u'control', u'toddler', u'mortarboard']\n\n useless_words = ['holding','hold' ,'wearing', 'wear' , 'standing','sitting', 'stand', 'sit' , 'smiling', 'smile', 'clothing', 'shirt', \"next\", 'posing', 'playing']\n abstract_words = ['beautiful', 'young']\n color_words = ['black', 'white', 'red', 'blue', 'brown']\n\n if choice == 1:\n return STOPWORDS\n\n STOPWORDS += unselected_words\n STOPWORDS += useless_words\n STOPWORDS += low_acc_words\n #STOPWORDS += color_words\n #STOPWORDS += abstract_words\n return STOPWORDS", "def choose(self, choice):\n if self.available(choice):\n self.select(choice)", "def chosen():\n wordList = loadWords()\n w 
= random.choice(wordList)\n word = w[:-1]\n return word", "def return_word():\n wordlist = load_words()\n word = random.choice(wordlist)\n return word", "def lesk(self, tweet, word, created_at, tweet_id):\n ikb_obj = self.database.get(self.collection_of_slangs, field='word', value=word)[0]\n ikb_id = ikb_obj[\"_id\"]\n\n dicts = ikb_obj['payload']\n elements = [value for dictt, item in dicts.items() for value in item]\n definitions, usages = self.extract_def_use(elements)\n if len(definitions) == 0:\n raise ValueError(\"Empty lists of definitions and usages\")\n usages_vec = self.model.vectorize_sentences(usages)\n tweet_vec = self.model.vectorize_sentences([tweet])\n cs = np.array(cosine_similarity(usages_vec, tweet_vec))\n ind_max = np.argmax(cs)\n\n best_definition = definitions[ind_max]\n dictionary_of_best_definition = EnrichmentLayer.find_name_of_dict_by_definition(dicts, best_definition)\n try:\n filter_for_search = {\"dictionary_title\": dictionary_of_best_definition, \"definition\": best_definition}\n document = self.database.get(self.collection_used_slang, filter=filter_for_search)[0]\n tweets = document['tweets']\n tweets.append(tweet_id)\n self.database.update(self.collection_used_slang, \"ikb_id\", ikb_id, {\"tweets\": tweets}, upsert=False)\n id_of_insert = document['_id']\n except IndexError:\n document = {'ikb_id': ikb_id, 'word': word, 'dictionary_title': dictionary_of_best_definition,\n 'definition': best_definition, 'created_at': created_at, 'tweets': [tweet_id]}\n id_of_insert = self.database.insert(self.collection_used_slang, document)\n\n return self.replace_word(tweet.split(), word, best_definition), best_definition, id_of_insert", "def get_spell_tag(page):\n soup = BeautifulSoup(page.text, 'html.parser')\n spell_tag = soup.find('a', {'class': 'spell'})\n\n return spell_tag", "def kindler (self,filename=''):\r\n\r\n import kindleflex as kindle\r\n from numbertools import rom_to_int, is_roman\r\n\r\n\r\n #To suspend spelling, while keeping default value\r\n check_spelling_was = self.check_spelling\r\n self.check_spelling = False\r\n\r\n\r\n YESTERMS_PLUS = YESTERMS+[' ',' ']\r\n\r\n #To load file\r\n while not filename:\r\n filename = input('FILENAME?')\r\n\r\n try:\r\n note_text = file_access.get_text_file(filename)\r\n except:\r\n display.noteprint(('ATTENTION','File cannot be found!'))\r\n note_text = ''\r\n if 'Highlight (' in note_text:\r\n #If there are highlights and notes\r\n\r\n note_obj = kindle.GetNotes(note_text)\r\n note_obj.set_for_kindle()\r\n\r\n else:\r\n note_obj = kindle.GetNotes(note_text)\r\n note_obj.set_for_kindle()\r\n note_obj.set_divider('Note -')\r\n\r\n note_iterator = note_obj.return_iterator()()\r\n active_qualities = set()\r\n\r\n\r\n if input('SHOW TEXT?') in YESTERMS_PLUS:\r\n print(note_text)\r\n\r\n for qual in note_obj.qualities:\r\n if input('Include sequence key for '+qual+'?') in YESTERMS_PLUS:\r\n active_qualities.add(qual)\r\n\r\n additional_keys = input('Add additional keys?') in YESTERMS_PLUS\r\n annotation_before = input('Add annotation before?') in YESTERMS_PLUS\r\n annotation_after = input('Add annotation after?') in YESTERMS_PLUS\r\n query_index = input('Query index position?') in YESTERMS_PLUS\r\n only_notes = input('Only include highlights with notes attached?')\r\n temp_c_i = input('Restrict to the following colors?')\r\n include_part = input('Include part?') in YESTERMS_PLUS\r\n if temp_c_i:\r\n colors_to_include = set(x.strip() for x in temp_c_i.split(','))\r\n else:\r\n colors_to_include = set()\r\n\r\n if not 
query_index:\r\n starting_index = ''\r\n while not starting_index:\r\n starting_index = input('Starting index position?')\r\n try:\r\n starting_index = Index(starting_index)\r\n except:\r\n pass\r\n else:\r\n starting_index = Index(-1)\r\n\r\n\r\n\r\n go_on = True\r\n note = ''\r\n count_down = 0\r\n automatic = False\r\n temp_i = ''\r\n display.noteprint(('NUMBER OF NOTES in COLLECTION',str(note_obj.size)))\r\n\r\n current_iteration=0\r\n while True:\r\n current_iteration+=1\r\n try:\r\n note = next(note_iterator)\r\n except:\r\n display.noteprint(('ATTENTION','FINISHED!'))\r\n break\r\n\r\n\r\n print(str(current_iteration)+'/'+str(note_obj.size))\r\n if count_down > 0:\r\n count_down -= 1\r\n\r\n else:\r\n\r\n new_keys = set()\r\n\r\n text = ''\r\n note_part = ''\r\n part = ''\r\n if 'TEXT' in note:\r\n text = note['TEXT']\r\n if 'NOTE' in note:\r\n note_part = note['NOTE']\r\n if 'highlightcolor' in note:\r\n highlight_color = note['highlightcolor']\r\n\r\n\r\n\r\n\r\n\r\n\r\n if not automatic and ((not colors_to_include or highlight_color in colors_to_include)\r\n and (not only_notes or note_part)):\r\n display.noteprint(('NUMBER OF NOTES in COLLECTION',str(note_obj.size)))\r\n display.noteprint(('CURRRENT POSITION',str(note_obj.position)))\r\n display.noteprint(('TEXT',text))\r\n display.noteprint(('NOTE',note_part))\r\n display.noteprint(('HIGHLIGHT COLOR',highlight_color))\r\n\r\n print_string = ''\r\n\r\n for qual in active_qualities:\r\n if qual in note:\r\n print_string += qual + ':' + note[qual] +', '\r\n if len(print_string)>1:\r\n print_string = print_string[0:-2]\r\n\r\n display.noteprint(('QAULITIES',print_string))\r\n\r\n temp_i = input('CREATE NOTE or quit to QUIT or NUMBERS to SKIP FORWARD or A(UTOMATIC) to add the rest of notes without querying')\r\n\r\n if temp_i.isnumeric():\r\n count_down = int(temp_i)\r\n if temp_i in ['A','AUTOMATIC']:\r\n\r\n automatic = True\r\n\r\n if temp_i in QUITTERMS:\r\n break\r\n\r\n\r\n elif (((not colors_to_include or highlight_color in colors_to_include)\r\n and (not only_notes or note_part))\r\n and (automatic or ((count_down == 0 and (temp_i in YESTERMS_PLUS or len(temp_i)>1 and temp_i[0]==' '))))):\r\n\r\n for qual in active_qualities:\r\n if qual in note:\r\n val = note[qual]\r\n if is_roman(val):\r\n val = str(rom_to_int(val))\r\n\r\n if qual == 'chapter':\r\n\r\n\r\n\r\n chapter_title = ''\r\n chapter = val\r\n\r\n if ':' in chapter:\r\n # THIS is specially designed for the kindle note format\r\n chapter_number, chapter_title = chapter.split(':')[0].strip(),chapter.split(':')[1].strip()\r\n if not chapter_number.isnumeric():\r\n try:\r\n chapter_number = str(rom_to_int(chapter_number.lower()))\r\n except:\r\n pass\r\n else:\r\n pass\r\n else:\r\n if not chapter.isnumeric() and not (part and include_part):\r\n\r\n part = chapter\r\n chapter_number = ''\r\n\r\n if chapter_number:\r\n new_keys.add('chapter@'+chapter_number)\r\n if chapter_title:\r\n new_keys.add('chaptertitle@'+chapter_title)\r\n else:\r\n new_keys.add(qual+'@'+val)\r\n\r\n if not automatic and additional_keys:\r\n for x in input('ADDITIONAL KEYS?').split(','):\r\n new_keys.add(x.strip())\r\n before, after = '',''\r\n if not automatic and annotation_before:\r\n before = input('ENTER ANNOTATION BEFORE?') +'/BREAK/'\r\n if not automatic and annotation_after:\r\n after = '/BREAK/' + input('ENTER ANNOTATION AFTER?')\r\n if note_part:\r\n note_part = '/BREAK/' + note_part\r\n\r\n\r\n\r\n if not automatic and query_index:\r\n new_index = None\r\n while not new_index:\r\n 
new_index = input('INDEX?')\r\n try:\r\n new_index = Index(new_index)\r\n except:\r\n pass\r\n else:\r\n new_index = starting_index\r\n\r\n final_text = before+text+after+note_part\r\n\r\n\r\n self.enter(ek=new_keys,\r\n et=final_text,\r\n right_at=query_index,\r\n ind=new_index)\r\n\r\n self.check_spelling = check_spelling_was", "def check_spellings(text):\n\n for word in vocabulary:\n text = correct(word, text, 0.7)\n return text", "def show_flashcard():\r\n random_key = choice(list(glossary))\r\n print('Define: ', random_key)\r\n input('Press return to see the correct definition')\r\n print(glossary[random_key])", "def on_text(self, char: str, game: type):", "def change_wordMeaning(self, conversation):\n if conversation == None:\n return\n\n # If no word was used in the last conversation\n if conversation.word == None and conversation.meaning != None:\n if self.random.random() <= self.model.new_word_rate: # Probability of 5% default\n new_word = self.create_word()\n while new_word in self.wordsuccess: # cannot have one word with multiple meanings\n new_word = self.create_word()\n print(\"New word:\", new_word)\n self.create_link(new_word, conversation.meaning)\n\n # If a word was used in the last conversation\n elif conversation.word != None:\n self.wordsuccess[conversation.word].append(conversation.success)\n\n # if the word was used R times, there is a chance it will be dropped\n if len(self.wordsuccess[conversation.word]) >= self.model.change_rate:\n if self.do_change(self.wordsuccess[conversation.word]):\n self.delete_link(conversation.word) # forget word\n else:\n self.wordsuccess[conversation.word] = [] # reset success", "def choose_option(friendly,enemy,opt1=\"Fight\",opt2=\"Bag\",opt3=\"Pokemon\",opt4 = \"Run\"):\n background_color = blit_background()[1]\n blit_friendly(friendly)\n blit_enemy(enemy)\n blit_health(friendly,enemy)\n pygame.display.update()\n pause(friendly,enemy,3) #to stop the click from 1st menu selecting option in second\n mouse_pos = 0,0\n while True:\n event_check(False, friendly,enemy)\n blit_background()\n opt_1 = pygame.draw.rect(screen,((background_color)),(60,540,300,70))\n blit_text(opt1,(70,545))\n opt_3 = pygame.draw.rect(screen,(background_color),(60,615,300,70))\n blit_text(opt2,(70,620))\n opt_2 = pygame.draw.rect(screen,(background_color),(360,540,300,70))\n blit_text(opt3,(370,545))\n opt_4 = pygame.draw.rect(screen,(background_color),(360,615,300,70))\n blit_text(opt4,(370,620))\n mouse_pos = get_click()\n blit_friendly(friendly)\n blit_enemy(enemy)\n blit_health(friendly,enemy)\n blit_text(\"What will you do?\",(800,580))\n pygame.display.update()\n if opt_1.collidepoint(mouse_pos):\n option = 1\n break\n elif opt_2.collidepoint(mouse_pos):\n option = 2\n break\n elif opt_3.collidepoint(mouse_pos):\n option = 3\n break\n elif opt_4.collidepoint(mouse_pos):\n option = 4\n break\n pygame.display.update()\n return option", "def tell_options(self, options):\n\n # store, in case we need to repeat\n self.last_options = options\n\n i = 0\n for d in ('left', 'forward', 'right', 'back'):\n i += 1\n if d in options:\n d_string = labyrinth_text.directions[d]\n nextpos = options[d]\n room_name, room_loc, room_dir = self.rooms[nextpos]\n\n if d == 'back':\n # dead ens are special\n if len(options) == 1:\n txt = labyrinth_text.deadend\n else:\n txt = labyrinth_text.youcangoback\n else:\n txt = labyrinth_text.youcango\n\n txt = txt.format(d_string, room_dir, room_name)\n #~ print txt\n \n #self.audio.play_presynthesized(nextpos * 5 + i)\n #~ 
self.audio.play_sound_file(self.sounds_direction[nextpos][d])\n self.audio.synthesize_and_play(txt)", "def tell_position(self, curpos):\n \n # store, in case we need to repeat\n self.last_curpos = curpos\n\n # fetch room strings\n room_name, room_loc, room_dir = self.rooms[curpos]\n \n # build utterance text\n txt = labyrinth_text.youare.format(room_loc, room_name)\n\n #~ print '-' * 70\n #~ print txt\n \n #self.audio.play_presynthesized(curpos * 5)\n #~ self.audio.play_sound_file(self.sounds_location[curpos])\n self.audio.synthesize_and_play(txt)", "def spell_corrector(words):\n de = enchant.Dict(\"de_DE\")\n en = enchant.Dict(\"en_GB\")\n new_words = []\n for w in words:\n if ( de.check(w) | en.check(w) ):\n new_words += [ w ]\n else:\n sug = de.suggest(w)\n if len(sug) > 0: new_words += [ sug[0] ]\n else: new_words += [ w ]\n return new_words", "def as_you_type_undo(self, event: Event) -> None:\n if not self.word:\n g.es('[no previous word]')\n return\n self.as_you_type_replace(self.word)", "def __dis_context__(self, context, word):\n senses = self.vs.get_senses(word, self.ignore_case)\n if self.verbose:\n print(\"Senses of a target word:\")\n print(senses)\n\n if len(senses) == 0: # means we don't know any sense for this word\n return None\n\n # collect context vectors\n vctx = [self.vc[c] for c in context]\n\n if len(vctx) == 0: # means we have no context\n return None\n # TODO: better return most frequent sense or make random choice\n\n # filter context vectors, if aplicable\n if self.filter_ctx >= 0:\n vctx = self.__filter__(vctx, senses, self.filter_ctx)\n\n if self.ctx_method == 'prob':\n avg_context = np.mean(vctx, axis=0)\n scores = [self.__logprob__(avg_context, self.vs[sense]) for sense, prob in senses]\n\n elif self.ctx_method == 'sim':\n avg_context = np.mean(vctx, axis=0)\n scores = [self.__cosine_sim__(avg_context, self.vs[sense]) for sense, prob in senses]\n if self.verbose:\n print(\"Sense probabilities:\")\n print(scores)\n\n else:\n raise ValueError(\"Unknown context handling method '%s'\" % self.ctx_method)\n\n # return sense (word#id), scores for senses\n return senses[np.argmax(scores)][0], scores", "async def choose(self, ctx, *args):\n choicelist = []\n for choice in args:\n choicelist.append(choice)\n result = random.choice(choicelist)\n await ctx.send(\"Like it or not, I choose {}!\".format(result))", "def func(self):\n try:\n if not self.switches or \"all\" in self.switches:\n self.list_favor()\n elif \"set\" in self.switches or \"add\" in self.switches:\n self.add_favor()\n elif \"remove\" in self.switches:\n self.remove_favor()\n else:\n raise CommandError(\"Invalid switch.\")\n except CommandError as err:\n self.msg(err)\n else:\n self.mark_command_used()", "def select_editor_contextual(menuName, onselected=None, *args, **kwargs):\n\n process_all_events()\n windows = Gtk.Window.list_toplevels()\n click_in_text(GPS.EditorBuffer.get().current_view().cursor(), button=3)\n\n def internal_onselected(windows):\n close_contextual(windows)\n process_all_events()\n if onselected:\n onselected(*args, **kwargs)\n\n GLib.idle_add(internal_onselected, windows)\n activate_contextual(windows, menuName)", "def setWordKnown(self):\n self.wordKnown = ''.join(['_ ' if w not in self.guessedRight else w for w in self.getWord()])", "def get_word(self, roll):\n self.passphrase.append(self.wordlist[int(roll)]);", "def give(self):\n if self.location.character:\n item = input(f\"What do you want to give to {self.location.character.name}?\\n>\")\n if item in self.backpack:\n if 
self.location.character.give(item):\n if isinstance(self.location.character, Friend):\n loot = self.location.character.possession\n self.backpack[loot.name] = loot\n self.location.character.treat = None\n self.location.character.possession = None\n del self.backpack[item]\n return f\"{self.location.character.name} accepted your gift, and gave you {loot}\"\n if isinstance(self.location.character, Enemy):\n name = self.location.character.name\n self.location.character = None\n del self.backpack[item]\n return f\"You fend off {name} with {item}\"\n else:\n return f\"It does not accept {item}\"\n else:\n return f\"{self.location.character.name} does not like {item}\"\n else:\n return \"You don't have this\"\n else:\n return \"There is no one here\"", "def select_word(options):\n options = list(set(options)) # Remove duplicate words\n selection = []\n if len(options) > 10:\n for n in range(10):\n word = random.choice(options)\n while word in selection:\n word = random.choice(options)\n selection.append(word)\n else:\n selection = options\n # Print selection options\n for n in range(len(selection)):\n index = n + 1\n print(\"{}. {}\".format(index, selection[n]))\n choice = input(\"Choice: \")\n if choice == 'x':\n sys.exit()\n if choice == 'p':\n choice = input(\"Word selection: \")\n word = selection[int(choice) - 1]\n return add_punctuation(word)\n choice = int(choice) - 1\n word = selection[choice]\n return word", "def show_word(self):\n self.display_word = len(self.chosen_word) * \"_ \"\n Donatello.draw_word(self.display_word)\n return self.display_word", "def func(self):\n char = self.character\n clothing = char.search(self.args, candidates=char.contents)\n if not clothing:\n return\n if not clothing.db.worn:\n char.msg(\"You're not wearing that!\")\n return\n if clothing.db.covered_by:\n char.msg(\"You have to take off %s first.\" % clothing.db.covered_by.name)\n return\n clothing.remove(char)", "def ability_3(self,target):\r\n damage = (self.get_dexterity()+self.get_strength())\r\n target.receive_damage(damage)", "def hook(self, sentence, words):\n pass", "def correct_spelling(sample):\n words = nltk.word_tokenize(sample[\"full_text\"])\n for word in words:\n for char, duplicate_char in duplicate_chars.items():\n word = re.sub(duplicate_char, char, word)\n #sample[\"full_text\"] = ' '.join([spell(w)] for w in tweet.split()])\n sample[\"full_text\"] = ' '.join([w for w in words])\n return sample", "def validate(self, context):\n _logger.info(\"SpellDictionary EN validated\")\n self.dictionary = {\"hello\" , \"world\", \"welcome\", \"to\", \"the\", \"ipopo\", \"tutorial\"}", "def spelling(self):\r\n return conf.lib.clang_getTranslationUnitSpelling(self)", "async def optin(self, ctx):\n optout.delete_one({\"_id\": ctx.author.id})\n await ctx.send(f\"You have **opted into** A Sound Mood. To leave the program, use ?optout.\")", "def text_to_speech(entry):\n text = entry.get_text()\n if text:\n subprocess.call([\"milena_say\", text])", "def custom_choice(update: Update, context: CallbackContext) -> int:\r\n update.message.reply_text(\r\n 'Primero agrega un tรญtulo a tu comentario, por ejemplo \"Atenciรณn\"'\r\n )\r\n\r\n return TYPING_CHOICE" ]
[ "0.6842631", "0.6699569", "0.6081688", "0.6040999", "0.57929784", "0.578911", "0.57451016", "0.56992674", "0.5644914", "0.56293833", "0.5603479", "0.5585289", "0.5547602", "0.5472606", "0.54705864", "0.5456883", "0.5443241", "0.54431385", "0.5424684", "0.5421093", "0.5412749", "0.54012674", "0.53954947", "0.5374056", "0.53507566", "0.53462225", "0.53375745", "0.5323557", "0.53062165", "0.53018856", "0.53013164", "0.52593726", "0.5253459", "0.52485955", "0.5235575", "0.52268493", "0.5222691", "0.52067465", "0.52057266", "0.5181225", "0.51790464", "0.515173", "0.514307", "0.5134489", "0.5131516", "0.51257706", "0.51211137", "0.512031", "0.5115969", "0.5098509", "0.5093334", "0.50533205", "0.5048704", "0.504676", "0.5020836", "0.5020012", "0.5000048", "0.4975003", "0.49614215", "0.49576315", "0.49552608", "0.4951181", "0.49380133", "0.49346855", "0.49258614", "0.49197453", "0.49148905", "0.4909106", "0.49051422", "0.49040082", "0.4896293", "0.48878586", "0.48795122", "0.48738882", "0.48606056", "0.48534322", "0.48498428", "0.4845217", "0.48425764", "0.48350492", "0.4830105", "0.48271686", "0.482631", "0.48242152", "0.4810714", "0.48045015", "0.47981724", "0.47972047", "0.47969756", "0.4796766", "0.47925413", "0.47923484", "0.47901306", "0.47899285", "0.47883424", "0.47829214", "0.47785038", "0.47777092", "0.47738042", "0.47668144" ]
0.69981647
0
Uses the selected spell
async def upgrade( event, spell: ('str', 'select a spell'), ): spell = get_spell_or_abort(spell) return f'{event.user:f} just upgraded their {spell}; It was a *next* level move!'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def cast(\n event,\n spell: ('str', 'select a spell'),\n):\n spell = get_spell_or_abort(spell)\n \n return f'{event.user:f} just used {spell}; It is super effective!'", "def func(self):\n spell_list = sorted(SPELLS.keys())\n args = self.args.lower()\n args = args.strip(\" \")\n caller = self.caller\n spell_to_learn = []\n\n if not args or len(args) < 3: # No spell given\n caller.msg(\"Usage: learnspell <spell name>\")\n return\n\n for spell in spell_list: # Match inputs to spells\n if args in spell.lower():\n spell_to_learn.append(spell)\n\n if spell_to_learn == []: # No spells matched\n caller.msg(\"There is no spell with that name.\")\n return\n if len(spell_to_learn) > 1: # More than one match\n matched_spells = \", \".join(spell_to_learn)\n caller.msg(\"Which spell do you mean: %s?\" % matched_spells)\n return\n\n if len(spell_to_learn) == 1: # If one match, extract the string\n spell_to_learn = spell_to_learn[0]\n\n if spell_to_learn not in self.caller.db.spells_known: # If the spell isn't known...\n caller.db.spells_known.append(spell_to_learn) # ...then add the spell to the character\n caller.msg(\"You learn the spell '%s'!\" % spell_to_learn)\n return\n if spell_to_learn in self.caller.db.spells_known: # Already has the spell specified\n caller.msg(\"You already know the spell '%s'!\" % spell_to_learn)\n \"\"\"\n You will almost definitely want to replace this with your own system\n for learning spells, perhaps tied to character advancement or finding\n items in the game world that spells can be learned from.\n \"\"\"", "def cast_spell(self, magic):\n title = 'Select type of spell to cast'\n options = self.stats['powers'].keys()\n option_display = []\n for power in options:\n option_display.append(\n \"%s x %d\" % (\n power,\n self.stats['powers'][power]\n )\n )\n choice = self.present_menu(option_display, title)\n self.stats['powers'][options[choice]] -= 1\n if self.stats['powers'][options[choice]] == 0:\n self.stats['powers'].pop(options[choice], None)\n if options[choice] in self.stats['opponent']['powers']:\n print(\n 'A %s spell increases your magic during this trial' % (\n options[choice]\n )\n )\n magic += 1\n print('Casting a spell...')\n self.stats['active'].append(self.stats['hand'].pop())\n return magic", "def __spellCheck(self):\n aw = self.activeWindow()\n if aw:\n aw.checkSpelling()", "def choose_word():\n pass", "def cast_spell(self, mp_needed: int, spell: str) -> str:\n if self.mp >= mp_needed:\n new_mp = self.mp - mp_needed\n self.mp = new_mp\n return f'{self.name} has successfully cast {spell} and now has {self.mp} MP!'\n return f'{self.name} does not have enough MP to cast {spell}!'", "def food_selected(self, arg):\n\t\tfood = fooditemdao.retrieve_food(self.selected_food.get())\n\t\tself.lbl_unit.config(text=food.info['unit'])", "def option_changed(self,*args):\r\n self.word = self.var.get()\r\n self.find_word(self.word)", "def unlock(self, with_spell=False):\n\n if not with_spell:\n print(\"You need to cast a spell first.\")\n else:\n print(with_spell)\n super(DoorNeedingSpell, self).unlock()", "def openSpellTab(self, event: Event = None) -> None:\n if g.unitTesting:\n return\n c = self.c\n log = c.frame.log\n tabName = 'Spell'\n if log.frameDict.get(tabName):\n log.selectTab(tabName)\n else:\n log.selectTab(tabName)\n self.handler = SpellTabHandler(c, tabName)\n # Bug fix: 2013/05/22.\n if not self.handler.loaded:\n log.deleteTab(tabName)\n # spell as you type stuff\n self.suggestions: list[str] = []\n self.suggestions_idx: int = None\n self.word: str 
= None\n self.spell_as_you_type = False\n self.wrap_as_you_type = False", "def my_spell(word):\r\n from autocorrect import spell\r\n \r\n corrected_word = ''\r\n rescued_typo = 0\r\n \r\n if len(word)>1: # one letter word are not considered \r\n \r\n # try to correct typo\r\n if is_typo(word): \r\n print('typo: ' + word)\r\n word = spell(word)\r\n print('autocorrected typo: ' + word)\r\n\r\n if not is_typo(word): \r\n rescued_typo = 1\r\n corrected_word = word\r\n else:\r\n corrected_word = word\r\n\r\n return corrected_word, rescued_typo", "def spell_correction(self, tweet):\n return self.spell_correct.correct(tweet)", "def func(self):\n caller = self.caller\n\n if not self.lhs or len(self.lhs) < 3: # No spell name given\n caller.msg(\"Usage: cast <spell name> = <target>, <target2>, ...\")\n if not caller.db.spells_known:\n caller.msg(\"You don't know any spells.\")\n return\n else:\n caller.db.spells_known = sorted(caller.db.spells_known)\n spells_known_msg = \"You know the following spells:|/\" + \"|/\".join(\n caller.db.spells_known\n )\n caller.msg(spells_known_msg) # List the spells the player knows\n return\n\n spellname = self.lhs.lower() # noqa - not used but potentially useful\n spell_to_cast = []\n spell_targets = []\n\n if not self.rhs:\n spell_targets = []\n elif self.rhs.lower() in [\"me\", \"self\", \"myself\"]:\n spell_targets = [caller]\n elif len(self.rhs) > 2:\n spell_targets = self.rhslist\n\n for spell in caller.db.spells_known: # Match inputs to spells\n if self.lhs in spell.lower():\n spell_to_cast.append(spell)\n\n if spell_to_cast == []: # No spells matched\n caller.msg(\"You don't know a spell of that name.\")\n return\n if len(spell_to_cast) > 1: # More than one match\n matched_spells = \", \".join(spell_to_cast)\n caller.msg(\"Which spell do you mean: %s?\" % matched_spells)\n return\n\n if len(spell_to_cast) == 1: # If one match, extract the string\n spell_to_cast = spell_to_cast[0]\n\n if spell_to_cast not in SPELLS: # Spell isn't defined\n caller.msg(\"ERROR: Spell %s is undefined\" % spell_to_cast)\n return\n\n # Time to extract some info from the chosen spell!\n spelldata = SPELLS[spell_to_cast]\n\n # Add in some default data if optional parameters aren't specified\n if \"combat_spell\" not in spelldata:\n spelldata.update({\"combat_spell\": True})\n if \"noncombat_spell\" not in spelldata:\n spelldata.update({\"noncombat_spell\": True})\n if \"max_targets\" not in spelldata:\n spelldata.update({\"max_targets\": 1})\n\n # Store any superfluous options as kwargs to pass to the spell function\n kwargs = {}\n spelldata_opts = [\n \"spellfunc\",\n \"target\",\n \"cost\",\n \"combat_spell\",\n \"noncombat_spell\",\n \"max_targets\",\n ]\n for key in spelldata:\n if key not in spelldata_opts:\n kwargs.update({key: spelldata[key]})\n\n # If caster doesn't have enough MP to cover the spell's cost, give error and return\n if spelldata[\"cost\"] > caller.db.mp:\n caller.msg(\"You don't have enough MP to cast '%s'.\" % spell_to_cast)\n return\n\n # If in combat and the spell isn't a combat spell, give error message and return\n if spelldata[\"combat_spell\"] is False and self.rules.is_in_combat(caller):\n caller.msg(\"You can't use the spell '%s' in combat.\" % spell_to_cast)\n return\n\n # If not in combat and the spell isn't a non-combat spell, error ms and return.\n if spelldata[\"noncombat_spell\"] is False and self.rules.is_in_combat(caller) is False:\n caller.msg(\"You can't use the spell '%s' outside of combat.\" % spell_to_cast)\n return\n\n # If spell takes 
no targets and one is given, give error message and return\n if len(spell_targets) > 0 and spelldata[\"target\"] == \"none\":\n caller.msg(\"The spell '%s' isn't cast on a target.\" % spell_to_cast)\n return\n\n # If no target is given and spell requires a target, give error message\n if spelldata[\"target\"] not in [\"self\", \"none\"]:\n if len(spell_targets) == 0:\n caller.msg(\"The spell '%s' requires a target.\" % spell_to_cast)\n return\n\n # If more targets given than maximum, give error message\n if len(spell_targets) > spelldata[\"max_targets\"]:\n targplural = \"target\"\n if spelldata[\"max_targets\"] > 1:\n targplural = \"targets\"\n caller.msg(\n \"The spell '%s' can only be cast on %i %s.\"\n % (spell_to_cast, spelldata[\"max_targets\"], targplural)\n )\n return\n\n # Set up our candidates for targets\n target_candidates = []\n\n # If spell targets 'any' or 'other', any object in caster's inventory or location\n # can be targeted by the spell.\n if spelldata[\"target\"] in [\"any\", \"other\"]:\n target_candidates = caller.location.contents + caller.contents\n\n # If spell targets 'anyobj', only non-character objects can be targeted.\n if spelldata[\"target\"] == \"anyobj\":\n prefilter_candidates = caller.location.contents + caller.contents\n for thing in prefilter_candidates:\n if not thing.attributes.has(\"max_hp\"): # Has no max HP, isn't a fighter\n target_candidates.append(thing)\n\n # If spell targets 'anychar' or 'otherchar', only characters can be targeted.\n if spelldata[\"target\"] in [\"anychar\", \"otherchar\"]:\n prefilter_candidates = caller.location.contents\n for thing in prefilter_candidates:\n if thing.attributes.has(\"max_hp\"): # Has max HP, is a fighter\n target_candidates.append(thing)\n\n # Now, match each entry in spell_targets to an object in the search candidates\n matched_targets = []\n for target in spell_targets:\n match = caller.search(target, candidates=target_candidates)\n matched_targets.append(match)\n spell_targets = matched_targets\n\n # If no target is given and the spell's target is 'self', set target to self\n if len(spell_targets) == 0 and spelldata[\"target\"] == \"self\":\n spell_targets = [caller]\n\n # Give error message if trying to cast an \"other\" target spell on yourself\n if spelldata[\"target\"] in [\"other\", \"otherchar\"]:\n if caller in spell_targets:\n caller.msg(\"You can't cast '%s' on yourself.\" % spell_to_cast)\n return\n\n # Return if \"None\" in target list, indicating failed match\n if None in spell_targets:\n # No need to give an error message, as 'search' gives one by default.\n return\n\n # Give error message if repeats in target list\n if len(spell_targets) != len(set(spell_targets)):\n caller.msg(\"You can't specify the same target more than once!\")\n return\n\n # Finally, we can cast the spell itself. 
Note that MP is not deducted here!\n try:\n spelldata[\"spellfunc\"](\n caller, spell_to_cast, spell_targets, spelldata[\"cost\"], **kwargs\n )\n except Exception:\n log_trace(\"Error in callback for spell: %s.\" % spell_to_cast)", "def pick_word(self):\n self.chosen_word = random.choice(self.words_list)\n return self.chosen_word", "def changeThenFind(self, event: Event = None) -> None:\n if self.handler:\n self.openSpellTab()\n f = self.handler.changeThenFind\n f()\n else:\n self.openSpellTab()", "def spell_a_word(cls, voice_transcript, skill, **kwargs):\n tags = cls._extract_tags(voice_transcript, skill['tags'])\n for tag in tags:\n reg_ex = re.search(tag + ' ([a-zA-Z]+)', voice_transcript)\n try:\n if reg_ex:\n search_text = reg_ex.group(1)\n for letter in search_text:\n cls.response(letter)\n time.sleep(2)\n except Exception as e:\n logging.debug(e)\n cls.response(\"I can't spell the word\")", "def _select_translation(self, ref, current, entered):\n s = entered\n if entered.lower() == \"y\":\n s = ref\n elif current and entered == \"\":\n s = current\n return s", "def as_you_type_onkey(self, tag: str, kwargs: Any) -> None:\n if kwargs['c'] != self.c:\n return\n if kwargs['ch'] not in '\\'\",.:) \\n\\t':\n return\n c = self.c\n spell_ok = True\n if self.spell_as_you_type: # might just be for wrapping\n w = c.frame.body.wrapper\n txt = w.getAllText()\n i = w.getInsertPoint()\n word = txt[:i].rsplit(None, 1)[-1]\n word = ''.join(i if i.isalpha() else ' ' for i in word).split()\n if word:\n word = word[-1]\n ec = c.spellCommands.handler.spellController\n suggests = ec.process_word(word)\n if suggests:\n spell_ok = False\n g.es(' '.join(suggests[:5]) +\n ('...' if len(suggests) > 5 else ''),\n color='red')\n elif suggests is not None:\n spell_ok = False\n g.es('[no suggestions]')\n self.suggestions = suggests\n self.suggestion_idx = 0\n self.word = word\n if spell_ok and self.wrap_as_you_type and kwargs['ch'] == ' ':\n w = c.frame.body.wrapper\n txt = w.getAllText()\n i = w.getInsertPoint()\n # calculate the current column\n parts = txt.split('\\n')\n popped = 0 # chars on previous lines\n while len(parts[0]) + popped < i:\n popped += len(parts.pop(0)) + 1 # +1 for the \\n that's gone\n col = i - popped\n if col > self.page_width:\n txt = txt[:i] + '\\n' + txt[i:] # replace space with \\n\n w.setAllText(txt)\n c.p.b = txt\n w.setInsertPoint(i + 1) # must come after c.p.b assignment", "def as_you_type_toggle(self, event: Event) -> None:\n if self.spell_as_you_type:\n self.spell_as_you_type = False\n if not self.wrap_as_you_type:\n g.unregisterHandler('bodykey2', self.as_you_type_onkey)\n g.es(\"Spell as you type disabled\")\n return\n self.spell_as_you_type = True\n if not self.wrap_as_you_type:\n g.registerHandler('bodykey2', self.as_you_type_onkey)\n g.es(\"Spell as you type enabled\")", "def secondary_effect(self, saved, caster, spell_effect):\n if saved:\n # print(\"Shoke off effect\")\n self.cleanup_effect(caster, spell_effect)\n else:\n pass", "def __setAutoSpellChecking(self):\n enabled = self.autoSpellCheckAct.isChecked()\n Preferences.setEditor(\"AutoSpellCheckingEnabled\", enabled)\n for editor in self.editors:\n editor.setAutoSpellChecking()", "def testPlaySpell(self):\n\n spell = self._prepare_card(C30007) # ็ซ็ƒๆœฏ\n\n self.assertEqual(spell.id, C30007)\n self.game.run_player_action(pa.PlaySpell(self.game, spell, self.p1.hero))\n\n self._assertEventType(GSE + [\n std_e.OnPlaySpell, std_e.SpellBenderPhase, std_e.SpellText, std_e.Damage, std_e.AfterSpell,\n ])", "def 
focusToSpell(self, event: Event = None) -> None:\n self.openSpellTab() # Makes Spell tab visible.\n # This is not a great idea. There is no indication of focus.\n # if self.handler and self.handler.tab:\n # self.handler.tab.setFocus()", "def use(target, name):\n out = target.damage() + \"\\n\"\n return out + \"You swing the \" + name + \" at \" + target.name", "def do_use(self, arg):\r\n itemToUse = arg.lower()\r\n \r\n if itemToUse == '':\r\n print('Use what? Type \"inv\" to see the items in your invetory.')\r\n return\r\n \r\n cantUse = False\r\n \r\n #look up the item the player describes\r\n invDescWords = getAllDescWords(inventory)\r\n \r\n if itemToUse not in invDescWords:\r\n print('You do not have that item to use it')\r\n return\r\n \r\n for item in getAllItemsMatchingDesc(itemToUse, inventory):\r\n if worldItems[item].get(USEABLE, True) == False:\r\n cantUse = True\r\n continue\r\n print('%s' % (worldItems[item][USEDESCTRUE]))\r\n #print('You use %s' % (worldItems[item][SHORTDESC]))\r\n #inventory.remove(item) \r\n return\r\n \r\n if cantUse:\r\n print('You cannot use \"%s\".' % (itemToUse))\r\n else:\r\n print('You do not have that item to use.')", "def find(self, event: Event = None) -> None:\n if not self.loaded:\n return\n c, n, p = self.c, 0, self.c.p\n sc = self.spellController\n w = c.frame.body.wrapper\n c.selectPosition(p)\n s = w.getAllText().rstrip()\n ins = w.getInsertPoint()\n # New in Leo 5.3: use regex to find words.\n last_p = p.copy()\n while True:\n for m in self.re_word.finditer(s[ins:]):\n start, word = m.start(0), m.group(0)\n if word in self.seen:\n continue\n n += 1\n # Ignore the word if numbers precede or follow it.\n # Seems difficult to do this in the regex itself.\n k1 = ins + start - 1\n if k1 >= 0 and s[k1].isdigit():\n continue\n k2 = ins + start + len(word)\n if k2 < len(s) and s[k2].isdigit():\n continue\n alts: list[str] = sc.process_word(word)\n if alts:\n self.currentWord = word\n i = ins + start\n j = i + len(word)\n self.showMisspelled(p)\n self.tab.fillbox(alts, word)\n c.invalidateFocus()\n c.bodyWantsFocus()\n w.setSelectionRange(i, j, insert=j)\n k = g.see_more_lines(s, j, 4)\n w.see(k)\n return\n self.seen.add(word)\n # No more misspellings in p\n p.moveToThreadNext()\n if p:\n ins = 0\n s = p.b\n else:\n g.es(\"no more misspellings\")\n c.selectPosition(last_p)\n self.tab.fillbox([])\n c.invalidateFocus()\n c.bodyWantsFocus()\n return", "def use(self):\n return_string = ''\n item = input(f\"What do you want to use?\\n>\")\n if item in self.backpack:\n if self.backpack[item].type is \"Food\":\n if (self.health + self.backpack[item].heal_amount) > standard_health:\n self.health = standard_health\n else:\n self.health += self.backpack[item].heal_amount\n self.backpack[item].charges -= 1\n return_string = f\"You ate {self.backpack[item].name}. 
{self.backpack[item].heal_amount} health restored\"\n if self.backpack[item].charges == 0:\n del self.backpack[item]\n return return_string\n else:\n return \"You cant eat this\"\n else:\n return \"You dont have this\"", "def fixWord(self,phrase):\n if(\"spellCheck\" in self._classes):\n return self._spellChecker.fixWord(phrase)", "def __editUserPEL(self):\n from QScintilla.SpellChecker import SpellChecker\n pel = SpellChecker.getUserDictionaryPath(True)\n self.__editSpellingDictionary(pel)", "def DoAction(self,event):\r\n selections = self.list.GetSelections()\r\n if not selections: return bell()\r\n itemDex = selections[0]\r\n item = self.items[itemDex]\r\n self.data.action(item)", "def __enableSpellingActions(self):\n from QScintilla.SpellChecker import SpellChecker\n spellingAvailable = SpellChecker.isAvailable()\n \n self.spellCheckAct.setEnabled(\n len(self.editors) != 0 and spellingAvailable)\n self.autoSpellCheckAct.setEnabled(spellingAvailable)", "def add(self, event: Event = None) -> None:\n if self.loaded:\n w = self.currentWord\n if w:\n self.spellController.add(w)\n self.tab.onFindButton()", "def _onWord(self, name, location, length):\n logging.debug(\"onWord...\")", "def __showEditSpellingMenu(self):\n proj = e5App().getObject(\"Project\")\n projetOpen = proj.isOpen()\n pwl = e5App().getObject(\"Project\").getProjectDictionaries()[0]\n self.__editProjectPwlAct.setEnabled(projetOpen and bool(pwl))\n pel = e5App().getObject(\"Project\").getProjectDictionaries()[1]\n self.__editProjectPelAct.setEnabled(projetOpen and bool(pel))\n \n from QScintilla.SpellChecker import SpellChecker\n pwl = SpellChecker.getUserDictionaryPath()\n self.__editUserPwlAct.setEnabled(bool(pwl))\n pel = SpellChecker.getUserDictionaryPath(True)\n self.__editUserPelAct.setEnabled(bool(pel))", "def change(self, event: Event = None) -> None:\n if self.handler:\n self.openSpellTab()\n self.handler.change()\n else:\n self.openSpellTab()", "def spell_attack(self, caster, spell_name, targets, cost, **kwargs):\n spell_msg = \"%s casts %s!\" % (caster, spell_name)\n\n atkname_single = \"The spell\"\n atkname_plural = \"spells\"\n min_damage = 10\n max_damage = 20\n accuracy = 0\n attack_count = 1\n\n # Retrieve some variables from kwargs, if present\n if \"attack_name\" in kwargs:\n atkname_single = kwargs[\"attack_name\"][0]\n atkname_plural = kwargs[\"attack_name\"][1]\n if \"damage_range\" in kwargs:\n min_damage = kwargs[\"damage_range\"][0]\n max_damage = kwargs[\"damage_range\"][1]\n if \"accuracy\" in kwargs:\n accuracy = kwargs[\"accuracy\"]\n if \"attack_count\" in kwargs:\n attack_count = kwargs[\"attack_count\"]\n\n to_attack = []\n # If there are more attacks than targets given, attack first target multiple times\n if len(targets) < attack_count:\n to_attack = to_attack + targets\n extra_attacks = attack_count - len(targets)\n for n in range(extra_attacks):\n to_attack.insert(0, targets[0])\n else:\n to_attack = to_attack + targets\n\n # Set up dictionaries to track number of hits and total damage\n total_hits = {}\n total_damage = {}\n for fighter in targets:\n total_hits.update({fighter: 0})\n total_damage.update({fighter: 0})\n\n # Resolve attack for each target\n for fighter in to_attack:\n attack_value = randint(1, 100) + accuracy # Spell attack roll\n defense_value = self.get_defense(caster, fighter)\n if attack_value >= defense_value:\n spell_dmg = randint(min_damage, max_damage) # Get spell damage\n total_hits[fighter] += 1\n total_damage[fighter] += spell_dmg\n\n for fighter in targets:\n # 
Construct combat message\n if total_hits[fighter] == 0:\n spell_msg += \" The spell misses %s!\" % fighter\n elif total_hits[fighter] > 0:\n attack_count_str = atkname_single + \" hits\"\n if total_hits[fighter] > 1:\n attack_count_str = \"%i %s hit\" % (total_hits[fighter], atkname_plural)\n spell_msg += \" %s %s for %i damage!\" % (\n attack_count_str,\n fighter,\n total_damage[fighter],\n )\n\n caster.db.mp -= cost # Deduct MP cost\n\n caster.location.msg_contents(spell_msg) # Message the room with spell results\n\n for fighter in targets:\n # Apply damage\n self.apply_damage(fighter, total_damage[fighter])\n # If fighter HP is reduced to 0 or less, call at_defeat.\n if fighter.db.hp <= 0:\n self.at_defeat(fighter)\n\n if self.is_in_combat(caster): # Spend action if in combat\n self.spend_action(caster, 1, action_name=\"cast\")", "def spell_file(fn, wordcost, maxword):\n\n def infer_spaces(s):\n \"\"\"Uses dynamic programming to infer the location of spaces in a string\n without spaces.\"\"\"\n global unfolded\n if s in unfolded:\n return unfolded[s]\n\n # Find the best match for the i first characters, assuming cost has\n # been built for the i-1 first characters.\n # Returns a pair (match_cost, match_length).\n def best_match(i):\n candidates = enumerate(reversed(cost[max(0, i-maxword):i]))\n return min((c + wordcost.get(s[i-k-1:i], 9e999), k+1) for k,c in candidates)\n\n # Build the cost array.\n cost = [0]\n for i in range(1,len(s)+1):\n c,k = best_match(i)\n cost.append(c)\n\n # Backtrack to recover the minimal-cost string.\n out = []\n i = len(s)\n while i>0:\n c,k = best_match(i)\n assert c == cost[i]\n out.append(s[i-k:i])\n i -= k\n \n unfolded[s] = ' '.join(reversed(out))\n return ' '.join(reversed(out))\n\n\n\n speller = aspell.Speller('lang', 'en')\n for w in slang:\n speller.addtoSession(w)\n \n with open(tweet_tmp1_dir + fn, 'r') as fin:\n with open(tweet_tmp2_dir + fn, 'w') as fout:\n res = []\n for l in fin:\n prefix = ''\n if 'test' in fn:\n comma = l.find(',')\n prefix = l[:comma].strip()\n l = l[comma+1:]\n try:\n assert(prefix.isdigit())\n except:\n print(prefix, l)\n prefix += ','\n \n ll = ''\n \n ws = [w for w in l.strip().split(' ') if len(w) > 0]\n for w in ws:\n if w in correct_word:\n nw = correct_word[w]\n elif (w.startswith('<') and w.endswith('>')) or w in whitelist or speller.check(w):\n nw = w\n else:\n try:\n nw1, nw2 = speller.suggest(w)[:2]\n nwdist1 = jellyfish.levenshtein_distance(w,nw1)\n nwdist2 = jellyfish.levenshtein_distance(w,nw2)\n \n if nw2.count(' ') < nw1.count(' ') or (nwdist1 > MAX_DIST_CORRECTION and nwdist2 < nwdist1) :\n nw1 = nw2\n nwdist1 = nwdist2\n if nwdist1 <= MAX_DIST_CORRECTION:\n nw = nw1.lower()\n else:\n nw = w.lower()\n except:\n nw = infer_spaces(w)\n if nw.count('.') >= nw.count(' ')/3:\n nw = nw.replace('.', '')\n elif nw.count('-') >= nw.count(' ')/3:\n nw = nw.replace('-', '')\n nw = nw.replace(' ', ' ').lower()\n ll += nw + ' '\n correct_word[w] = nw\n res.append(prefix+ll.strip())\n# fout.write(prefix+ll.strip()+'\\n')\n fout.write('\\n'.join(res))", "def on_correct_answer_select(self, spinner, text):\n\n self.answer = text\n self.multiple_choice_answer = text", "def __editUserPWL(self):\n from QScintilla.SpellChecker import SpellChecker\n pwl = SpellChecker.getUserDictionaryPath()\n self.__editSpellingDictionary(pwl)", "def get_weapon(self):\n\n return self.suggestion_set[1]", "def correct_spell(tweet):\n\n\n tweet = tweet.split()\n for i in range(len(tweet)):\n if tweet[i] in downloaded_dictionary.keys():\n 
tweet[i] = downloaded_dictionary[tweet[i]]\n tweet = ' '.join(tweet)\n return tweet", "def execute_for_command(self, skill_input: SkillInput, services: AssistantServicesBase):\n voice = skill_input.adjective.lower()\n if voice in (\"female\", \"male\"):\n services.settings_service.voice = voice\n services.settings_service.save_settings()\n services.user_interaction_service.speak('Okay, I will use a %s voice from now on.' % (voice), True)\n else:\n services.user_interaction_service.speak('I don\\'t understand what voice you want')", "def setSpellchecking(self, color=QtCore.Qt.blue):\n self.format.setUnderlineStyle(\n QtGui.QTextCharFormat.SpellCheckUnderline)\n self.format.setUnderlineColor(color)", "def cursor_changed(self, column_side, bypass_selection=\"\"):\n\n column = None\n aligned_column = None\n if column_side == LEFT_TEXT:\n column = self._window.column1\n aligned_column = self._window.column2\n else:\n column = self._window.column2\n aligned_column = self._window.column1\n\n w = None\n if bypass_selection != \"\":\n # bypass the selection and process, used by search_highlight\n w = bypass_selection\n else:\n # else, just select the clicked word\n w = column.align_disp.editor.get_clicked_word()\n\n if w and w != \"\" and w != column.align_disp.currentWord:\n try:\n word, aligned_word, goldsmith_rslt, goldsmith_rslt_2 = self.controller.process_word(w, column_side)\n\n # Highlighting\n column.align_disp.editor.clean_highlight(first_pos=column.align_disp.editor.first_highlighted_block,\n last_pos=column.align_disp.editor.last_highlighted_block)\n column.align_disp.editor.refresh_highlight(word.str)\n aligned_column.align_disp.editor.clean_highlight(first_pos=aligned_column.align_disp.editor.first_highlighted_block,\n last_pos=aligned_column.align_disp.editor.last_highlighted_block)\n aligned_column.align_disp.editor.refresh_highlight(aligned_word.str, color=QtGui.QColor(255, 255, 100))\n\n align_rslt = \"{} : <b>{}</b>\".format(self.model.dist_words[word.str][aligned_word.str], aligned_word.str)\n\n column.info_word.set_word(word.str)\n column.info_word.set_text(align_rslt)\n column.see_also.set_text(goldsmith_rslt)\n column.align_disp.currentWord = word.str\n column.align_disp.sidebar.currentVect = word.pos\n column.align_disp.sidebar.draw_vector()\n\n aligned_column.info_word.set_word(aligned_word.str)\n aligned_column.info_word.set_text(\"See also\")\n # TODO : goldsmith on the second column, maybe paste the code or add eternal function\n aligned_column.see_also.set_text(goldsmith_rslt_2)\n aligned_column.align_disp.currentWord = aligned_word.str\n aligned_column.align_disp.sidebar.currentVect = aligned_word.pos\n aligned_column.align_disp.sidebar.draw_vector()\n\n except WordNotInDatabase:\n column.align_disp.editor.clean_highlight(first_pos=column.align_disp.editor.first_highlighted_block,\n last_pos=column.align_disp.editor.last_highlighted_block)\n aligned_column.align_disp.editor.clean_highlight(first_pos=aligned_column.align_disp.editor.first_highlighted_block,\n last_pos=aligned_column.align_disp.editor.last_highlighted_block)\n column.info_word.set_word(\"Not found\")\n column.info_word.set_text(\"Alignment results\")\n column.see_also.set_text(\"Goldsmith algorithm results\")\n column.align_disp.currentWord = None\n column.align_disp.sidebar.currentVect = [0, 1]\n column.align_disp.sidebar.draw_vector()\n\n aligned_column.info_word.set_word(\"Not found\")\n aligned_column.info_word.set_text(\"See also\")\n aligned_column.see_also.set_text(\"Goldsmith algorithm 
results\")\n aligned_column.align_disp.currentWord = None\n aligned_column.align_disp.sidebar.currentVect = [0, 1]\n aligned_column.align_disp.sidebar.draw_vector()\n\n except DataNotProcessed:\n column.align_disp.editor.clean_highlight(first_pos=column.align_disp.editor.first_highlighted_block,\n last_pos=column.align_disp.editor.last_highlighted_block)\n aligned_column.align_disp.editor.clean_highlight(first_pos=aligned_column.align_disp.editor.first_highlighted_block,\n last_pos=aligned_column.align_disp.editor.last_highlighted_block)", "def spell(self, word):\n if not self.__isValidInput(word):\n return False\n\n result = self.__lib.voikkoSpellUcs4(self.__handle, word)\n if result == 0:\n return False\n elif result == 1:\n return True\n else:\n raise VoikkoException(\"Internal error returned from libvoikko\")", "def _sense_and_act(self):\n pass", "def ignore(self, event: Event = None) -> None:\n if self.loaded:\n w = self.currentWord\n if w:\n self.spellController.ignore(w)\n self.tab.onFindButton()", "def test_spelling(self) -> None:\n misstakes: Dict[Word, List[str]] = self.report.spellcheck(\n self.rules.spelling_skip_wordclasses\n )\n for word, corrections in misstakes.items():\n if word.text.lower() in self.rules.forbidden_words:\n continue\n if word.text.lower() in [\n ab[\"word\"] for ab in self.rules.police_abbreviations\n ]:\n continue\n error_text: str = f\"Ordet {word.text} รคr felstavat.\"\n if corrections:\n error_text += \" Rรคttningsfรถrslag: \" + \", \".join(corrections) + \".\"\n self.add_error(error_text, word=word)", "def use(target, name):\n return \"You find no use of this item\"", "def onWordRecognised(self, *_args):\n # Unsubscribe to the event when talking,\n # to avoid repetitions\n memory.unsubscribeToEvent(\"WordRecognized\",\"AudioRecognition\")\n\n # We access to the word recognised in the memory\n word = memory.getData(\"WordRecognized\")\n\n # Debug : Print the word recognised\n print(\"Mot :\")\n print(word[0])\n print(\"Indice de confiance :\")\n print(word[1])\n print\n\n\n # We acknoledge a word if the trust is high enough\n if (word[1] > 0.28):\n self.mot = word[0]\n #self.tts.say(\"Le mot reconnu est :\"+self.mot)\n StateManager(self)\n \n\n # Subscribe again to the event\n memory.subscribeToEvent(\"WordRecognized\",\n \"AudioRecognition\",\n \"onWordRecognised\")", "def func(self):\n if not self.args:\n self.msg(\n \"{wYou are currently speaking:{n %s\"\n % self.caller.languages.current_language.capitalize()\n )\n self.list_languages()\n return\n if \"translate\" in self.switches:\n obj = self.caller.search(self.args)\n if not obj:\n return\n translation = obj.item_data.translation\n matches = False\n for lang in self.caller.languages.known_languages:\n if lang in translation:\n self.msg(\n \"You translate the following from %s:\\n%s\"\n % (lang.capitalize(), translation[lang])\n )\n matches = True\n if not matches:\n self.msg(\n \"%s does not seem to contain any foreign tongue you can read.\" % obj\n )\n return\n if not self.switches:\n args = self.args.lower()\n if args == \"arvani\" or args == \"common\":\n self.caller.attributes.remove(\"currently_speaking\")\n self.msg(\"{wYou are now speaking Arvani.{n\")\n return\n if args not in self.caller.languages.known_languages:\n self.msg(\"You cannot speak %s.\" % self.args)\n self.list_languages()\n return\n self.caller.db.currently_speaking = args\n self.msg(\"{wYou are now speaking %s.{n\" % self.args)\n return\n player = self.caller.player.search(self.lhs)\n if not player:\n return\n targ = 
player.char_ob\n if not targ:\n self.msg(\"Not found.\")\n return\n if \"teachme\" in self.switches:\n if self.caller.languages.additional_languages <= 0:\n self.msg(\n \"You need a higher rank of linguistics before you can learn anything else.\"\n )\n return\n req = targ.ndb.language_requests or {}\n req[self.caller] = self.rhs\n targ.ndb.language_requests = req\n self.msg(\"You request that %s teach you %s.\" % (targ, self.rhs))\n targ.msg(\n \"{w%s has requested that you teach them %s.{n\" % (self.caller, self.rhs)\n )\n return\n if \"teach\" in self.switches:\n req = self.caller.ndb.language_requests or {}\n if targ not in req:\n self.msg(\"You do not have a request from %s.\" % targ)\n return\n lang = req[targ].lower()\n if lang not in self.caller.languages.known_languages:\n self.msg(\"You do not know %s.\" % lang)\n self.list_languages()\n return\n if targ.languages.max_languages <= len(targ.languages.known_languages):\n self.msg(\"They know as many languages as they can learn.\")\n return\n targ.languages.add_language(lang)\n self.msg(\"You have taught %s to %s.\" % (lang, targ))\n targ.msg(\"%s has taught you %s.\" % (self.caller, lang))\n return", "def cmd(name: str) -> Callable:\n return g.new_cmd_decorator(name, ['c', 'spellCommands',])", "def test_selection_name(self):\n skill = create_skill()\n skill.speak = mock.Mock()\n skill.get_response = mock.Mock()\n\n skill.get_response.return_value = 'octopus'\n\n options = ['a balloon', 'an octopus', 'a piano']\n response = skill.ask_selection(options, 'which is better')\n self.assertEqual(options[1], response)\n\n # Assert that the spoken sentence contains all options.\n spoken_sentence = skill.speak.call_args[0][0]\n for opt in options:\n self.assertTrue(opt in spoken_sentence)", "def use_skill(self, g, i, x, y):\n # @ param g a reference to the game engine\n # @ param i the index of the skill (basically what skill)\n # @ param x the x target coordinate in game pixels\n # @ param y the y target coordinate in game pixels\n if self.attackTimer < self.attackDelay:\n print(\"attack on CD\")\n return\n \n if self.skill[i].skillAttr == 0:\n g.fire_skill_sound.play()\n elif self.skill[i].skillAttr == 1:\n g.ice_skill_sound.play()\n elif self.skill[i].skillAttr == 2:\n g.lightning_skill_sound.play()\n elif self.skill[i].skillAttr == 3:\n g.poison_skill_sound.play()\n \n \n if self.skill[i].skillKey == 0: #Aura\n #turn the aura on/off\n if self.skill[i].active == False:\n #print(\"aura on\")\n self.skill[i].active = True\n else:\n self.skill[i].active = False\n #print(\"aura off\")\n \n elif self.skill[i].skillKey == 1: #Missile\n if self.mana[0] > self.skill[i].skillCost:\n self.mana[0] -= self.skill[i].skillCost\n self.attackTimer = 0\n target = Target(x, y)\n center_x = self.rect.x + (self.rect.width / 2)\n center_y = self.rect.y + (self.rect.height / 2)\n #bullet types: fire 5, ice 6, lightning 7\n #skill types: fire 0, ice 1, lightning 2\n g.bullets.append(self.bulletFactory.createBullet(g, self.skill[i].skillAttr + 5, 0, self.attack, 1024, target, center_x, center_y))\n #print(\"missile\")\n\n elif self.skill[i].skillKey == 2: #Breath\n #for each creep in the AoE cone, do damage.\n if self.mana[0] > self.skill[i].skillCost:\n self.mana[0] -= self.skill[i].skillCost\n self.attackTimer = 0\n #get low and high angle (-45 degrees and +45 degrees from player -> point angle)\n lowAngle = math.atan2(y - self.rect.centery, x - self.rect.centerx) - 3.1415 / 2.0\n highAngle = math.atan2(y - self.rect.centery, x - self.rect.centerx) + 3.1415 / 
2.0\n for creep in g.creeps:\n #get angle to creep\n creepAngle = math.atan2(creep.rect.centery - self.rect.centery, creep.rect.centerx - self.rect.centerx)\n \n #if angle to the creep is between the two angles\n if creepAngle > lowAngle and creepAngle < highAngle:\n #and the distance to the creep is below the skill's range\n if ( (creep.rect.centerx - self.rect.centerx) ** 2 + (creep.rect.centery - self.rect.centery) ** 2 ) ** 0.5 < 4 * 24:\n creep.take_damage( self.attack )\n #print(\"breath\")\n #apply debuffs, based on type\n if self.skill[i].skillAttr == 0: #fire\n creep.applyBurning()\n elif self.skill[i].skillAttr == 1: #frost\n creep.applyChilled()\n elif self.skill[i].skillAttr == 2: #lightning\n creep.applyShocked()", "def choice1(choice, ghost):\n if choice == \"1\":\n if \"flashlight\" not in items:\n print_pause(\"The Railway station is really foggy\", 2)\n print_pause(\"You can see absolute nothing\", 2)\n print_pause(\"You walk on and find a flashlight\", 2)\n light_choice(ghost)\n else:\n print_pause(\n \"You already pasted this way\\nPlease choose another way!\", 2)\n logic(ghost)", "def handle_suggest():\n return 0", "def google_suggest(self, callback, who, arg, store=True):\n\t\t\n sugs = self.get_xml('http://google.com/complete/search', {'output':'toolbar', 'q': arg})\n\n if sugs is not None:\n try:\n sugs = [x[0].get('data') for x in sugs]\n except Exception, e:\n print \"XML error with Google Suggest: %s\" % e\n\t\t\t\n suggestions = self.remove_lyrics(sugs)\n random_sug = choice(suggestions)\n\t\t\t\n # Same string as we started with - roll again\n if random_sug == arg:\n try:\n suggestions.pop(suggestions.index(random_sug))\n except:\n pass\n random_sug = choice(suggestions)\n\t\t\t\t\n if random_sug is not None:\n if store:\n self.store_suggestion(who, arg)\n random_sug.strip('')\n random_sug.strip('\\r')\n w = random_sug.split()\n if w[0].lower() in ('what', 'why', 'was', 'where', 'who', 'which', 'whom', 'when', 'how', 'is', 'are', 'did'):\n if '?' not in w[-1:]:\n random_sug = random_sug + '?'\n return random_sug", "def choose_word(word_list):\n word = random.choice(word_list)\n word = word.lower()\n return word", "def select(self, coord: Coord) -> None:\n active_team = self.units_manager.active_team\n self.prev_sel = self.curr_sel\n self.curr_sel = coord\n\n if self.prev_sel is None:\n # Nothing has been previously selected\n sel_unit = self.get_unit(coord)\n if sel_unit is None or sel_unit.played:\n self.move_area = []\n self.attack_area = []\n self.update_highlight()\n else:\n # Show the currently selected unit's move and attack area\n self.update_move_area()\n self.move_attack_area()\n self.update_highlight()\n else:\n # Something has been previously selected\n if self.prev_unit is not None and self.curr_unit is not None:\n # Selected a unit two times\n if self.prev_sel == self.curr_sel and not self.prev_unit.played and active_team.is_mine(self.prev_unit):\n # Two times on the same playable unit. 
Show the action menu.\n self.action_menu()\n elif self.curr_sel in self.attack_area:\n # Two different units: prev_unit can attack curr_unit\n # This results in a combined action: move the unit next to the enemy and propose the user to attack\n target_unit = self.curr_unit\n nearest = self.arrow.path[-1] if self.arrow.path else self.prev_sel\n if self.nearby_enemies(self.prev_unit, nearest):\n animation = self.make_move_unit_animation(self.prev_unit, nearest, self.arrow.path)\n self.add_move_unit_animation(animation)\n self.move_unit(self.prev_unit, nearest)\n self.curr_sel = nearest # otherwise move_undo will move back the defending unit!\n self.still_attack_area()\n self.update_highlight()\n self.action_menu(attacking=self.curr_unit, defending=target_unit)\n else:\n self.reset_selection()\n else:\n # Two different units: prev_unit can't attack curr_unit\n # show the current unit's move and attack area\n self.update_move_area()\n self.move_attack_area()\n self.update_highlight()\n elif self.can_selection_move():\n # Move the previously selected unit to the currently selected coordinate.\n animation = self.make_move_unit_animation(self.prev_unit, self.curr_sel, self.arrow.path)\n self.add_move_unit_animation(animation)\n self.move_unit(self.prev_unit, self.curr_sel)\n self.still_attack_area()\n self.update_highlight()\n self.action_menu()\n else:\n # Previously something irrelevant was chosen\n self.reset_selection()\n self.curr_sel = coord\n\n if self.curr_unit is not None and not self.curr_unit.played:\n # Selected a unit: show its move and attack area\n self.update_move_area()\n self.move_attack_area()\n\n self.update_highlight()\n\n self.arrow.set_path([])", "def check(self, word):\n spellings = []\n # word in lexicon\n if word in self._lexicon:\n spellings.append(word)\n # word not in lexicon\n else:\n for item in self._lexicon:\n dist = self.min_edit_distance(word, item)\n if dist == 1:\n spellings.append(item)\n\n return spellings", "def pull_suggestion(self, callback, who, arg):\n\t\t\n random_sug = self.dong.db.get_random_row('suggest')\n res = self.google_suggest(callback, who, random_sug[2], False)\n\t\t\n w = res.split()\n if w[0].lower() in ('what', 'why', 'was', 'where', 'who', 'which', 'whom', 'when', 'how', 'is', 'are', 'did'):\n if w[-1:] != '?':\n res = res + '?'\n return res.capitalize()", "def check_target(event):\n target_text.set(target_text.get().lower())", "def update_word(self, word):\n self.word = word", "def radioButtonWeapon_Clicked( self, event ):\n\t\tself.activateTreasureBox(1)", "def use(self, target, name):\n return self.usable.use(target, name)", "def wrapped(widget):\n if r:\n obj.player_choice = random.choice(list(evilrps.Throws))\n else:\n # print(widget, 'throw', throw)\n obj.player_choice = throw\n obj.advance()", "def get_stopwords(choice = 0):\n low_acc_words = [u'orange', u'game', u'wafe', u'gold', u'gas pump', u'dock', u'magnetic disk', u'beard', u'splash', u'stethoscope', u'clock', u'modem', u'spring', u'dribble', u'scale', u'thing', u'parachute', u'screw', u'haired', u'hair spray', u'stick', u'projectile', u'surface', u'scarf', u'boat', u'lantern', u'weapon', u'fire screen', u'maypole', u'Old World buffalo', u'backpack', u'velvet', u'pistol', u'duplicator', u'tissue', u'holding', u'eel', u'iron', u'zoo', u'toilet seat', u'eye', u'telephone', u'drum', u'pepper', u'church', u'pillow', u'body', u'mink', u'prison', u'color', u'jewelry', u'elephant', u'mug', u'cargo ship', u'football', u'llama', u'wombat', u'ax', u'giant panda', u'bison', u'climber', 
u'tractor', u'hamster', u'beetle', u'sidewalk', u'oilseed', u'shore', u'feet', u'vending machine', u'nail', u'lock', u'licking', u'crowded', u'pudding', u'library', u'sliding', u'steel drum', u'cutter', u'trench coat', u'plate rack', u'fancy', u'barbershop', u'switch', u'hip', u'petting', u'keyboard', u'drilling platform', u'denim', u'old', u'sewing machine', u'dancing', u'lawn mower', u'jaguar', u'cauliflower', u'bubble', u'tray', u'printer', u'hillside', u'heater', u'store', u'stove', u'hook', u'bed', u'book jacket', u'rain barrel', u'dinosaur', u'rowing', u'surf', u'worm', u'garbage truck', u'laptop', u'mouth', u'flute', u'tape player', u'gym', u'large', u'birdhouse', u'covered', u'groom', u'swan', u'lampshade', u'snowplow', u'ramp', u'bathing cap', u'strainer', u'hard', u'mortarboard', u'penguin', u'wooden spoon', u'loaf of bread', u'window', u\"potter's wheel\", u'branch', u'fly', u'greyhound', u'walk', u'starfish', u'kitchen', u'parking meter', u'cassette', u'work', u'cash machine', u'custard apple', u'play', u'ice cream', u'mosque', u'market', u'swing', u'hay', u'fan', u'surfer', u'number', u'climb', u'golfcart', u'burrito', u'feather boa', u'resting', u'neck brace', u'glove', u'remote control', u'lotion', u'lamp', u'perched', u'jeep', u'necklace', u'shopping basket', u'sea urchin', u'pajama', u'pinwheel', u'foot', u'maze', u'squash', u'dishrag', u'bib', u'ant', u'dumbbell', u'dragonfly', u'bakery', u'lighter', u'salamander', u'sandglass', u'apron', u'cannon', u'palm', u'tent', u'spacecraft', u'oil filter', u'beer bottle', u'throne', u'stretcher', u'bedroom', u'pan', u'camera', u'kiddie', u'mashed potato', u'railing', u'tongue', u'sky', u'event', u'bright', u'curb', u'sundial', u'screwdriver', u'hand blower', u'joystick', u'flower', u'tv', u'back', u'smile', u'mortar', u'bee', u'bath', u'spatula', u'lawn', u'object', u'barrier', u'mailbox', u'fallen', u'crayfish', u'kid', u'metal', u'shot', u'quill', u'snowboarding', u'mud', u'vacuum', u'water tower', u'sleeping bag', u'altar', u'bassoon', u'family', u'shovel', u'leather', u'maillot', u'soap dispenser', u'blurry', u'racetrack', u'dish', u'gondola', u'chewing', u'badger', u'spindle', u'door', u'shaker', u'purse', u'apiary', u'bus', u'wreck', u'cell', u'balance beam', u'lip', u'animal', u'baby', u'toilet', u'armor plate', u'jigsaw puzzle', u'piggy bank', u'leafhopper', u'torch', u'ashcan', u'talking', u'traveling', u'handrail', u'area', u'raft', u'can opener', u'missile', u'syringe', u'pen', u'beacon', u'croquet ball', u'trail', u'snowboard', u'light', u'owl', u'lift', u'acorn', u'pencil box', u'hermit crab', u'binder', u'ladle', u'fire engine', u'tan', u'volcano', u'chocolate sauce', u'crossword puzzle', u'whistle', u'floating', u'forklift', u'hotdog', u'monotreme', u'eggnog', u'traffic', u'envelope', u'surfboard', u'face', u'polecat', u'tiled', u'camel', u'refrigerator', u'carousel', u'parking', u'spider web', u'stream', u'train', u'square', u'candle', u'thimble', u'jellyfish', u'teddy', u'leash', u'wild', u'shopping cart', u'jackfruit', u'office', u'alligator', u'ready', u'end', u'power drill', u'lens cap', u'looking', u'hand', u'fountain', u'radiator', u'French horn', u'graze', u'female', u'koala', u'paper towel', u'artichoke', u'passenger', u'airship', u'cow', u'slug', u'home', u'tug', u'weasel', u'including', u'crutch', u'submarine', u'chime', u'pretty', u'phone', u'barrow', u'purple', u'pulling', u'wing', u'mongoose', u'washer', u'slide', u'Band Aid', u'splashing', u'obstacle', u'flying', u'restaurant', u'pencil sharpener', 
u'control', u'something', u'tricycle', u'motor', u'watching', u'grey', u'balcony', u'surrounded', u'statue', u'rotisserie', u'puck', u'assorted', u'umbrella', u'measuring cup', u'hanging', u'ride', u'scuba', u'perform', u'tusker', u'desk', u'puddle', u'sea slug', u'team', u'beaker', u'held', u'safe', u'shower curtain', u'isopod', u'tire', u'beaver', u'tower', u'stump', u'dinner', u'conch', u'playground', u'marmot', u'fruit', u'golf ball', u'read', u'tile', u'watch', u'mosquito net', u'goggle', u'swab', u'cricket', u'wheelie', u'guacamole', u'bush', u'cockroach', u'intersection', u'letter opener', u'station', u'plow', u'course', u'aeroplane', u'view', u'racing', u'broom', u'sunny', u'corn', u'matchstick', u'variety', u'messy', u'playpen', u'ambulance', u'perfume', u'brush', u'go', u'shelf', u'look', u'blowing', u'lobster', u'lettuce', u'busy', u'digging', u'trampoline', u'track', u'glass', u'ox', u'handstand', u'assortment', u'vase', u'aircraft carrier', u'microwave', u'high', u'mousetrap', u'bathroom', u'shower cap', u'counter', u'Christmas stocking', u'safety pin', u'plastic', u'garden', u'transit', u'knife', u'docked', u'cluttered', u'serving', u'toddler', u'ledge', u'formation', u'snorkel', u'lying', u'lemon', u'ladybug', u'carry', u'solar dish', u'hammer', u'sleeping', u'saltshaker', u'cowboy', u'unicycle', u'single', u'rule', u'shoji', u'business', u'cup', u'antique', u'catch', u'open', u'carnival', u'cooking', u'rural', u'small', u'wine', u'top', u'flat', u'yurt', u'grasshopper', u'hoop', u'wallet', u'hold', u'someone', u'necked', u'salad', u'leafe', u'paddlewheel', u'porcupine', u'radio telescope', u'preparing', u'canopy', u'pointing', u'honeycomb', u'older', u'hair slide', u'plunger', u'mirror', u'landscape', u'bow', u'cart', u'skateboard', u'device', u'urban', u'sunset', u'attached', u'toward', u'right', u'town', u'four', u'beach wagon', u'close', u'lone', u'chew', u'pile', u'working', u'bottlecap', u'corner', u'swinging', u'behind', u'slot machine', u'food', u'mushroom', u'around', u'tall', u'oxygen mask', u'together', u'veggy', u'skating', u'concrete', u'subway', u'seen', u'head', u'armadillo', u'ly', u'kitten', u'cap', u'painted', u'mustache', u'moving', u'lit', u'sliced', u'sticking', u'milk can', u'roller', u'stainless', u'teeth', u'seated', u'serve', u'lady', u'carriage', u'stand', u'apple', u'paper', u'apartment', u'video', u'eating', u'stadium', u'turn', u'racket', u'stunt', u'plate', u'drinking', u'slice', u'warplane', u'cheese', u'onion', u'backyard', u'coffee', u'peach', u'staring', u'outfit', u'engine', u'coaster', u'striped', u'stacked', u'decorated', u'throwing', u'dirty', u'hula', u'mid', u'catching', u'closed', u'item', u'otter', u'rail', u'tenni', u'sink', u'toaster', u'meal', u'skate', u'fridge', u'pitch', u'kite', u'desktop', u'meat', u'military', u'fireplace', u'show', u'rider', u'rodeo', u'graffiti', u'bunch', u'coming', u'reading', u'walkway', u'another', u'mouse', u'soup', u'hole', u'steel', u'container', u'past', u'carrying', u'equipment', u'farm', u'dressed', u'scooter', u'cellphone', u'stuffed', u'commercial', u'platform', u'full', u'one', u'electronic', u'sprinkler', u'stop', u'along', u'blanket', u'residential', u'kneeling', u'blender', u'oven', u'cattle', u'skateboarder', u'produce', u'book', u'cement', u'bag', u'carrot', u'board', u'round', u'many', u'giant', u'shower', u'asian', u'picnic', u'dining', u'wedding', u'desert', u'huge', u'narrow', u'outside', u'deck', u'three', u'display', u'filled', u'cutting', u'colored', u'ear', u'feeding', u'across', 
u'eat', u'skateboarding', u'fighter', u'sun', u'darkened', u'brushing', u'ty', u'party', u'pedestrian', u'wet', u'structure', u'different', u'crossbone', u'jet', u'public', u'cooked', u'airplane', u'bread', u'clothe', u'tunnel', u'fishing', u'drife', u'gear', u'birthday', u'frisbee', u'piece', u'row', u'hydrant', u'drawn', u'meter', u'vegetable', u'broccoli', u'country', u'half', u'sandwich', u'doorway', u'lot', u'pair', u'luggage', u'long', u'christma', u'wii', u'guy', u'side', u'leap', u'plane', u'silver', u'post', u'bar', u'reaching', u'drink', u'reflection', u'wand', u'airport', u'photograph', u'type', u'lay', u'lap', u'waterfall', u'banana', u'next', u'baseball', u'hot', u'making', u'gray', u'using', u'batter', u'empty', u'bat', u'clear', u'hospital', u'scissor', u'neck', u'cake', u'alone', u'rope', u'winter', u'runway', u'broken', u'fire', u'getting', u'variou', u'distance', u'beer', u'outstretched', u'chocolate', u'match', u'stopped', u'vintage', u'clean', u'fork', u'cut', u'eaten', u'waiting', u'going', u'onto', u'nintendo', u'time', u'several', u'lined', u'railroad', u'case', u'mother', u'suitcase', u'taking', u'doughnut', u'smoke', u'controller', u'crossing', u'friend', u'closeup', u'couple', u'showing', u'made', u'big', u'trying', u'putting', u'hit', u'male', u'', u'pickelhaube', u'suburban', u'costume', u'enjoy', u'new', u'studio', u'mantis', u'pastum', u'gymnast', u'rafting', u'golden', u'waffle iron', u'watering', u'overhead', u'shoot', u'feature', u'machine', u'attempt', u'third', u'tulip', u'jungle', u'wind', u'fig', u'band', u'bone', u'free', u'cucumber', u'bouncing', u'boarding', u'tackled', u'__background__', u'gymnastic apparatus', u'pineapple', u'folded', u'rice', u'sunglasses', u'cushion', u'net', u'covering', u'pretzel', u'steam', u'santum', u'fair', u'sail', u'score', u'toothbrush', u'loaded', u'fry', u'life', u'glider', u'bounce', u'balance', u'cone', u'containing', u'beside', u'wheel', u'rain', u'spaghetti squash', u'thi', u'left', u'photographer', u'forested', u'vanity', u'shoulder', u'pavement', u'officer', u'creek', u'dead', u'ice', u'slide rule', u'dunking', u'horizon', u'raised', u'fabric', u'fight', u'way', u'war', u'landing', u'umpire', u'fashioned', u'dimly', u'topped', u'setting', u'sling', u'potato', u'painting', u'bottom', u'dance', u'crocodile', u'string', u'dig', u'gun', u'chicken', u'tarmac', u'falling', u'french', u'wait', u'pony', u'decker', u'plaza', u'earphone', u'chip', u'get', u'staircase', u'wakeboarder', u'wheelchair', u'pulled', u'polouse', u'still', u'curly', u'scaling', u'lunch', u'base', u'pizza', u'meat loaf', u'shown', u'opened', u'space', u'mess', u'headband', u'place', u'pelican', u'ring', u'sheet', u'bite', u'frame', u'hug', u'wide', u'lick', u'pastry', u'breakfast', u'take', u'topping', u'multiple', u'knee', u'tackling', u'sale', u'professional', u'german', u'crane', u'snack', u'stair', u'ping-pong ball', u'snowsuit', u'sport', u'bicyclist', u'skyscraper', u'checkered', u'restroom', u'tour', u'nearby', u'foggy', u'bmx', u'newspaper', u'mound', u'foam', u'driven', u'mohawk', u'rest', u'instrument', u'chainsaw', u'towel', u'facing', u'audience', u'served', u'clau', u'go-kart', u'tube', u'throw', u'muddy', u'harness', u'strip', u'racquet', u'prepare', u'low', u'pitcher', u'cardoon', u'gymnasium', u'pull', u'arranged', u'strawberry', u'deep', u'cream', u'rubber', u'trash', u'midair', u'peak', u'remote', u'disc', u'follow', u'potpie', u'enjoying', u'stool', u'leaping', u'action', u'taken', u'chopstick', u'flag', u'mounted', u'grill', 
u'wrestler', u'marble', u'backpacking', u'breaking', u'fungus', u'shade', u'egg', u'muzzled', u'style', u'carpeted', u'sauce', u'snowball', u'abacus', u'foreground', u'circuit', u'leading', u'airborne', u'hotel', u'leotard', u'kind', u'double', u'scabbard', u'bride', u'stall', u'blond', u'cave', u'electric', u'cigarette', u'sponsored', u'shepherd', u'dandelion', u'catcher', u'movie', u'recently', u'floaty', u'chambered nautilus', u'hitting', u'racer', u'passing', u'leaning', u'kissing', u'chase', u'funny', u'used', u'snail', u'pomegranate', u'stack', u'center', u'grind', u'bin', u'formal', u'shaped', u'signal', u'zucchini', u'parade', u'limb', u'laughing', u'step', u'range', u'slouse', u'block', u'downhill', u'jockey', u'retrieving', u'atop', u'cloth', u'skull', u'diving', u'rainy', u'tarp', u'black-footed ferret', u'nice', u'prepared', u'hot pot', u'land', u'fresh', u'hello', u'wrestle', u'kitty', u'spoon', u'rack', u'smaller', u'hose', u'giving', u'attire', u'leaving', u'chiton', u'singing', u'frog', u'crab', u'porch', u'saddle', u'donut', u'crossed', u'tied', u'tomato', u'chasing', u'scenic', u'beneath', u'boarder', u'hippopotamus', u'wading', u'sea_anemone', u'wrapped', u'shallow', u'steep', u'bagel', u'gather', u'pipe', u'hi', u'ha', u'jar', u'bug', u'finger', u'handle', u'beam', u'bean', u'whilst', u'contain', u'shake', u'attempting', u'merry', u'yawning', u'sniff', u'swimmer', u'commuter', u'bull', u'smoking', u'plain', u'cross', u'member', u'binoculars', u'underneath', u'well', u'fighting', u'bandanna', u'rocket', u'pay-phone', u'five', u'puppy', u'like', u'campfire', u'shaking', u'construction', u'bun', u'partially', u'flip', u'placed', u'bearing', u'pinatum', u'pie', u'boardwalk', u'pit', u'star', u'baked']\n\n STOPWORDS = ['none','inside', 'near', 'one', 'two', 'three', 'day', 'front', u'i', u'me', u'my', u'myself', u'we', u'our', u'ours', u'ourselves', u'you', u'your', u'yours', u'yourself', u'yourselves', u'he', u'him', u'his', u'himself', u'she', u'her', u'hers', u'herself', u'it', u'its', u'itself', u'they', u'them', u'their', u'theirs', u'themselves', u'what', u'which', u'who', u'whom', u'this', u'that', u'these', u'those', u'am', u'is', u'are', u'was', u'were', u'be', u'been', u'being', u'have', u'has', u'had', u'having', u'do', u'does', u'did', u'doing', u'a', u'an', u'the', u'and', u'but', u'if', u'or', u'because', u'as', u'until', u'while', u'of', u'at', u'by', u'for', u'with', u'about', u'against', u'between', u'into', u'through', u'during', u'before', u'after', u'above', u'below', u'to', u'from', u'up', u'down', u'in', u'out', u'on', u'off', u'over', u'under', u'again', u'further', u'then', u'once', u'here', u'there', u'when', u'where', u'why', u'how', u'all', u'any', u'both', u'each', u'few', u'more', u'most', u'other', u'some', u'such', u'no', u'nor', u'not', u'only', u'own', u'same', u'so', u'than', u'too', u'very', u's', u't', u'can', u'will', u'just', u'don', u'should', u'now', 'background', '__background__', '']\n \n\n unselected_words = [u'', u'pickelhaube', u'enjoy', u'new', u'studio', u'kissing', u'mantis', u'pastum', u'rafting', u'golden', u'waffle iron', u'watering', u'overhead', u'shoot', u'feature', u'machine', u'pizza', u'attempt', u'third', u'tulip', u'jungle', u'wind', u'fig', u'band', u'bone', u'free', u'bouncing', u'boarding', u'tackled', u'__background__', u'gymnasium', u'gymnastic apparatus', u'pineapple', u'folded', u'rice', u'sunglasses', u'cushion', u'net', u'covering', u'pretzel', u'steam', u'santum', u'fair', u'sail', u'score', u'toothbrush', 
u'loaded', u'fry', u'life', u'glider', u'balance', u'cone', u'containing', u'beside', u'wheel', u'rain', u'spaghetti squash', u'thi', u'left', u'photographer', u'forested', u'vanity', u'shoulder', u'pavement', u'officer', u'creek', u'dead', u'slide rule', u'dunking', u'horizon', u'raised', u'fabric', u'fight', u'way', u'war', u'landing', u'umpire', u'fashioned', u'dimly', u'topped', u'setting', u'sling', u'potato', u'bottom', u'dance', u'crocodile', u'ice', u'string', u'dig', u'gun', u'tarmac', u'falling', u'french', u'wait', u'decker', u'earphone', u'chip', u'get', u'staircase', u'wakeboarder', u'wheelchair', u'pulled', u'polouse', u'still', u'curly', u'scaling', u'lunch', u'meat loaf', u'shown', u'opened', u'space', u'mess', u'headband', u'place', u'pelican', u'ring', u'sheet', u'bite', u'hug', u'wide', u'lick', u'pastry', u'breakfast', u'take', u'topping', u'multiple', u'knee', u'bicyclist', u'sale', u'professional', u'german', u'snack', u'stair', u'ping-pong ball', u'snowsuit', u'sport', u'tackling', u'skyscraper', u'checkered', u'restroom', u'tour', u'nearby', u'foggy', u'bmx', u'newspaper', u'mound', u'chopstick', u'foam', u'driven', u'passing', u'mohawk', u'rest', u'instrument', u'chainsaw', u'towel', u'facing', u'audience', u'laughing', u'served', u'clau', u'diving', u'go-kart', u'tube', u'throw', u'harness', u'strip', u'racquet', u'prepare', u'low', u'pitcher', u'cardoon', u'pull', u'arranged', u'strawberry', u'deep', u'cream', u'rubber', u'trash', u'midair', u'peak', u'remote', u'suburban', u'disc', u'follow', u'potpie', u'gymnast', u'enjoying', u'stool', u'leaping', u'action', u'taken', u'flag', u'mounted', u'grill', u'wrestler', u'marble', u'pony', u'backpacking', u'breaking', u'fungus', u'shade', u'egg', u'style', u'carpeted', u'sauce', u'snowball', u'abacus', u'foreground', u'base', u'circuit', u'leading', u'airborne', u'hotel', u'leotard', u'kind', u'double', u'scabbard', u'bride', u'stall', u'blond', u'cave', u'zucchini', u'electric', u'cigarette', u'sponsored', u'shepherd', u'dandelion', u'catcher', u'movie', u'recently', u'floaty', u'chambered nautilus', u'hitting', u'racer', u'leaning', u'chase', u'funny', u'used', u'snail', u'pomegranate', u'cucumber', u'stack', u'center', u'grind', u'bin', u'formal', u'shaped', u'signal', u'parade', u'bounce', u'step', u'plaza', u'range', u'slouse', u'block', u'downhill', u'jockey', u'retrieving', u'atop', u'cloth', u'crane', u'skull', u'rainy', u'tarp', u'black-footed ferret', u'nice', u'prepared', u'hot pot', u'land', u'fresh', u'hello', u'wrestle', u'kitty', u'spoon', u'muzzled', u'rack', u'smaller', u'hose', u'giving', u'attire', u'leaving', u'chiton', u'limb', u'singing', u'frog', u'crab', u'porch', u'donut', u'crossed', u'tied', u'tomato', u'chasing', u'scenic', u'beneath', u'shaking', u'boarder', u'hippopotamus', u'wading', u'sea_anemone', u'wrapped', u'shallow', u'steep', u'bagel', u'gather', u'pipe', u'construction', u'painting', u'chicken', u'jar', u'bug', u'finger', u'handle', u'beam', u'bean', u'whilst', u'contain', u'costume', u'frame', u'shake', u'attempting', u'merry', u'yawning', u'sniff', u'swimmer', u'muddy', u'commuter', u'bull', u'smoking', u'plain', u'cross', u'member', u'binoculars', u'underneath', u'well', u'fighting', u'bandanna', u'rocket', u'pay-phone', u'five', u'puppy', u'like', u'campfire', u'saddle', u'hi', u'bun', u'ha', u'partially', u'flip', u'placed', u'bearing', u'pinatum', u'pie', u'boardwalk', u'pit', u'star', u'baked', u'smoke', u'hospital', u'type', u'hole', u'wand', u'chocolate sauce', u'haired', 
u'onto', u'drawn', u'wear', u'loaf of bread', u'beer', u'mushroom', u'lift', u'make', u'mother', u'cowboy', u'fork', u'otter', u'playpen', u'alone', u'hamburger', u'bottlecap', u'soup', u'cutter', u'square', u'friend', u'scuba', u'hockey', u'wheelie', u'picnic', u'tug', u'squash', u'case', u'inflatable', u'railroad', u'competition', u'slice', u'broken', u'jeep', u'trying', u'apartment', u'chewing', u'grasshopper', u'guacamole', u'splash', u'male', u'dishrag', u'kayaking', u'acorn', u'snowbank', u'clean', u'hit', u'batter', u'kick', u'jewelry', u'fighter', u'cooked', u'putting', u'try', u'wallet', u'mustache', u'artichoke', u'spaghetti sauce', u'crossing', u'retriever', u'veggy', u'produce', u'darkened', u'kiddie', u'mashed potato', u'closed', u'canopy', u'runway', u'vintage', u'fishing', u'doughnut', u'onion', u'leap', u'rodeo', u'cricket', u'made', u'closeup', u'chew', u'sliced', u'hot', u'deck', u'French horn', u'clothe', u'goggle', u'rowing', u'milk can', u'post', u'outstretched', u'chocolate', u'making', u'course', u'hula', u'carry', u'upside', u'desktop', u'lobster', u'suitcase', u'crossbone', u'ty', u'sea slug', u'polecat', u'sandwich', u'racetrack', u'lettuce', u'cockroach', u'toward', u'eaten', u'blender', u'giant', u'atv', u'big', u'holster', u'splashing', u'commercial', u'tunnel', u'bend', u'meter', u'including', u'badger', u'beach wagon', u'beard', u'beak', u'controller', u'match', u'buckle', u'hiker', u'barometer', u'bread', u'serve', u'object', u'stadium', u'tank', u'waterfall', u'stream', u'neck', u'serving', u'manhole cover', u'pitch', u'pistol', u'dribble', u'isopod', u'transit', u'dragonfly', u'huge', u'backyard', u'foot', u'jet', u'dancing', u'custard apple', u'porcupine', u'assorted', u'rope', u'cut', u'showing', u'lemon', u'armadillo', u'salad', u'carrot', u'biting', u'bee', u'hammer', u'lens cap', u'cauliflower', u'kicking', u'denim', u'marmot', u'nintendo', u'fireplace', u'landscape', u'turn', u'hoop', u'wedding', u'eggnog', u'antique', u'bow', u'winter', u'stacked', u'purse', u'beaver', u'kneeling', u'island', u'slot machine', u'Christmas stocking', u'public', u'narrow', u'ladybug', u'stopped', u'burrito', u'necked', u'cheese', u'crayfish', u'single', u'getting', u'tan', u'lined', u'handstand', u'letter opener', u'pencil box', u'doorway', u'leafhopper', u'residential', u'slug', u'eat', u'carriage', u'end', u'lap', u'distance', u'mink', u'sleeping bag', u'time', u'container', u'stunt', u'drife', u'broccoli', u'docked', u'structure', u'cooker', u'go', u'aircraft carrier', u'pudding', u'tape player', u'outfit', u'coaster', u'reaching', u'meat', u'splashed', u'hair slide', u'roller', u'submarine', u'toaster', u'dining', u'rotisserie', u'football', u'spindle', u'christma', u'thimble', u'giant panda', u'pedestrian', u'compass', u'squirrel', u'sea urchin', u'hotdog', u'peach', u'warplane', u'oil filter', u'waiting', u'hip', u'jaguar', u'mortar', u'gear', u'sprinkler', u'beer bottle', u'gondola', u'half', u'stainless', u'military', u'electronic', u'bat', u'handrail', u'perform', u'coffee maker', u'flat', u'round', u'meal', u'telephone', u'pool table', u'seagull', u'hermit crab', u'fancy', u'obstacle', u'honeycomb', u'gravel', u'ladle', u'farm', u'crossword puzzle', u'steel', u'drink', u'pepper', u'tongue', u'owl', u'rule', u'gym', u'seated', u'monotreme', u'cattle', u'water tower', u'vegetable', u'eel', u'variou', u'messy', u'raft', u'castle', u'fire', u'bib', u'skunk', u'gray', u\"carpenter's kit\", u'wombat', u'carnival', u'equipment', u'mousetrap', u'joystick', u'golf 
ball', u'shoji', u'banana', u'clear', u'sloth', u'glove', u'reel', u'desert', u'necklace', u'ear', u'digging', u'rural', u'asian', u'school', u'wreck', u'coffee', u'hydrant', u'mouse', u'mid', u'row', u'puddle', u'engine', u'mongoose', u'stopwatch', u'walkway', u'past', u'beacon', u'koala', u'lip', u'gold', u'scooter', u'puck', u\"potter's wheel\", u'ly', u'oilseed', u'tire', u'drum', u'party', u'radio telescope', u'worm', u'lay', u'magnetic disk', u'bar', u'butterfly', u'dinner', u'birthday', u'power drill', u'saltshaker', u'thing', u'ant', u'lantern', u'hard', u'weasel', u'ridden', u'paddlewheel', u'drilling platform', u'climber', u'safe', u'shower', u'airship', u'cassette player', u'printer', u'wooden spoon', u'bassoon', u'reflection', u'scissor', u'apiary', u'ice cream', u'rider', u'boathouse', u'mud', u'corn', u'guinea pig', u'snow leopard', u'mailbox', u'cement', u'bakery', u'taking', u'variety', u'swan', u'velvet', u'couple', u'fridge', u'strainer', u'dirty', u'screwdriver', u'jigsaw puzzle', u'device', u'alligator', u'oven', u'silver', u'urban', u'country', u'opener', u'leather', u'barrel', u'duck', u'drumstick', u'cake', u'ambulance', u'pencil sharpener', u'barrier', u'safety pin', u'right', u'baseball', u'beetle', u'ax', u'cassette', u'assortment', u'entree', u'armor plate', u'going', u'cart', u'can opener', u'curve', u'pointing', u'dribbling', u'sock', u'home', u'catching', u'church', u'mosque', u'measuring cup', u'striped', u'throne', u'skating', u'sundial', u'CD player', u'grille', u'brushing', u'jersey', u'plunger', u'conch', u'several', u'shaker', u'tile', u'stretcher', u'tower', u'plane', u'salamander', u'lock', u'platform', u'airport', u'hamster', u'graffiti', u'jackfruit', u'cabbage', u'blowing', u'kitten', u'yurt', u'cannon', u'powder', u'sea cucumber', u'sea cow', u'dinosaur', u'racing', u'primate', u'wii', u'skateboarding', u'blanket', u'mug', u'cap', u'challenging', u'throwing', u'library', u'quill', u'trench coat', u'microwave', u'tusker', u'cluttered', u'apple', u'duplicator', u'broom', u'wet', u'altar', u'show', u'heater', u'radiator', u'cargo ship', u'spatula', u'screw', u'neck brace', u'flute', u'peacock', u'sewing machine', u'reading', u'dough', u'rifle', u'long', u'penguin', u'playground', u'photograph', u'luggage', u'plow', u'item', u'factory', u'starfish', u'fire engine', u'locomotive', u'piggy bank', u'empty', u'scale', u'plate rack', u'graze', u'cutting', u'feeding', u'cooking', u'rapid', u'ledge', u'business', u'colored', u'forklift', u'boot', u'wing', u'remote control', u'trampoline', u'gas pump', u'space bar', u'snorkel', u'book', u'microscope', u'rain barrel', u'pair', u'Old World buffalo', u'airplane', u'creature', u'knee pad', u'whale', u'birdhouse', u'oxygen mask', u'bag', u'sailboat', u'mat', u'town', u'using', u'rugby ball', u'staring', u'shopping basket', u'binder', u'team', u'sailing vessel', u'ox', u'leopard', u'shield', u'full', u'Band Aid', u'mountaintop', u'crate', u'modem', u'family', u'tennis ball', u'barn', u'work', u'formation', u'barrow', u'goose', u'syringe', u'soap dispenser', u'kite', u'appliance', u'solar dish', u'lizard', u'paddling', u'cardigan', u'sink', u'control', u'toddler', u'mortarboard']\n\n useless_words = ['holding','hold' ,'wearing', 'wear' , 'standing','sitting', 'stand', 'sit' , 'smiling', 'smile', 'clothing', 'shirt', \"next\", 'posing', 'playing']\n abstract_words = ['beautiful', 'young']\n color_words = ['black', 'white', 'red', 'blue', 'brown']\n\n if choice == 1:\n return STOPWORDS\n\n STOPWORDS += 
unselected_words\n STOPWORDS += useless_words\n STOPWORDS += low_acc_words\n #STOPWORDS += color_words\n #STOPWORDS += abstract_words\n return STOPWORDS", "def choose(self, choice):\n if self.available(choice):\n self.select(choice)", "def chosen():\n wordList = loadWords()\n w = random.choice(wordList)\n word = w[:-1]\n return word", "def return_word():\n wordlist = load_words()\n word = random.choice(wordlist)\n return word", "def lesk(self, tweet, word, created_at, tweet_id):\n ikb_obj = self.database.get(self.collection_of_slangs, field='word', value=word)[0]\n ikb_id = ikb_obj[\"_id\"]\n\n dicts = ikb_obj['payload']\n elements = [value for dictt, item in dicts.items() for value in item]\n definitions, usages = self.extract_def_use(elements)\n if len(definitions) == 0:\n raise ValueError(\"Empty lists of definitions and usages\")\n usages_vec = self.model.vectorize_sentences(usages)\n tweet_vec = self.model.vectorize_sentences([tweet])\n cs = np.array(cosine_similarity(usages_vec, tweet_vec))\n ind_max = np.argmax(cs)\n\n best_definition = definitions[ind_max]\n dictionary_of_best_definition = EnrichmentLayer.find_name_of_dict_by_definition(dicts, best_definition)\n try:\n filter_for_search = {\"dictionary_title\": dictionary_of_best_definition, \"definition\": best_definition}\n document = self.database.get(self.collection_used_slang, filter=filter_for_search)[0]\n tweets = document['tweets']\n tweets.append(tweet_id)\n self.database.update(self.collection_used_slang, \"ikb_id\", ikb_id, {\"tweets\": tweets}, upsert=False)\n id_of_insert = document['_id']\n except IndexError:\n document = {'ikb_id': ikb_id, 'word': word, 'dictionary_title': dictionary_of_best_definition,\n 'definition': best_definition, 'created_at': created_at, 'tweets': [tweet_id]}\n id_of_insert = self.database.insert(self.collection_used_slang, document)\n\n return self.replace_word(tweet.split(), word, best_definition), best_definition, id_of_insert", "def get_spell_tag(page):\n soup = BeautifulSoup(page.text, 'html.parser')\n spell_tag = soup.find('a', {'class': 'spell'})\n\n return spell_tag", "def kindler (self,filename=''):\r\n\r\n import kindleflex as kindle\r\n from numbertools import rom_to_int, is_roman\r\n\r\n\r\n #To suspend spelling, while keeping default value\r\n check_spelling_was = self.check_spelling\r\n self.check_spelling = False\r\n\r\n\r\n YESTERMS_PLUS = YESTERMS+[' ',' ']\r\n\r\n #To load file\r\n while not filename:\r\n filename = input('FILENAME?')\r\n\r\n try:\r\n note_text = file_access.get_text_file(filename)\r\n except:\r\n display.noteprint(('ATTENTION','File cannot be found!'))\r\n note_text = ''\r\n if 'Highlight (' in note_text:\r\n #If there are highlights and notes\r\n\r\n note_obj = kindle.GetNotes(note_text)\r\n note_obj.set_for_kindle()\r\n\r\n else:\r\n note_obj = kindle.GetNotes(note_text)\r\n note_obj.set_for_kindle()\r\n note_obj.set_divider('Note -')\r\n\r\n note_iterator = note_obj.return_iterator()()\r\n active_qualities = set()\r\n\r\n\r\n if input('SHOW TEXT?') in YESTERMS_PLUS:\r\n print(note_text)\r\n\r\n for qual in note_obj.qualities:\r\n if input('Include sequence key for '+qual+'?') in YESTERMS_PLUS:\r\n active_qualities.add(qual)\r\n\r\n additional_keys = input('Add additional keys?') in YESTERMS_PLUS\r\n annotation_before = input('Add annotation before?') in YESTERMS_PLUS\r\n annotation_after = input('Add annotation after?') in YESTERMS_PLUS\r\n query_index = input('Query index position?') in YESTERMS_PLUS\r\n only_notes = input('Only include highlights with 
notes attached?')\r\n temp_c_i = input('Restrict to the following colors?')\r\n include_part = input('Include part?') in YESTERMS_PLUS\r\n if temp_c_i:\r\n colors_to_include = set(x.strip() for x in temp_c_i.split(','))\r\n else:\r\n colors_to_include = set()\r\n\r\n if not query_index:\r\n starting_index = ''\r\n while not starting_index:\r\n starting_index = input('Starting index position?')\r\n try:\r\n starting_index = Index(starting_index)\r\n except:\r\n pass\r\n else:\r\n starting_index = Index(-1)\r\n\r\n\r\n\r\n go_on = True\r\n note = ''\r\n count_down = 0\r\n automatic = False\r\n temp_i = ''\r\n display.noteprint(('NUMBER OF NOTES in COLLECTION',str(note_obj.size)))\r\n\r\n current_iteration=0\r\n while True:\r\n current_iteration+=1\r\n try:\r\n note = next(note_iterator)\r\n except:\r\n display.noteprint(('ATTENTION','FINISHED!'))\r\n break\r\n\r\n\r\n print(str(current_iteration)+'/'+str(note_obj.size))\r\n if count_down > 0:\r\n count_down -= 1\r\n\r\n else:\r\n\r\n new_keys = set()\r\n\r\n text = ''\r\n note_part = ''\r\n part = ''\r\n if 'TEXT' in note:\r\n text = note['TEXT']\r\n if 'NOTE' in note:\r\n note_part = note['NOTE']\r\n if 'highlightcolor' in note:\r\n highlight_color = note['highlightcolor']\r\n\r\n\r\n\r\n\r\n\r\n\r\n if not automatic and ((not colors_to_include or highlight_color in colors_to_include)\r\n and (not only_notes or note_part)):\r\n display.noteprint(('NUMBER OF NOTES in COLLECTION',str(note_obj.size)))\r\n display.noteprint(('CURRRENT POSITION',str(note_obj.position)))\r\n display.noteprint(('TEXT',text))\r\n display.noteprint(('NOTE',note_part))\r\n display.noteprint(('HIGHLIGHT COLOR',highlight_color))\r\n\r\n print_string = ''\r\n\r\n for qual in active_qualities:\r\n if qual in note:\r\n print_string += qual + ':' + note[qual] +', '\r\n if len(print_string)>1:\r\n print_string = print_string[0:-2]\r\n\r\n display.noteprint(('QAULITIES',print_string))\r\n\r\n temp_i = input('CREATE NOTE or quit to QUIT or NUMBERS to SKIP FORWARD or A(UTOMATIC) to add the rest of notes without querying')\r\n\r\n if temp_i.isnumeric():\r\n count_down = int(temp_i)\r\n if temp_i in ['A','AUTOMATIC']:\r\n\r\n automatic = True\r\n\r\n if temp_i in QUITTERMS:\r\n break\r\n\r\n\r\n elif (((not colors_to_include or highlight_color in colors_to_include)\r\n and (not only_notes or note_part))\r\n and (automatic or ((count_down == 0 and (temp_i in YESTERMS_PLUS or len(temp_i)>1 and temp_i[0]==' '))))):\r\n\r\n for qual in active_qualities:\r\n if qual in note:\r\n val = note[qual]\r\n if is_roman(val):\r\n val = str(rom_to_int(val))\r\n\r\n if qual == 'chapter':\r\n\r\n\r\n\r\n chapter_title = ''\r\n chapter = val\r\n\r\n if ':' in chapter:\r\n # THIS is specially designed for the kindle note format\r\n chapter_number, chapter_title = chapter.split(':')[0].strip(),chapter.split(':')[1].strip()\r\n if not chapter_number.isnumeric():\r\n try:\r\n chapter_number = str(rom_to_int(chapter_number.lower()))\r\n except:\r\n pass\r\n else:\r\n pass\r\n else:\r\n if not chapter.isnumeric() and not (part and include_part):\r\n\r\n part = chapter\r\n chapter_number = ''\r\n\r\n if chapter_number:\r\n new_keys.add('chapter@'+chapter_number)\r\n if chapter_title:\r\n new_keys.add('chaptertitle@'+chapter_title)\r\n else:\r\n new_keys.add(qual+'@'+val)\r\n\r\n if not automatic and additional_keys:\r\n for x in input('ADDITIONAL KEYS?').split(','):\r\n new_keys.add(x.strip())\r\n before, after = '',''\r\n if not automatic and annotation_before:\r\n before = input('ENTER ANNOTATION 
BEFORE?') +'/BREAK/'\r\n if not automatic and annotation_after:\r\n after = '/BREAK/' + input('ENTER ANNOTATION AFTER?')\r\n if note_part:\r\n note_part = '/BREAK/' + note_part\r\n\r\n\r\n\r\n if not automatic and query_index:\r\n new_index = None\r\n while not new_index:\r\n new_index = input('INDEX?')\r\n try:\r\n new_index = Index(new_index)\r\n except:\r\n pass\r\n else:\r\n new_index = starting_index\r\n\r\n final_text = before+text+after+note_part\r\n\r\n\r\n self.enter(ek=new_keys,\r\n et=final_text,\r\n right_at=query_index,\r\n ind=new_index)\r\n\r\n self.check_spelling = check_spelling_was", "def check_spellings(text):\n\n for word in vocabulary:\n text = correct(word, text, 0.7)\n return text", "def show_flashcard():\r\n random_key = choice(list(glossary))\r\n print('Define: ', random_key)\r\n input('Press return to see the correct definition')\r\n print(glossary[random_key])", "def on_text(self, char: str, game: type):", "def change_wordMeaning(self, conversation):\n if conversation == None:\n return\n\n # If no word was used in the last conversation\n if conversation.word == None and conversation.meaning != None:\n if self.random.random() <= self.model.new_word_rate: # Probability of 5% default\n new_word = self.create_word()\n while new_word in self.wordsuccess: # cannot have one word with multiple meanings\n new_word = self.create_word()\n print(\"New word:\", new_word)\n self.create_link(new_word, conversation.meaning)\n\n # If a word was used in the last conversation\n elif conversation.word != None:\n self.wordsuccess[conversation.word].append(conversation.success)\n\n # if the word was used R times, there is a chance it will be dropped\n if len(self.wordsuccess[conversation.word]) >= self.model.change_rate:\n if self.do_change(self.wordsuccess[conversation.word]):\n self.delete_link(conversation.word) # forget word\n else:\n self.wordsuccess[conversation.word] = [] # reset success", "def choose_option(friendly,enemy,opt1=\"Fight\",opt2=\"Bag\",opt3=\"Pokemon\",opt4 = \"Run\"):\n background_color = blit_background()[1]\n blit_friendly(friendly)\n blit_enemy(enemy)\n blit_health(friendly,enemy)\n pygame.display.update()\n pause(friendly,enemy,3) #to stop the click from 1st menu selecting option in second\n mouse_pos = 0,0\n while True:\n event_check(False, friendly,enemy)\n blit_background()\n opt_1 = pygame.draw.rect(screen,((background_color)),(60,540,300,70))\n blit_text(opt1,(70,545))\n opt_3 = pygame.draw.rect(screen,(background_color),(60,615,300,70))\n blit_text(opt2,(70,620))\n opt_2 = pygame.draw.rect(screen,(background_color),(360,540,300,70))\n blit_text(opt3,(370,545))\n opt_4 = pygame.draw.rect(screen,(background_color),(360,615,300,70))\n blit_text(opt4,(370,620))\n mouse_pos = get_click()\n blit_friendly(friendly)\n blit_enemy(enemy)\n blit_health(friendly,enemy)\n blit_text(\"What will you do?\",(800,580))\n pygame.display.update()\n if opt_1.collidepoint(mouse_pos):\n option = 1\n break\n elif opt_2.collidepoint(mouse_pos):\n option = 2\n break\n elif opt_3.collidepoint(mouse_pos):\n option = 3\n break\n elif opt_4.collidepoint(mouse_pos):\n option = 4\n break\n pygame.display.update()\n return option", "def tell_options(self, options):\n\n # store, in case we need to repeat\n self.last_options = options\n\n i = 0\n for d in ('left', 'forward', 'right', 'back'):\n i += 1\n if d in options:\n d_string = labyrinth_text.directions[d]\n nextpos = options[d]\n room_name, room_loc, room_dir = self.rooms[nextpos]\n\n if d == 'back':\n # dead ens are special\n if 
len(options) == 1:\n txt = labyrinth_text.deadend\n else:\n txt = labyrinth_text.youcangoback\n else:\n txt = labyrinth_text.youcango\n\n txt = txt.format(d_string, room_dir, room_name)\n #~ print txt\n \n #self.audio.play_presynthesized(nextpos * 5 + i)\n #~ self.audio.play_sound_file(self.sounds_direction[nextpos][d])\n self.audio.synthesize_and_play(txt)", "def tell_position(self, curpos):\n \n # store, in case we need to repeat\n self.last_curpos = curpos\n\n # fetch room strings\n room_name, room_loc, room_dir = self.rooms[curpos]\n \n # build utterance text\n txt = labyrinth_text.youare.format(room_loc, room_name)\n\n #~ print '-' * 70\n #~ print txt\n \n #self.audio.play_presynthesized(curpos * 5)\n #~ self.audio.play_sound_file(self.sounds_location[curpos])\n self.audio.synthesize_and_play(txt)", "def spell_corrector(words):\n de = enchant.Dict(\"de_DE\")\n en = enchant.Dict(\"en_GB\")\n new_words = []\n for w in words:\n if ( de.check(w) | en.check(w) ):\n new_words += [ w ]\n else:\n sug = de.suggest(w)\n if len(sug) > 0: new_words += [ sug[0] ]\n else: new_words += [ w ]\n return new_words", "def __dis_context__(self, context, word):\n senses = self.vs.get_senses(word, self.ignore_case)\n if self.verbose:\n print(\"Senses of a target word:\")\n print(senses)\n\n if len(senses) == 0: # means we don't know any sense for this word\n return None\n\n # collect context vectors\n vctx = [self.vc[c] for c in context]\n\n if len(vctx) == 0: # means we have no context\n return None\n # TODO: better return most frequent sense or make random choice\n\n # filter context vectors, if aplicable\n if self.filter_ctx >= 0:\n vctx = self.__filter__(vctx, senses, self.filter_ctx)\n\n if self.ctx_method == 'prob':\n avg_context = np.mean(vctx, axis=0)\n scores = [self.__logprob__(avg_context, self.vs[sense]) for sense, prob in senses]\n\n elif self.ctx_method == 'sim':\n avg_context = np.mean(vctx, axis=0)\n scores = [self.__cosine_sim__(avg_context, self.vs[sense]) for sense, prob in senses]\n if self.verbose:\n print(\"Sense probabilities:\")\n print(scores)\n\n else:\n raise ValueError(\"Unknown context handling method '%s'\" % self.ctx_method)\n\n # return sense (word#id), scores for senses\n return senses[np.argmax(scores)][0], scores", "def as_you_type_undo(self, event: Event) -> None:\n if not self.word:\n g.es('[no previous word]')\n return\n self.as_you_type_replace(self.word)", "async def choose(self, ctx, *args):\n choicelist = []\n for choice in args:\n choicelist.append(choice)\n result = random.choice(choicelist)\n await ctx.send(\"Like it or not, I choose {}!\".format(result))", "def func(self):\n try:\n if not self.switches or \"all\" in self.switches:\n self.list_favor()\n elif \"set\" in self.switches or \"add\" in self.switches:\n self.add_favor()\n elif \"remove\" in self.switches:\n self.remove_favor()\n else:\n raise CommandError(\"Invalid switch.\")\n except CommandError as err:\n self.msg(err)\n else:\n self.mark_command_used()", "def select_editor_contextual(menuName, onselected=None, *args, **kwargs):\n\n process_all_events()\n windows = Gtk.Window.list_toplevels()\n click_in_text(GPS.EditorBuffer.get().current_view().cursor(), button=3)\n\n def internal_onselected(windows):\n close_contextual(windows)\n process_all_events()\n if onselected:\n onselected(*args, **kwargs)\n\n GLib.idle_add(internal_onselected, windows)\n activate_contextual(windows, menuName)", "def select_word(options):\n options = list(set(options)) # Remove duplicate words\n selection = []\n if 
len(options) > 10:\n for n in range(10):\n word = random.choice(options)\n while word in selection:\n word = random.choice(options)\n selection.append(word)\n else:\n selection = options\n # Print selection options\n for n in range(len(selection)):\n index = n + 1\n print(\"{}. {}\".format(index, selection[n]))\n choice = input(\"Choice: \")\n if choice == 'x':\n sys.exit()\n if choice == 'p':\n choice = input(\"Word selection: \")\n word = selection[int(choice) - 1]\n return add_punctuation(word)\n choice = int(choice) - 1\n word = selection[choice]\n return word", "def setWordKnown(self):\n self.wordKnown = ''.join(['_ ' if w not in self.guessedRight else w for w in self.getWord()])", "def give(self):\n if self.location.character:\n item = input(f\"What do you want to give to {self.location.character.name}?\\n>\")\n if item in self.backpack:\n if self.location.character.give(item):\n if isinstance(self.location.character, Friend):\n loot = self.location.character.possession\n self.backpack[loot.name] = loot\n self.location.character.treat = None\n self.location.character.possession = None\n del self.backpack[item]\n return f\"{self.location.character.name} accepted your gift, and gave you {loot}\"\n if isinstance(self.location.character, Enemy):\n name = self.location.character.name\n self.location.character = None\n del self.backpack[item]\n return f\"You fend off {name} with {item}\"\n else:\n return f\"It does not accept {item}\"\n else:\n return f\"{self.location.character.name} does not like {item}\"\n else:\n return \"You don't have this\"\n else:\n return \"There is no one here\"", "def get_word(self, roll):\n self.passphrase.append(self.wordlist[int(roll)]);", "def show_word(self):\n self.display_word = len(self.chosen_word) * \"_ \"\n Donatello.draw_word(self.display_word)\n return self.display_word", "def func(self):\n char = self.character\n clothing = char.search(self.args, candidates=char.contents)\n if not clothing:\n return\n if not clothing.db.worn:\n char.msg(\"You're not wearing that!\")\n return\n if clothing.db.covered_by:\n char.msg(\"You have to take off %s first.\" % clothing.db.covered_by.name)\n return\n clothing.remove(char)", "def ability_3(self,target):\r\n damage = (self.get_dexterity()+self.get_strength())\r\n target.receive_damage(damage)", "def hook(self, sentence, words):\n pass", "def correct_spelling(sample):\n words = nltk.word_tokenize(sample[\"full_text\"])\n for word in words:\n for char, duplicate_char in duplicate_chars.items():\n word = re.sub(duplicate_char, char, word)\n #sample[\"full_text\"] = ' '.join([spell(w)] for w in tweet.split()])\n sample[\"full_text\"] = ' '.join([w for w in words])\n return sample", "def validate(self, context):\n _logger.info(\"SpellDictionary EN validated\")\n self.dictionary = {\"hello\" , \"world\", \"welcome\", \"to\", \"the\", \"ipopo\", \"tutorial\"}", "async def optin(self, ctx):\n optout.delete_one({\"_id\": ctx.author.id})\n await ctx.send(f\"You have **opted into** A Sound Mood. To leave the program, use ?optout.\")", "def spelling(self):\r\n return conf.lib.clang_getTranslationUnitSpelling(self)", "def text_to_speech(entry):\n text = entry.get_text()\n if text:\n subprocess.call([\"milena_say\", text])", "def custom_choice(update: Update, context: CallbackContext) -> int:\r\n update.message.reply_text(\r\n 'Primero agrega un tรญtulo a tu comentario, por ejemplo \"Atenciรณn\"'\r\n )\r\n\r\n return TYPING_CHOICE" ]
[ "0.6996071", "0.68401766", "0.6699092", "0.6081234", "0.6041909", "0.57928914", "0.5791414", "0.57462186", "0.5697205", "0.56437635", "0.56266904", "0.5601367", "0.5584377", "0.5549065", "0.54703563", "0.5469989", "0.5459944", "0.5440879", "0.5421685", "0.5419961", "0.5412594", "0.5399572", "0.5395074", "0.5371796", "0.53495604", "0.534505", "0.5335321", "0.5321675", "0.530375", "0.5303167", "0.53008294", "0.52583927", "0.5252657", "0.5248759", "0.5235073", "0.52236503", "0.5220335", "0.52085245", "0.52028275", "0.5177716", "0.51772493", "0.5151045", "0.51429963", "0.5137359", "0.51301354", "0.5125237", "0.5119367", "0.5117464", "0.5114822", "0.50968885", "0.5092398", "0.5051598", "0.5050705", "0.5045221", "0.50215846", "0.50190955", "0.49991226", "0.49750504", "0.49611306", "0.49602562", "0.49551257", "0.49526095", "0.49367565", "0.4935646", "0.4925065", "0.49202785", "0.49137136", "0.4911588", "0.49056464", "0.4904005", "0.4894698", "0.48866132", "0.48793042", "0.48718852", "0.48597685", "0.48526555", "0.48479542", "0.48474672", "0.48423505", "0.4835355", "0.4826809", "0.48261252", "0.48253495", "0.48248422", "0.4810554", "0.48079905", "0.4798883", "0.4796975", "0.47962892", "0.47957528", "0.4793731", "0.47917292", "0.47886094", "0.47878155", "0.47853854", "0.47801307", "0.47778037", "0.47768977", "0.4772388", "0.4768003" ]
0.5441747
17
Role claimer message. (Owner only)
async def role_claimer(event): # Double check. if not event.user_permissions.can_administrator: abort('Admin only') return InteractionResponse('Claim role by clicking on it', components = ROLE_CLAIMER_COMPONENTS)
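The document above references `ROLE_CLAIMER_COMPONENTS` without defining it, and its imports are out of frame (`abort` and `InteractionResponse` come from `hata.ext.slash`). Below is a minimal sketch of how such a component row could be built with hata's slash extension; the role ids and button labels are placeholder assumptions, not part of the dataset entry:

```python
from hata import Role
from hata.ext.slash import Button, Row

# Hypothetical roles; `precreate` lets us reference them by id up front.
ROLE_MOVIE_WATCHER = Role.precreate(722278669445537832)
ROLE_NEWS_FOLLOWER = Role.precreate(722278756620894210)

# Each button encodes the target role id in its `custom_id`, so a
# component-interaction handler can parse it back out and toggle the
# role on whoever clicked.
ROLE_CLAIMER_COMPONENTS = Row(
    Button('Movie watcher', custom_id=f'role_claimer.{ROLE_MOVIE_WATCHER.id}'),
    Button('News follower', custom_id=f'role_claimer.{ROLE_NEWS_FOLLOWER.id}'),
)
```

A matching component-interaction handler registered by `custom_id` would then add or remove the parsed role from the interacting user, similar in spirit to the disnake `handle_role_reaction_press` negative further down, but in hata's API.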
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def role_command():", "def myrole(var, wrapper, message):\n\n ps = get_participants()\n if wrapper.source not in ps:\n return\n\n role = get_main_role(wrapper.source)\n if role in Hidden:\n role = var.HIDDEN_ROLE\n\n evt = Event(\"myrole\", {\"role\": role, \"messages\": []})\n if not evt.dispatch(var, wrapper.source):\n return\n role = evt.data[\"role\"]\n\n wrapper.pm(messages[\"show_role\"].format(role))\n\n for msg in evt.data[\"messages\"]:\n wrapper.pm(msg)", "async def reacrole(self, ctx: commands.Context):\n pass", "async def handle_role_reaction_press(interaction: disnake.MessageInteraction):\n if interaction.message not in await ReactionRoleMessage.get_all():\n return\n\n role_id = int(interaction.component.custom_id)\n member: disnake.Member = interaction.author\n user = await User.get(member.id)\n role = member.get_role(role_id)\n if role:\n await member.remove_roles(role, reason=\"Reaction Role Message\")\n await send_message(user=user, key=\"role_removed\", inter=interaction, ephemeral=True)\n else:\n role = interaction.guild.get_role(role_id)\n if role:\n try:\n await member.add_roles(role, reason=\"Reaction Role Message\")\n await send_message(user=user, key=\"role_added\", inter=interaction, ephemeral=True)\n except disnake.errors.Forbidden as e:\n await send_message(user=user, key=\"no_permissions\", inter=interaction, ephemeral=True)\n else:\n await send_message(user=user, key=\"role_not_found\", inter=interaction, ephemeral=True)", "async def _toggle_role(self, ctx, selfrole: Selfrole):\n\n if selfrole.role not in ctx.message.author.roles:\n try:\n await ctx.message.author.add_roles(selfrole.role)\n except discord.Forbidden:\n raise exceptions.ForbiddenError(\n exceptions.ForbiddenTask.ADD_ROLE, selfrole.role.name\n )\n\n await ctx.send(f\"{config.YES} {selfrole.join_message}\")\n\n elif selfrole.role in ctx.message.author.roles:\n try:\n await ctx.message.author.remove_roles(selfrole.role)\n except discord.Forbidden:\n raise exceptions.ForbiddenError(\n exceptions.ForbiddenTask.REMOVE_ROLE, selfrole.role.name\n )\n\n await ctx.send(\n f\"{config.YES} The `{selfrole.role.name}` role was removed from you.\"\n )", "async def togglerole(self, ctx, role=\"\"):\r\n\r\n user = ctx.message.author\r\n joinmsg = \"Joined {0} role\"\r\n leavemsg = \"Left {0} role\"\r\n\r\n role = await commands.clean_content().convert(ctx, role)\r\n\r\n if role == \"\":\r\n embed = discord.Embed(title=\"Toggleable Roles:\", color=discord.Color.dark_teal())\r\n embed.description = \"\"\"\r\n - :race_car: Mario Kart 8 Deluxe: MK8D\r\n - :squid: Splatoon 2: spla2n\r\n - :card_box: Cards Against Humanity: cah\r\n - :bomb: Counter-Strike: Global Offensive: csgo\r\n - :gun: PUBG: pubg\r\n - :red_circle: Red Eclipse: redeclipse\r\n - :robot: Titanfall (2): titanfall\r\n - :boxing_glove: Super Smash Bros.: smash\r\n - :shopping_cart: Fortnite: fortnite\r\n \"\"\"\r\n await ctx.send(\"\", embed=embed)\r\n\r\n elif role.lower() == \"mk8d\":\r\n if self.bot.mk8d_role in user.roles:\r\n await user.remove_roles(self.bot.mk8d_role)\r\n await ctx.send(leavemsg.format(role.upper()))\r\n\r\n else:\r\n print(\"before error\")\r\n await user.add_roles(self.bot.mk8d_role)\r\n print(\"after error\")\r\n await ctx.send(joinmsg.format(role.upper()))\r\n\r\n elif role.lower() == \"spla2n\":\r\n if self.bot.spla2n_role in user.roles:\r\n await user.remove_roles(self.bot.spla2n_role)\r\n await ctx.send(leavemsg.format(role.lower()))\r\n\r\n else:\r\n await user.add_roles(self.bot.spla2n_role)\r\n await 
ctx.send(joinmsg.format(role.lower()))\r\n\r\n elif role.lower() == \"cah\":\r\n if self.bot.cah_role in user.roles:\r\n await user.remove_roles(self.bot.cah_role)\r\n await ctx.send(leavemsg.format(role.lower()))\r\n\r\n else:\r\n await user.add_roles(self.bot.cah_role)\r\n await ctx.send(joinmsg.format(role.lower()))\r\n\r\n elif role.lower() == \"csgo\":\r\n if self.bot.csgo_role in user.roles:\r\n await user.remove_roles(self.bot.csgo_role)\r\n await ctx.send(leavemsg.format(role.lower()))\r\n\r\n else:\r\n await user.add_roles(self.bot.csgo_role)\r\n await ctx.send(joinmsg.format(role.lower()))\r\n\r\n elif role.lower() == \"pubg\":\r\n if self.bot.pubg_role in user.roles:\r\n await user.remove_roles(self.bot.pubg_role)\r\n await ctx.send(leavemsg.format(role.lower()))\r\n\r\n else:\r\n await user.add_roles(self.bot.pubg_role)\r\n await ctx.send(joinmsg.format(role.lower()))\r\n\r\n elif role.lower() == \"redeclipse\":\r\n if self.bot.redeclipse_role in user.roles:\r\n await user.remove_roles(self.bot.redeclipse_role)\r\n await ctx.send(leavemsg.format(role.lower()))\r\n\r\n else:\r\n await user.add_roles(self.bot.redeclipse_role)\r\n await ctx.send(joinmsg.format(role.lower()))\r\n\r\n elif role.lower() == \"titanfall\":\r\n if self.bot.titanfall_role in user.roles:\r\n await user.remove_roles(self.bot.titanfall_role)\r\n await ctx.send(leavemsg.format(role.lower()))\r\n\r\n else:\r\n await user.add_roles(self.bot.titanfall_role)\r\n await ctx.send(joinmsg.format(role.lower()))\r\n\r\n elif role.lower() == \"smash\":\r\n if self.bot.smashbros_role in user.roles:\r\n await user.remove_roles(self.bot.smashbros_role)\r\n await ctx.send(leavemsg.format(role.lower()))\r\n\r\n else:\r\n await user.add_roles(self.bot.smashbros_role)\r\n await ctx.send(joinmsg.format(role.lower()))\r\n\r\n elif role.lower() == \"fortnite\":\r\n if self.bot.fortnite_role in user.roles:\r\n await user.remove_roles(self.bot.fortnite_role)\r\n await ctx.send(leavemsg.format(role.lower()))\r\n\r\n else:\r\n await user.add_roles(self.bot.fortnite_role)\r\n await ctx.send(joinmsg.format(role.lower()))\r\n else:\r\n msg = \"{} is not a togglable role\".format(role)\r\n await ctx.send(msg)", "async def roleme(self, ctx):\n if ctx.invoked_subcommand is None:\n allowed = [\n f\"`{ctx.guild.get_role(role)}`\" for role in ctx.bot.cache[str(\n ctx.guild.id)].get(\"self_roles\", [])\n ]\n if len(allowed):\n return await ctx.send(embed = await Macro.send(\n f\"You can assign yourself the roles: {', '.join(allowed)}\")\n )\n\n await ctx.send(embed = await Macro.send(\n f\"This guild doesn't have any self assignable roles\"))", "def role(self) -> GameRole:\n pass", "async def role(ctx, role: discord.Role = None):\n if role is None:\n await ctx.send(\"List of assignable roles: \" + str(allowed_roles))\n if role.name in allowed_roles:\n if not role in ctx.message.author.roles:\n await ctx.message.author.add_roles(role)\n await ctx.send(\"Role added.\")\n else:\n await ctx.message.author.remove_roles(role)\n await ctx.send(\"Role removed.\") \n else:\n await ctx.send(\"That role doesn't exist, or you don't have permission to modify it.\")", "async def createrole(self, ctx, role: str):\n if role.lower() == \"muted\" or role.lower() == \"punished\":\n return await ctx.send(\"Can not create this roles.\")\n \"\"\"Create a new role\"\"\"\n role = await ctx.guild.create_role(name=role)\n return await ctx.send(\n embed=discord.Embed(\n title=f\"Role *{role}* has been created!\",\n timestamp=datetime.datetime.utcnow(),\n 
colour=discord.Colour.green(),\n )\n )", "async def setroles(self, ctx):\n if not has_permissions(ctx, MOD_ROLE):\n await ctx.send(\"You do not have sufficient permissions to perform this command\", hidden=True)\n return False\n\n def check(m):\n return m.author == ctx.author\n\n roles_dict = {}\n while True:\n info_embed = Embed(title=\"/setroles - Enter information\", colour=Colour.dark_purple())\n info_embed.description = \"Please enter a message tagging the role and all the members who you would like \" \\\n \"to assign it to.\"\n info_embed.set_footer(text='\"done/finished/yes/y\" to continue\\n\"no/cancel/n/stop\" to cancel')\n\n for role in roles_dict:\n users_string = f\"{role.mention}\\n\"\n for user in roles_dict[role]:\n users_string += f\"{user.mention}\\n\"\n info_embed.add_field(name=f\"{role.name} ({len(roles_dict[role])})\", value=users_string)\n\n info_message = await ctx.send(embed=info_embed)\n\n response = await self.bot.wait_for(\"message\", check=check)\n if response.content.lower() in [\"done\", \"finished\", \"yes\", \"y\"]:\n if len(roles_dict.keys()) > 0:\n await response.delete()\n await info_message.delete()\n total_roles_count = 0\n embed = Embed(title=\"Roles Summary\", description=\"Please review the roles you are about to set\\n\\n\"\n \"*this message has a timeout of 5 minutes*\",\n colour=Colour.dark_purple())\n embed.set_footer(text=f\"โœ… to set roles\\nโŒ to cancel\")\n for role in roles_dict:\n users_string = f\"{role.mention}\\n\"\n for user in roles_dict[role]:\n users_string += f\"{user.mention}\\n\"\n total_roles_count += len(roles_dict[role])\n embed.add_field(name=f\"{role.name} ({len(roles_dict[role])})\", value=users_string)\n embed.description += f\"\\n*{total_roles_count} members in total*\"\n message = await ctx.send(embed=embed)\n await message.add_reaction(\"โœ…\")\n await message.add_reaction(\"โŒ\")\n\n def check_reaction(r, u):\n return r.message.id == message.id and u == ctx.author and str(r.emoji) in [\"โœ…\", \"โŒ\"]\n\n set_roles = False\n while True:\n try:\n reaction, user = await self.bot.wait_for(\"reaction_add\", timeout=300, check=check_reaction)\n if str(reaction.emoji) == \"โœ…\":\n await message.clear_reactions()\n embed.set_footer(text=Embed.Empty)\n embed.description = Embed.Empty\n await message.edit(embed=embed)\n set_roles = True\n break\n elif str(reaction.emoji) == \"โŒ\":\n raise TimeoutError\n else:\n await message.remove_reaction(reaction, user)\n except TimeoutError:\n await message.edit(content=\"Message Expired\", embed=None)\n await message.clear_reactions()\n break\n if set_roles:\n roles_embed = Embed(title=\"Setting Roles\", colour=Colour.green())\n roles_assigned = 0\n roles_msg = await ctx.send(embed=roles_embed)\n for role in roles_dict:\n users_string = f\"{role.mention}\\n\"\n for member in roles_dict[role]:\n users_string += f\"{member.mention}\\n\"\n await member.add_roles(role, reason=f\"role added by {ctx.author.name} with setroles\"\n f\" command\")\n roles_assigned += 1\n if roles_assigned % 5 == 0:\n roles_embed.description = f\"Progress: {roles_assigned}/{total_roles_count}\"\n await roles_msg.edit(embed=roles_embed)\n roles_embed.add_field(name=f\"{role.name} ({len(roles_dict[role])})\", value=users_string)\n await roles_msg.edit(embed=roles_embed)\n roles_embed.title = \"Roles Set\"\n roles_embed.description = f\"Progress: Done\"\n await roles_msg.edit(embed=roles_embed)\n await message.delete()\n return\n else:\n await error_embed(ctx, \"You didn't input anything, cancelled setroles 
command\")\n return\n elif response.content.lower() in [\"no\", \"cancel\", \"n\", \"stop\"]:\n await info_message.delete()\n await response.delete()\n await ctx.send(embed=Embed(title=\"Cancelled\", description=\"You cancelled the setroles command\",\n colour=Colour.dark_purple()))\n return\n else:\n members = response.mentions\n if len(members) > 0:\n if len(response.role_mentions) == 1:\n role = response.role_mentions[0]\n server = ctx.guild\n bot_member = server.get_member(self.bot.user.id)\n\n if bot_member.top_role.position <= role.position:\n await error_embed(ctx, \"This role is too high to be set by the bot. Please enter a \"\n \"different role.\")\n elif ctx.author.top_role.position <= role.position:\n await error_embed(ctx, \"You cannot give others this role\")\n else:\n roles_dict[role] = members\n else:\n await error_embed(ctx, \"You can only mention one role at a time\")\n else:\n await error_embed(ctx, \"You did not mention any members\")\n await info_message.delete()", "async def edit(self, ctx: commands.Context, message_id: int):\n\n # Standard wait_for check function for message inputs, makes sure the command user's messages in command channel are considered\n def message_check(m: discord.Message):\n return m.author == ctx.author and m.channel == ctx.channel\n\n # Standard reaction check that ensures no duplicate reacrole entry, just name the relevant message 'm' before adding this one to check kwarg in wait_for\n def reaction_check_nd(_r: discord.Reaction, _u):\n return _u == ctx.author and _r.message == m and str(_r.emoji) not in self._cache[ctx.guild.id][PM.id]\n\n if message_id in self._cache[ctx.guild.id]:\n\n # Not actually channel id int but I decided to name it that way anyway\n chanid = await self.bot.pool.fetchrow(\"SELECT channelid FROM selfrole_lookup WHERE messageid = $1\", message_id)\n chan: discord.TextChannel = ctx.guild.get_channel(chanid['channelid'])\n\n # Currently need message content for title, might start saving title in db to avoid this api call idk\n try:\n PM: discord.Message = await chan.fetch_message(message_id)\n except discord.NotFound:\n await ctx.send(\"It would seem that the message for the role menu you're trying to edit has been deleted, please try creating a new one\")\n return\n\n buttons = [\"\\U0001f1e6\", \"\\U0001f1e7\", \"\\U0001f1e8\", \"\\U0001f1e9\"]\n\n e1 = discord.Embed(title=\"What aspect of the menu do you wish to change?\",\n description=\"\\U0001f1e6 - Add a role\\n\\n\"\n \"\\U0001f1e7 - Remove existing role\\n\\n\"\n \"\\U0001f1e8 - Edit the reaction of a role\\n\\n\"\n \"\\U0001f1e9 - Change the title\",\n colour=discord.Colour.blue())\n # Send the initial menu\n menu = await ctx.send(embed=e1)\n\n for button in buttons:\n await menu.add_reaction(button)\n\n # We need the first reaction where the emoji is one of the buttons\n def button_check(_r, _u):\n return _u == ctx.author and _r.message == menu and str(_r.emoji) in buttons\n # Get the option the user chose\n try:\n r, u = await self.bot.wait_for('reaction_add', check=button_check, timeout=20)\n except asyncio.TimeoutError:\n await ctx.send(\"Timed out\")\n return\n\n # If user wanted to add a new role to the menu\n if str(r.emoji) == buttons[0]:\n await menu.clear_reactions()\n await menu.edit(content=\"What role do you wish to be added? 
Enter its mention, id, or name\", embed=None)\n\n # Get the role object for the new role to be added\n try:\n m = await self.bot.wait_for('message', check=message_check, timeout=30)\n newrole = await self.rc.convert(ctx, m.content)\n\n if newrole.id in self._cache[ctx.guild.id][PM.id].values():\n await ctx.send(\"Error: role already exists in the menu, perhaps you meant to edit it?\")\n return\n except asyncio.TimeoutError:\n await ctx.send(\"Timed out\")\n return\n except commands.BadArgument:\n await ctx.send(\"Role not found, please try again\")\n return\n\n m = await ctx.send(f\"React on this message with the reaction that will correspond to the role `{newrole}`\")\n\n # Get the reaction/emoji that will correspond to the new role and yank everything into db\n try:\n r, u = await self.bot.wait_for('reaction_add', check=reaction_check_nd, timeout=30)\n self._cache[ctx.guild.id][PM.id][str(r.emoji)] = newrole.id\n\n query = \"\"\"\n INSERT INTO selfrole (messageid, emoji, roleid)\n VALUES ($1, $2, $3)\n \"\"\"\n\n await self.bot.pool.execute(query, PM.id, str(r.emoji), newrole.id)\n\n # Standard way of getting the embed description of the role menu\n newmenudesc = \"\\n\\n\".join([f\"{k} - {ctx.guild.get_role(v)}\" for k, v in self._cache[ctx.guild.id][PM.id].items()])\n\n newembed = discord.Embed(title=PM.embeds[0].title,\n description=newmenudesc,\n colour=discord.Colour.blue())\n\n await PM.edit(embed=newembed)\n await PM.add_reaction(r.emoji)\n await ctx.send(\"Menu edit completed successfully, you may now delete the messages other than the menu itself\")\n\n except asyncio.TimeoutError:\n await ctx.send(\"Timed out\")\n\n elif str(r.emoji) == buttons[1]:\n # Gotta yank the buttons to make everything squeaky clean\n await menu.clear_reactions()\n await menu.edit(content=\"Enter the role you wish to remove from the menu, can be mention, id or name\",\n embed=None)\n\n try:\n # Get role from user\n m = await self.bot.wait_for('message', check=message_check, timeout=20)\n role = await self.rc.convert(ctx, m.content)\n\n # If user trying to edit reaction to role that wasn't even in the menu to begin with\n if role.id not in self._cache[ctx.guild.id][PM.id].values():\n raise commands.BadArgument(\"Role not in cache\")\n\n # Get the key to delete using the old fashioned way, and subsequently delete it\n targetkey = \"\"\n for key, value in self._cache[ctx.guild.id][PM.id].items():\n if value == role.id:\n targetkey = key\n break\n self._cache[ctx.guild.id][PM.id].pop(targetkey)\n\n # After everything is done and dusted, make database entry and edit the menu\n query = \"\"\"\n DELETE FROM selfrole WHERE messageid = $1 AND roleid = $2\n \"\"\"\n await self.bot.pool.execute(query, PM.id, role.id)\n newmenudesc = \"\\n\\n\".join(\n [f\"{k} - {ctx.guild.get_role(v)}\" for k, v in self._cache[ctx.guild.id][PM.id].items()])\n\n newembed = discord.Embed(title=PM.embeds[0].title,\n description=newmenudesc,\n colour=discord.Colour.blue())\n await PM.edit(embed=newembed)\n await PM.clear_reaction(targetkey)\n await ctx.send(\n \"Menu edit completed successfully, you may now delete the messages other than the menu itself\")\n except asyncio.TimeoutError:\n await ctx.send(\"Timed out\")\n return\n except commands.BadArgument:\n await ctx.send(\"I don't think that role exists in that menu, run the command again\")\n return\n\n elif str(r.emoji) == buttons[2]:\n # Same drill, remove buttons to make it look clean\n await menu.clear_reactions()\n await menu.edit(embed=None, content=\"Enter the role for 
which you wish to change the reaction.\")\n\n try:\n m = await self.bot.wait_for('message', check=message_check, timeout=20)\n role = await self.rc.convert(ctx, m.content)\n\n if role.id not in self._cache[ctx.guild.id][PM.id].values():\n raise commands.BadArgument(\"Role not in cache\")\n\n except asyncio.TimeoutError:\n await ctx.send(\"Timed out\")\n return\n except commands.BadArgument:\n await ctx.send(\"Couldn't find the role you wished to edit in the menu\")\n return\n\n # Get the reaction/emoji that will correspond to the new role and yank everything into db\n m = await ctx.send(f\"React on this message with the new reaction that will correspond to the role {role}\")\n try:\n r, u = await self.bot.wait_for('reaction_add', check=reaction_check_nd, timeout=30)\n\n # Can only delete entry if have the key so....\n TargetKey = \"\" # Set default value so IDE stops screaming\n for k, v in self._cache[ctx.guild.id][PM.id].items():\n if v == role.id:\n TargetKey = k\n\n # Make new entry and delete the old one\n self._cache[ctx.guild.id][PM.id][str(r.emoji)] = role.id\n self._cache[ctx.guild.id][PM.id].pop(TargetKey)\n\n # After everything is done and dusted, at last update the database entry\n await self.bot.pool.execute(\"UPDATE selfrole SET emoji = $1 WHERE roleid = $2 AND messageid = $3\", str(r.emoji), role.id, PM.id)\n\n # Hehehehehehe\n newmenudesc = \"\\n\\n\".join(\n [f\"{k} - {ctx.guild.get_role(v)}\" for k, v in self._cache[ctx.guild.id][PM.id].items()])\n\n newembed = discord.Embed(title=PM.embeds[0].title,\n description=newmenudesc,\n colour=discord.Colour.blue())\n\n await PM.edit(embed=newembed)\n await PM.clear_reaction(TargetKey)\n await PM.add_reaction(str(r.emoji))\n await ctx.send(\n \"Menu edit completed successfully, you may now delete the messages other than the menu itself\")\n except asyncio.TimeoutError:\n await ctx.send(\"Timed out\")\n return\n\n elif str(r.emoji) == buttons[3]:\n # This one speaks for itself I think.\n await menu.clear_reactions()\n await menu.edit(embed=None, content=\"Enter the new title you want the menu to have\")\n try:\n m = await self.bot.wait_for('message', check=message_check, timeout=30)\n e = discord.Embed(title=f\"Role menu: {m.content}\",\n description=PM.embeds[0].description,\n colour=PM.embeds[0].colour)\n await PM.edit(embed=e)\n except asyncio.TimeoutError:\n await ctx.send(\"Timed out\")\n return\n\n else:\n await ctx.send(\"Menu not found in this server, double check if the id was entered correctly\")", "async def togglerole(self, ctx, role):\n \n user = ctx.message.author\n await ctx.message.delete()\n\n\n if role == \"MK8D\":\n if self.bot.mk8d_role in user.roles:\n await user.remove_roles(self.bot.mk8d_role)\n await user.send(\"Left MK8D role\")\n\n else:\n await user.add_roles(self.bot.mk8d_role)\n await user.send(\"Joined MK8D role\")\n else:\n await user.send(\"{} is not a togglable role\".format(role))", "async def removeroleall(self, ctx, role: discord.Role):\n muted_role = discord.utils.get(ctx.guild.roles, name=\"Muted\")\n punished_role = discord.utils.get(ctx.guild.roles, name=\"Punished\")\n\n if role > ctx.author.top_role:\n return await ctx.send(\n embed=discord.Embed(\n title=\"You don't have permission to remove this role\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.darker_grey(),\n )\n )\n\n if role == muted_role or role == punished_role:\n return await ctx.send(\n embed=discord.Embed(\n title=f\"Can not remove *{role}* role using this command.\",\n description=\"For more information 
run ```.help removeroleall```\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.red(),\n )\n )\n\n for i in ctx.guild.members:\n if not i.bot:\n await i.remove_roles(role)\n\n await ctx.send(\n embed=discord.Embed(\n title=f\"*{role}* has been removed from **{len(ctx.guild.members)}** members!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.green(),\n )\n )", "async def muterole(self, ctx, *, role: discord.Role):\n await queries.update_setting(ctx, \"guild_settings\", \"mute_role_id\", role.id)\n await util.send_success(ctx, f\"Muting someone now gives them the role {role.mention}\")", "async def editrole(self, ctx: context.CustomContext, *, role: Fuzzy[Selfrole]):\n\n new_join_message = await ctx.input(\n f\"{config.USER_INTERACTION_REQUIRED} Reply with the new join message for `{role.role.name}`.\"\n f\"\\n{config.HINT} The current join message is: `{role.join_message}`\"\n )\n\n await self.bot.db.execute(\n \"UPDATE selfrole SET join_message = $1 WHERE role_id = $2\",\n new_join_message,\n role.role.id,\n )\n\n await ctx.send(\n f\"{config.YES} The join message for `{role.role.name}` was updated.\"\n )", "async def approve(self, ctx, user: discord.Member):\n server = ctx.message.server\n if user.id in self.norole[server.id]:\n if self.norole[server.id][user.id]['Role'] == True:\n self.norole[server.id][user.id] = {'Role': False}\n dataIO.save_json(self.warninglist, self.norole)\n nobnl = discord.utils.get(server.roles, name = \"NoBNL\")\n await self.bot.remove_roles(user,nobnl)\n msg = await self.bot.say (\"Role removed!\")\n await asyncio.sleep(8)\n await self.bot.delete_message(msg) \n await self.bot.delete_message(ctx.message)\n else:\n msg = await self.bot.say(\"There is no role to remove!\")\n await asyncio.sleep(8)\n await self.bot.delete_message(msg)\n await self.bot.delete_message(ctx.message)", "async def pingrole(self, ctx, role: discord.Role, *, text):\n if role.mentionable:\n await ctx.send(inline('Error: role is already mentionable'))\n return\n\n try:\n await role.edit(mentionable=True)\n except Exception as ex:\n await ctx.send(inline('Error: failed to set role mentionable'))\n if ex.text == \"Missing Permissions\":\n message = await ctx.send(inline('Make sure this bot\\'s role is higher than the one you\\'re mentioning'))\n await asyncio.sleep(3)\n await message.delete()\n return\n\n await ctx.message.delete()\n await asyncio.sleep(1)\n await ctx.send('From {}:\\n{}\\n{}'.format(ctx.author.mention, role.mention, text))\n\n try:\n await role.edit(mentionable=False)\n except Exception as ex:\n await ctx.send(inline('Error: failed to set role unmentionable'))\n return", "async def muterole(self, ctx, rolename: str):\n self.data_check(ctx)\n server = ctx.message.server\n \n self.riceCog2[server.id][\"muterole\"] = rolename\n dataIO.save_json(self.warning_settings,\n self.riceCog2)\n await self.bot.say(\"Muted role name is now: **{}**\".format(rolename))", "async def x5lol(ctx):\n #Shortcut to Author of the Message\n atr = ctx.author\n #Shortcut to Author Current Voice Channel\n currentvc = atr.voice.channel.name\n #Shortcut to Voice Channel Members list\n usrs = atr.voice.channel.members\n #Specify what role will use \n role = ctx.guild.get_role(\"\"\"Insert role id\"\"\")\n \n pot = []\n #await ctx.send(atr.name +' '+currentvc)\n for i in usrs:\n if role in i.roles:\n #await ctx.send('O ' + str(i) + ' É ' + str(role))\n nick = (str(i.nick))\n if nick != 'None':\n pot.append(str(i.nick))\n else:\n pot.append(str(i))\n \n \n #NOTE:The math
part of team balance isn't done yet\n # For now, Bot shuffles the names, then sends to Text Channel on Discord \n random.shuffle(pot)\n await ctx.send(pot)", "async def anticipation(self, ctx: commands.Context):\n role = ctx.guild.get_role(529447810127495168)\n\n if role.id not in (r.id for r in ctx.author.roles):\n await ctx.author.add_roles(role, reason=\"/anticipation\")\n embed = discord.Embed(\n colour=discord.Colour.green(),\n description=\"Anticipation Notifications successfully added.\"\n )\n await ctx.send(embed=embed)\n\n else:\n await ctx.author.remove_roles(role, reason=\"/anticipation\")\n embed = discord.Embed(\n colour=discord.Colour.red(),\n description=\"Anticipation Notifications successfully removed.\"\n )\n await ctx.send(embed=embed)", "async def addroleall(self, ctx, role: discord.Role):\n muted_role = discord.utils.get(ctx.guild.roles, name=\"Muted\")\n punished_role = discord.utils.get(ctx.guild.roles, name=\"Punished\")\n\n if role > ctx.author.top_role:\n return await ctx.send(\n embed=discord.Embed(\n title=\"You don't have permission to add this role\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.darker_grey(),\n )\n )\n\n if role == muted_role or role == punished_role:\n return await ctx.send(\n embed=discord.Embed(\n title=f\"Can not assign *{role}* role using this command.\",\n description=\"For more information run ```.help addroleall```\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.red(),\n )\n )\n\n for i in ctx.guild.members:\n if not i.bot:\n await i.add_roles(role)\n\n await ctx.send(\n embed=discord.Embed(\n title=f\"*{role}* has been added to **{len(ctx.guild.members)}** members!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.green(),\n )\n )", "async def welcomerole(self, ctx, role: discord.Role = None):\n if not ctx.guild: # command must not be used in DMs.\n return await ctx.send(await self.ex.get_msg(ctx, \"general\", \"no_dm\"))\n\n guild = self.ex.cache.welcome_roles.get(ctx.guild)\n\n # if the user wants to delete the role.\n if not role and guild:\n await self.ex.sql.s_general.delete_welcome_role(ctx.guild.id)\n self.ex.cache.welcome_roles.pop(ctx.guild)\n return await ctx.send(await self.ex.get_msg(ctx, \"moderator\", \"welcome_role_delete\"))\n elif not role and not guild:\n return await ctx.send(await self.ex.get_msg(ctx, \"moderator\", \"no_role_to_delete\"))\n\n try:\n # if the user wants to set a role\n if role and guild:\n # role already in place\n await self.ex.sql.s_general.update_welcome_role(guild.id, role.id)\n elif role and not guild:\n # first time they are using the command\n await self.ex.sql.s_general.insert_welcome_role(ctx.guild.id, role.id)\n except Exception as e:\n log.console(f\"{e} -> Welcome Join Cache and DB are not synced.\")\n\n self.ex.cache.welcome_roles[ctx.guild] = role\n return await ctx.send(await self.ex.get_msg(ctx, \"moderator\", \"welcome_role_set\"))", "async def toggle_group(role_title, ctx):\n if ctx.guild is None:\n return 'whisper'\n server_roles = ctx.guild.roles\n #print(\"Server roles\", server_roles)\n user_roles = ctx.author.roles\n #print(\"Author roles\", user_roles)\n\n role_id = \"\"\n\n #Finding the role on the server. 
If it doesn't exist, we'll let the user know.\n found_role = False\n role_id_index = ''\n for i in server_roles:\n #print(i.name.lower())\n if i.name.lower() == role_title.lower(): #.lower is for consistency\n role_id = i\n found_role = True\n try:\n role_id_index = user_roles.index(i)\n except:\n pass\n\n if not found_role:\n return \"role not found\"\n else:\n if role_id in user_roles:\n # User has this role, need to remove it.\n user_roles.pop(role_id_index)\n await ctx.author.edit(roles=user_roles, reason=\"Automated role removal requested by user\")\n return \"removed\"\n else:\n # User does not have this role\n user_roles.append(role_id)\n await ctx.author.edit(roles=user_roles, reason=\"Automated role add requested by user\")\n return \"added\"", "async def update_cog(self):\n\n # get the model data for the role assigner object\n data = await self.get_objects(\n model=RoleAssigner, filter={\"bot__name\": str(self.bot_name)}\n )\n\n # role assigner object\n data = data[0]\n\n # fetch the discord message\n guild_id = await self.get_deep_data(data, \"bot__server__uid\")\n\n guild = self.get_guild(int(guild_id))\n channel = self.get_channel(guild, int(data.message.cuid))\n message = await channel.fetch_message(int(data.message.uid))\n self.message_id = int(data.message.uid)\n\n # update the message\n await message.edit(content=\"_ _\", embed=self.create_message_embed(data))\n\n await self.update_reactions(message, data)", "async def lock(ctx):\n member = ctx.message.author\n channel = ctx.message.channel\n\n if (channel.category.name in [\"beta\", \"staff\", \"Pi-Bot\"]):\n return await ctx.send(\"This command is not suitable for this channel because of its category.\")\n\n member_role = discord.utils.get(member.guild.roles, name=ROLE_MR)\n if (channel.category.name == CATEGORY_STATES):\n await ctx.channel.set_permissions(member_role, add_reactions=False, send_messages=False)\n else:\n await ctx.channel.set_permissions(member_role, add_reactions=False, send_messages=False, read_messages=True)\n\n wiki_role = discord.utils.get(member.guild.roles, name=ROLE_WM)\n gm_role = discord.utils.get(member.guild.roles, name=ROLE_GM)\n admin_role = discord.utils.get(member.guild.roles, name=ROLE_AD)\n bot_role = discord.utils.get(member.guild.roles, name=ROLE_BT)\n await ctx.channel.set_permissions(wiki_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(gm_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(admin_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(bot_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.send(\"Locked the channel to Member access.\")", "async def deny(self, ctx, user: discord.Member, *, reason: str=None):\n self.data_check(ctx)\n server = ctx.message.server\n try:\n defchannel = self.riceCog2[server.id][\"defchannel\"]\n except:\n defchannel = default_channel\n try:\n channelmute = self.riceCog2[server.id][\"channelmute\"]\n except:\n channelmute = defchannelmute \n channel = discord.utils.get(server.channels, name = defchannel)\n if channel is None:\n msg = await self.bot.say (\"I was unable to write to your log channel. 
Please make sure there is a channel called {} on the server!\".format(defchannel))\n return\n else:\n pass\n if reason is None:\n msg = await self.bot.say(\"Please enter a reason for the warning!\")\n await asyncio.sleep(5)\n await self.bot.delete_message(msg)\n return\n if user.id in self.norole[server.id]:\n if self.norole[server.id][user.id]['Role'] == True:\n msg = await self.bot.say(\"This user has already been denied access to the channel.\")\n await asyncio.sleep(8)\n await self.bot.delete_message(msg) \n await self.bot.delete_message(ctx.message)\n return\n else:\n nobnl = discord.utils.get(server.roles, name = \"NoBNL\")\n role = nobnl \n mod = ctx.message.author\n await self.bot.delete_message(ctx.message)\n await self.bot.add_roles(user, nobnl)\n dmuser = await self.bot.start_private_message(user)\n await self.bot.send_message(dmuser, \"Howdy!\\nThis is to let you know that you have been denied access to the channel for the reason:\\n\\n```{}``` \\nPlease speak to a member of staff if you have an issue.\".format(reason))\n user=user\n reason=reason\n ID = uuid.uuid4()\n embed=discord.Embed(title=\"User Denied:\", color=0xA00000)\n embed.add_field(name=\"Case ID:\", value=ID, inline=False)\n embed.add_field(name=\"Moderator:\", value=mod, inline=False)\n embed.add_field(name=\"User:\", value=\"{0} ({0.id})\".format(user), inline=False)\n embed.add_field(name=\"Reason:\", value=reason, inline=False)\n react = await self.bot.send_message(channel, embed=embed)\n await self.bot.add_reaction(react, \"\\U0001f44d\")\n await self.bot.add_reaction(react, \"\\U0001f44e\")\n await self.bot.add_reaction(react, \"\\U0001f937\")\n self.norole[server.id][user.id] = {\n 'Reason': reason,\n 'Mod': ctx.message.author.id,\n 'Role': True\n }\n dataIO.save_json(self.warninglist, self.norole)\n channel = discord.utils.get(server.channels, name = channelmute)\n for channel in server.channels:\n perms = discord.PermissionOverwrite()\n \n if channel.type == discord.ChannelType.text:\n perms.send_messages = False\n perms.read_messages = False\n await self.bot.edit_channel_permissions(channel, role, overwrite=perms) \n else:\n nobnl = discord.utils.get(server.roles, name = \"NoBNL\")\n role = nobnl \n mod = ctx.message.author\n await self.bot.delete_message(ctx.message)\n await self.bot.add_roles(user, nobnl)\n dmuser = await self.bot.start_private_message(user)\n await self.bot.send_message(dmuser, \"Howdy!\\nThis is to let you know that you have been denied access to the channel for the reason:\\n\\n```{}``` \\nPlease speak to a member of staff if you have an issue.\".format(reason))\n user=user\n reason=reason\n ID = uuid.uuid4()\n embed=discord.Embed(title=\"User Denied:\", color=0xA00000)\n embed.add_field(name=\"Case ID:\", value=ID, inline=False)\n embed.add_field(name=\"Moderator:\", value=mod, inline=False)\n embed.add_field(name=\"User:\", value=\"{0} ({0.id})\".format(user), inline=False)\n embed.add_field(name=\"Reason:\", value=reason, inline=False)\n react = await self.bot.send_message(channel, embed=embed)\n await self.bot.add_reaction(react, \"\\U0001f44d\")\n await self.bot.add_reaction(react, \"\\U0001f44e\")\n await self.bot.add_reaction(react, \"\\U0001f937\")\n self.norole[server.id][user.id] = {\n 'Reason': reason,\n 'Mod': ctx.message.author.id,\n 'Role': True\n }\n dataIO.save_json(self.warninglist, self.norole)\n channel = discord.utils.get(server.channels, name = channelmute)\n for channel in server.channels:\n perms = discord.PermissionOverwrite()\n \n if channel.type == 
discord.ChannelType.text:\n perms.send_messages = False\n perms.read_messages = False\n await self.bot.edit_channel_permissions(channel, role, overwrite=perms)", "async def accept(self, ctx: commands.Context, target: discord.Member):\n try:\n accepter = get(ctx.guild.roles, id = await self.config.guild(ctx.guild).accepter_id())\n except TypeError:\n accepter = None\n if not accepter:\n if not ctx.author.guild_permissions.administrator:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n else:\n if accepter not in ctx.author.roles:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n try:\n applicant = get(ctx.guild.roles, id = await self.config.guild(ctx.guild).applicant_id())\n except TypeError:\n applicant = None\n if not applicant:\n applicant = get(ctx.guild.roles, name=\"Staff Applicant\")\n if not applicant:\n return await ctx.send(\"Uh oh, the configuration is not correct. Ask the Admins to set it.\")\n role = MessagePredicate.valid_role(ctx)\n if applicant in target.roles:\n await ctx.send(f\"What role do you want to accept {target.name} as?\")\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=role)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n role_add = role.result\n try:\n await target.add_roles(role_add)\n except discord.Forbidden:\n return await ctx.send(\"Uh oh, I cannot give them the role. It might be above all of my roles.\")\n await target.remove_roles(applicant)\n await ctx.send(f\"Accepted {target.mention} as {role_add}.\")\n await target.send(\n f\"You have been accepted as {role_add} in {ctx.guild.name}.\"\n )\n else:\n await ctx.send(\n f\"Uh oh. Looks like {target.mention} hasn't applied for anything.\"\n )", "async def accept(self, ctx: commands.Context, target: discord.Member):\n try:\n accepter = get(ctx.guild.roles, id=await self.config.guild(ctx.guild).accepter_id())\n except TypeError:\n accepter = None\n if (\n not accepter\n and not ctx.author.guild_permissions.administrator\n or accepter\n and accepter not in ctx.author.roles\n ):\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n try:\n applicant = get(ctx.guild.roles, id=await self.config.guild(ctx.guild).applicant_id())\n except TypeError:\n applicant = None\n if not applicant:\n applicant = get(ctx.guild.roles, name=\"Staff Applicant\")\n if not applicant:\n return await ctx.send(\n \"Uh oh, the configuration is not correct. Ask the Admins to set it.\"\n )\n role = MessagePredicate.valid_role(ctx)\n if applicant in target.roles:\n await ctx.send(f\"What role do you want to accept {target.name} as?\")\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=role)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n role_add = role.result\n try:\n await target.add_roles(role_add)\n except discord.Forbidden:\n return await ctx.send(\n \"Uh oh, I cannot give them the role. It might be above all of my roles.\"\n )\n await target.remove_roles(applicant)\n await ctx.send(f\"Accepted {target.mention} as {role_add}.\")\n await target.send(f\"You have been accepted as {role_add} in {ctx.guild.name}.\")\n else:\n await ctx.send(f\"Uh oh. 
Looks like {target.mention} hasn't applied for anything.\")", "def _get_role(self):\n return self.__role", "async def apply_role(self, *, reason: str = None):\n if self.role not in self.member.roles:\n try:\n await self.member.add_roles(self.role, reason=reason)\n except discord.HTTPException:\n pass", "async def roletools(self, ctx: Context) -> None:", "async def disco(self, ctx, *, role: discord.Role):\n while True:\n colour = ''.join([choice('0123456789ABCDEF') for x in range(6)])\n colour = int(colour, 16)\n await self.bot.edit_role(ctx.message.server, role, colour=discord.Colour(value=colour))\n await asyncio.sleep(5)", "async def games(ctx):\n games_channel = discord.utils.get(ctx.message.author.guild.text_channels, name=CHANNEL_GAMES)\n member = ctx.message.author\n role = discord.utils.get(member.guild.roles, name=ROLE_GAMES)\n if role in member.roles:\n await member.remove_roles(role)\n await ctx.send(\"Removed you from the games club... feel free to come back anytime!\")\n await games_channel.send(f\"{member.mention} left the party.\")\n else:\n await member.add_roles(role)\n await ctx.send(f\"You are now in the channel. Come and have fun in {games_channel.mention}! :tada:\")\n await games_channel.send(f\"Please welcome {member.mention} to the party!!\")", "async def mute(self, ctx, member : discord.Member, *, reason : str):\r\n mutedRole = discord.utils.get(ctx.guild.roles, name = \"Muted\")\r\n if not mutedRole:\r\n channels = 0\r\n mutedRole = await ctx.guild.create_role(name=\"Muted\")\r\n for channel in ctx.guild.text_channels:\r\n await channel.set_permissions(mutedRole, send_messages=False)\r\n channels += 1 \r\n await ctx.send(f\"Successfully applied overwrites for {channels} channels\")\r\n await member.add_roles(mutedRole)\r\n embed = discord.Embed(title=\"Muted\", description = f\"You have been muted in **{ctx.guild.name}** by **{ctx.author}** **indefinetly** for reason **{reason}**\", colour = ctx.author.color, timestamp = datetime.datetime.now())\r\n await member.send(embed=embed)", "async def on_raw_reaction_add(self, payload: discord.RawReactionActionEvent):\n role: discord.Role = await self.check_payload(payload)\n if payload.user_id == self.bot.user.id:\n return\n if role:\n guild = self.bot.get_guild(payload.guild_id)\n member = guild.get_member(payload.user_id)\n await member.add_roles(role)", "def single_role(self):\n return None", "async def on_raw_reaction_add(self, payload: discord.RawReactionActionEvent):\n # Get the message id of the message that the user reacted to.\n message_id = payload.message_id\n\n # Get the message id of the message we want the user to react to.\n actual_message_id = constants.MessageIDs.RULES_MSGID\n\n # Compare that id's match, and if true continue to give the role.\n if message_id == actual_message_id:\n guild_id = payload.guild_id\n guild = self.bot.get_guild(guild_id)\n role = get(payload.member.guild.roles, name='Not Verified')\n\n if role is not None:\n member = get(guild.members, id=payload.user_id)\n if member is not None:\n await payload.member.add_roles(role)\n print(f\"Added role to {member}\")\n else:\n print(\"User not found . . .\")\n else:\n print(\"Role not found . . 
.\")", "async def removerole(self, ctx, member: discord.Member, role: discord.Role):\n role = discord.utils.get(ctx.guild.roles, id=role.id)\n\n muted_role = discord.utils.get(ctx.guild.roles, name=\"Muted\")\n punished_role = discord.utils.get(ctx.guild.roles, name=\"Punished\")\n\n if role > ctx.author.top_role:\n return await ctx.send(\n embed=discord.Embed(\n title=\"You don't have permission to remove this role\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.darker_grey(),\n )\n )\n\n if role == muted_role or role == punished_role:\n return await ctx.send(\n embed=discord.Embed(\n title=f\"Can not remove *{role}* role using this command.\",\n description=\"For more information run ```.help removerole```\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.red(),\n )\n )\n\n if role not in member.roles:\n return await ctx.channel.send(\n embed=discord.Embed(\n title=f\"{member} doesn't have *{role}* Role!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.greyple(),\n )\n )\n\n await member.remove_roles(role)\n await ctx.send(\n embed=discord.Embed(\n title=f\"*{role}* has been removed from *{member}*\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.green(),\n )\n )", "async def create(self, ctx: commands.Context, title: str, *roles: discord.Role):\n brake = {}\n\n def check(_r, _u):\n return _u == ctx.author and _r.message == me and str(_r.emoji) not in brake\n\n me = await ctx.send(f'React with the reaction that will correspond to the role `{roles[0]}`')\n\n # Yes I know this is an ugly solution to avoid an unnecessary api request but it is ultimately the only solution\n # I could think of.\n try:\n r, u = await self.bot.wait_for('reaction_add', timeout=len(roles) * 20, check=check)\n brake[str(r.emoji)] = roles[0].id\n except asyncio.TimeoutError:\n await ctx.send(\"Timed out, please run the command again and this time be a little quicker to react.\")\n return\n\n try:\n for role in roles[1:]:\n await me.edit(content=f'React with the reaction that will correspond to the role `{role}`')\n r, u = await self.bot.wait_for('reaction_add', timeout=len(roles) * 20, check=check)\n brake[str(r.emoji)] = role.id\n except asyncio.TimeoutError:\n await ctx.send('Timed out, please run the command again and this time be a little quicker to react.')\n return\n\n await ctx.send(\n 'What channel do you wish to send this role menu in? 
Enter its id, name or mention it: #<channel>')\n\n for count in range(4):\n try:\n m = await self.bot.wait_for('message', check=lambda _m: _m.author == ctx.author and _m.channel == ctx.channel, timeout=30)\n chan = await self.tcc.convert(ctx, m.content)\n break\n except commands.BadArgument:\n if count == 3:\n await ctx.send(\"Too many tries to enter channel, make sure I can actually see the channel you're \"\n \"referring to and use the entire command again\")\n return\n await ctx.send(\"Please enter the correct channel, if in doubt, try mentioning it, the `#channel` thing\")\n except asyncio.TimeoutError:\n await ctx.send(\"Timed out\")\n return\n\n e = discord.Embed(title=f\"Role menu: {title}\",\n description=\"\\n\\n\".join(f\"{k} - {ctx.guild.get_role(v)}\"for k, v in brake.items()),\n colour=discord.Colour.blue())\n\n zero = await chan.send(embed=e)\n\n for k in brake:\n await zero.add_reaction(k)\n\n self._cache[ctx.guild.id][zero.id] = brake\n\n query = \"\"\"\n INSERT INTO selfrole_lookup (guildid, channelid, messageid) \n VALUES ($1, $2, $3)\n \"\"\"\n await self.bot.pool.execute(query, ctx.guild.id, chan.id, zero.id)\n\n query = \"\"\"\n INSERT INTO selfrole (messageid, emoji, roleid)\n VALUES ($1, $2, $3)\n \"\"\"\n for k, v in brake.items():\n await self.bot.pool.execute(query, zero.id, k, v)", "async def deny(self, ctx: commands.Context, target: discord.Member):\n try:\n accepter = get(ctx.guild.roles, id = await self.config.guild(ctx.guild).accepter_id())\n except TypeError:\n accepter = None\n if not accepter:\n if not ctx.author.guild_permissions.administrator:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n else:\n if accepter not in ctx.author.roles:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n try:\n applicant = get(ctx.guild.roles, id = await self.config.guild(ctx.guild).applicant_id())\n except TypeError:\n applicant = None\n if not applicant:\n applicant = get(ctx.guild.roles, name=\"Staff Applicant\")\n if not applicant:\n return await ctx.send(\"Uh oh, the configuration is not correct. Ask the Admins to set it.\")\n if applicant in target.roles:\n await ctx.send(\"Would you like to specify a reason? (yes/no)\")\n pred = MessagePredicate.yes_or_no(ctx)\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=pred)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n if pred.result:\n await ctx.send(\"Please, specify your reason now.\")\n\n def check(m):\n return m.author == ctx.author\n\n try:\n reason = await self.bot.wait_for(\n \"message\", timeout=120, check=check\n )\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n await target.send(\n f\"Your application in {ctx.guild.name} has been denied.\\n*Reason:* {reason.content}\"\n )\n else:\n await target.send(\n f\"Your application in {ctx.guild.name} has been denied.\"\n )\n await target.remove_roles(applicant)\n await ctx.send(f\"Denied {target.mention}'s application.\")\n else:\n await ctx.send(\n f\"Uh oh. 
Looks like {target.mention} hasn't applied for anything.\"\n )", "async def test_roles_command_command(self):\n self.ctx.guild.roles.append(self.moderator_role)\n\n self.cog.roles_info.can_run = unittest.mock.AsyncMock()\n self.cog.roles_info.can_run.return_value = True\n\n self.assertIsNone(await self.cog.roles_info(self.cog, self.ctx))\n self.ctx.send.assert_called_once()\n\n _, kwargs = self.ctx.send.call_args\n embed = kwargs.pop(\"embed\")\n\n self.assertEqual(embed.title, \"Role information (Total 1 role)\")\n self.assertEqual(embed.colour, discord.Colour.og_blurple())\n self.assertEqual(embed.description, f\"\\n`{self.moderator_role.id}` - {self.moderator_role.mention}\\n\")", "async def deny(self, ctx: commands.Context, target: discord.Member):\n try:\n accepter = get(ctx.guild.roles, id=await self.config.guild(ctx.guild).accepter_id())\n except TypeError:\n accepter = None\n if not accepter:\n if not ctx.author.guild_permissions.administrator:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n else:\n if accepter not in ctx.author.roles:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n try:\n applicant = get(ctx.guild.roles, id=await self.config.guild(ctx.guild).applicant_id())\n except TypeError:\n applicant = None\n if not applicant:\n applicant = get(ctx.guild.roles, name=\"Staff Applicant\")\n if not applicant:\n return await ctx.send(\n \"Uh oh, the configuration is not correct. Ask the Admins to set it.\"\n )\n if applicant in target.roles:\n await ctx.send(\"Would you like to specify a reason? (yes/no)\")\n pred = MessagePredicate.yes_or_no(ctx)\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=pred)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n if pred.result:\n await ctx.send(\"Please, specify your reason now.\")\n\n def check(m):\n return m.author == ctx.author\n\n try:\n reason = await self.bot.wait_for(\"message\", timeout=120, check=check)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n await target.send(\n f\"Your application in {ctx.guild.name} has been denied.\\n*Reason:* {reason.content}\"\n )\n else:\n await target.send(f\"Your application in {ctx.guild.name} has been denied.\")\n await target.remove_roles(applicant)\n await ctx.send(f\"Denied {target.mention}'s application.\")\n else:\n await ctx.send(f\"Uh oh. 
Looks like {target.mention} hasn't applied for anything.\")", "async def createRole(self, ctx):\n await self.deleteRole(ctx=ctx, reason=\"Début de partie.\")\n await ctx.guild.create_role(name=self.categoryName)\n await asyncio.sleep(1)\n self.roleForPlayer = discord.utils.get(ctx.guild.roles, name=self.categoryName)\n print(\"Role created.\")\n member = await ctx.guild.fetch_member(bot.user.id)\n await member.add_roles(self.roleForPlayer, reason=\"Début de partie.\")\n for member in ctx.author.voice.channel.members:\n await member.add_roles(self.roleForPlayer, reason=\"Début de partie.\")", "def changeRole(self, node, role):", "async def x5cs(ctx):\n atr = ctx.author\n currentvc = atr.voice.channel.name\n usrs = atr.voice.channel.members\n role = ctx.guild.get_role(\"\"\"Insert role id\"\"\")\n pot = []\n \n #await ctx.send(atr.name +' '+currentvc)\n for i in usrs:\n if role in i.roles:\n #await ctx.send('O ' + str(i) + ' É ' + str(role))\n nick = (str(i.nick))\n if nick != 'None':\n pot.append(str(i.nick))\n else:\n pot.append(str(i))\n\n \n #print(pot) \n random.shuffle(pot)\n await ctx.send(pot)", "def update_client_roles(self,request,role):\n return\n #pdb.set_trace() #commented out by JZ on 10/3/14\n if not self.client_roles.has_key(request['protocol'].peer):\n self.client_roles.update({request['protocol'].peer:{role:time.time()}})\n else:\n self.client_roles[request['protocol'].peer].update({role:time.time()})", "async def update(self, ctx):\n\n # get the model data for the role assigner object\n data = await self.get_objects(\n model=RoleAssigner, filter={\"bot__name\": str(self.bot_name)}\n )\n\n # role assigner object\n data = data[0]\n\n # fetch the discord message\n guild_id = await self.get_deep_data(data, \"bot__server__uid\")\n\n guild = self.get_guild(int(guild_id))\n channel = self.get_channel(guild, int(data.message.cuid))\n message = await channel.fetch_message(int(data.message.uid))\n\n # update the message\n await message.edit(content=\"_ _\", embed=self.create_message_embed(data))\n\n await self.update_reactions(message, data)\n\n await ctx.send(\"Updated.\")", "def i_am(user_role):\n return user_role", "async def massadd(\n self,\n ctx,\n role: discord.Role,\n member: commands.Greedy[discord.Member],\n ):\n role = discord.utils.get(ctx.guild.roles, id=role.id)\n\n muted_role = discord.utils.get(ctx.guild.roles, name=\"Muted\")\n punished_role = discord.utils.get(ctx.guild.roles, name=\"Punished\")\n\n if role > ctx.author.top_role:\n return await ctx.send(\n embed=discord.Embed(\n title=\"You don't have permission to add this role\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.darker_grey(),\n )\n )\n\n if role == muted_role or role == punished_role:\n return await ctx.send(\n embed=discord.Embed(\n title=f\"Can not assign *{role}* role using this command.\",\n description=\"For more information run ```.help massadd```\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.red(),\n )\n )\n\n for i in member:\n if role in i.roles:\n await ctx.channel.send(\n embed=discord.Embed(\n title=f\"*{i}* already has *{role}* Role!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.greyple(),\n )\n )\n\n await i.add_roles(role)\n\n await ctx.send(\n embed=discord.Embed(\n title=f\"*{role}* has been added to **{len(member)}** members!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.green(),\n )\n )", "async def promoteto(self, ctx, *, member = None, role = None):\r\n if not await self._can_run(ctx): return\r\n 
em = discord.Embed(color = 0XFF8C00, description = \"Menaikan role xp member ke role yang ditentukan\\n\"\r\n \"Pastikan role xp sudah terdaftar dalam list\\n\\n\"\r\n \"**Panduan**\\n\"\r\n \"*`{}promoteto [member] [role]`*\"\r\n .format(ctx.prefix))\r\n em.set_footer(text = \"Saat mengetik command, tanda [] tidak usah digunakan.\\n{}\".format(ctx.author),\r\n icon_url = \"{}\".format(ctx.author.avatar_url))\r\n\r\n if member == None:\r\n return await ctx.send(embed=em)\r\n\r\n if role == None:\r\n # Either a role wasn't set - or it's the last section\r\n if type(member) is str:\r\n # It's a string - the hope continues\r\n # Let's search for a name at the beginning - and a role at the end\r\n parts = member.split()\r\n memFromName = None\r\n for j in range(len(parts)):\r\n # Reverse search direction\r\n i = len(parts)-1-j\r\n # Name = 0 up to i joined by space\r\n nameStr = ' '.join(parts[0:i+1])\r\n # Role = end of name -> end of parts joined by space\r\n roleStr = ' '.join(parts[i+1:])\r\n memFromName = DisplayName.memberForName(nameStr, ctx.guild)\r\n if memFromName:\r\n # We got a member - let's check for a role\r\n roleFromName = DisplayName.roleForName(roleStr, ctx.guild)\r\n \r\n if not roleFromName == None:\r\n # We got a member and a role - break\r\n role = roleFromName\r\n break\r\n if memFromName == None:\r\n # Never found a member at all\r\n msg = '┐(￣ヘ￣;)┌\\nAku tidak dapat menemukan *{}* dalam server.'.format(member)\r\n msgDone = Utils.suppressed(ctx,msg)\r\n em = discord.Embed(color = 0XFF8C00, description = msgDone)\r\n em.set_footer(text = \"{}\".format(ctx.author), icon_url = \"{}\".format(ctx.author.avatar_url))\r\n return await ctx.send(embed = em)\r\n if roleFromName == None:\r\n # We couldn't find one or the other\r\n return await ctx.send(embed = em)\r\n\r\n member = memFromName\r\n\r\n # Get user's xp\r\n xp = int(self.settings.getUserStat(member, ctx.guild, \"XP\"))\r\n\r\n # Get the role list\r\n promoArray = self.getSortedRoles(ctx.guild)\r\n nextRole = self.getIndexForRole(role, ctx.guild)\r\n currentRole = self.getCurrentRoleIndex(member, ctx.guild)\r\n vowels = 'aeiou'\r\n\r\n if nextRole == None:\r\n em = discord.Embed(color = 0XFF8C00, description = \"> ┐(￣ヘ￣;)┌\\n\"\r\n \"> Role **{}** tidak terdaftar dalam list role xp.\\n> \\n\"\r\n \"> Kamu dapat menambahkan role xp dengan cara:\\n\"\r\n \"> `{}addxprole [role] [jumlah xp]`\"\r\n .format(role.name,\r\n ctx.prefix))\r\n em.set_author(name = \"Role xp tidak terdaftar\", icon_url = \"https://cdn.discordapp.com/attachments/518118753226063887/725569194304733435/photo.jpg\")\r\n em.set_footer(name = \"Saat mengetik command, tanda [] tidak usah digunakan.\\nHelp command color\", text = f\"Request By : {ctx.author.name}\", icon_url = f\"{ctx.author.avatar_url}\")\r\n return await ctx.send(embed=em)\r\n \r\n if currentRole == nextRole:\r\n # We are already the target role\r\n if role.name[:1].lower() in vowels:\r\n msg = '*{}* sudah memiliki role **{}**.'.format(DisplayName.name(member), role.name)\r\n else:\r\n msg = '*{}* sudah memiliki role **{}**.'.format(DisplayName.name(member), role.name)\r\n msgDone = Utils.suppressed(ctx,msg)\r\n em = discord.Embed(color = 0XFF8C00, description = msgDone)\r\n em.set_footer(text = \"{}\".format(ctx.author), icon_url = \"{}\".format(ctx.author.avatar_url))\r\n return await ctx.send(embed = em)\r\n elif currentRole > nextRole:\r\n # We are a higher role than the target\r\n msg = '*{}* sudah memiliki role **{}** dalam koleksi role 
mereka.'.format(DisplayName.name(member), role.name)\r\n msgDone = Utils.suppressed(ctx,msg)\r\n em = discord.Embed(color = 0XFF8C00, description = msgDone)\r\n em.set_footer(text = \"{}\".format(ctx.author), icon_url = \"{}\".format(ctx.author.avatar_url))\r\n return await ctx.send(embed = em)\r\n\r\n if nextRole >= len(promoArray):\r\n msg = '┐(￣ヘ￣;)┌\\nTidak ada role yang lebih tinggi untuk mempromosikan kenaikan role xp *{}*.'.format(DisplayName.name(member))\r\n else:\r\n newRole = DisplayName.roleForID(promoArray[nextRole]['ID'], ctx.guild)\r\n neededXp = int(promoArray[nextRole]['XP'])-xp\r\n self.settings.incrementStat(member, ctx.guild, \"XP\", neededXp)\r\n # Start at the bottom role and add all roles up to newRole\r\n addRoles = []\r\n for i in range(0, nextRole+1):\r\n addRole = DisplayName.roleForID(promoArray[i]['ID'], ctx.guild)\r\n if addRole:\r\n if not addRole in member.roles:\r\n addRoles.append(addRole)\r\n # await member.add_roles(*addRoles)\r\n # Use role manager instead\r\n self.settings.role.add_roles(member, addRoles)\r\n if not newRole:\r\n # Promotion role doesn't exist\r\n msg = '┐(￣ヘ￣;)┌\\nSepertinya **{}** tidak ada dalam server.\\n*{}* akan tetap diberikan sejumlah *{:,} xp*, tapi aku tidak bisa menambahkan role yang tidak ada dalam list. Pertimbangkan lagi untuk merevisi role xp dalam server mu.'.format(promoArray[nextRole]['Name'], DisplayName.name(member), neededXp)\r\n else:\r\n msg = '*{}* telah di berikan sejumlah *{:,} xp* dan dinaikan ke role **{}**!'.format(DisplayName.name(member), neededXp, newRole.name)\r\n self.bot.dispatch(\"xp\", member, ctx.author, neededXp)\r\n msgDone = Utils.suppressed(ctx,msg)\r\n em = discord.Embed(color = 0XFF8C00, description = msgDone)\r\n em.set_footer(text = \"{}\".format(ctx.author), icon_url = \"{}\".format(ctx.author.avatar_url))\r\n return await ctx.send(embed = em)", "async def addrole(self, ctx, member: discord.Member, role: discord.Role):\n role = discord.utils.get(ctx.guild.roles, id=role.id)\n\n muted_role = discord.utils.get(ctx.guild.roles, name=\"Muted\")\n punished_role = discord.utils.get(ctx.guild.roles, name=\"Punished\")\n\n if role > ctx.author.top_role:\n return await ctx.send(\n embed=discord.Embed(\n title=\"You don't have permission to add this role\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.darker_grey(),\n )\n )\n\n if role == muted_role or role == punished_role:\n return await ctx.send(\n embed=discord.Embed(\n title=f\"Can not assign *{role}* role using this command.\",\n description=\"For more information run ```.help addrole```\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.red(),\n )\n )\n\n if role in member.roles:\n return await ctx.channel.send(\n embed=discord.Embed(\n title=f\"*{member}* already has *{role}* Role!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.greyple(),\n )\n )\n\n await member.add_roles(role)\n await ctx.send(\n embed=discord.Embed(\n title=f\"*{role}* has been added to *{member}*\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.green(),\n )\n )", "async def massremove(\n self,\n ctx,\n role: discord.Role,\n member: commands.Greedy[discord.Member],\n ):\n\n role = discord.utils.get(ctx.guild.roles, id=role.id)\n\n muted_role = discord.utils.get(ctx.guild.roles, name=\"Muted\")\n punished_role = discord.utils.get(ctx.guild.roles, name=\"Punished\")\n\n if role > ctx.author.top_role:\n return await ctx.send(\n embed=discord.Embed(\n title=\"You don't have permission 
to remove this role\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.darker_grey(),\n )\n )\n\n if role == muted_role or role == punished_role:\n return await ctx.send(\n embed=discord.Embed(\n title=f\"Can not remove *{role}* role using this command.\",\n description=\"For more information run ```.help massremove```\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.red(),\n )\n )\n\n for i in member:\n if role not in i.roles:\n await ctx.channel.send(\n embed=discord.Embed(\n title=f\"*{i}* doesn't have *{role}* Role!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.greyple(),\n )\n )\n\n await i.remove_roles(role)\n\n await ctx.send(\n embed=discord.Embed(\n title=f\"*{role}* has been removed from **{len(member)}** members!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.green(),\n )\n )", "async def on_raw_reaction_remove(self, payload: discord.RawReactionActionEvent):\n role: discord.Role = await self.check_payload(payload)\n\n if role:\n guild = self.bot.get_guild(payload.guild_id)\n member: discord.Member = guild.get_member(payload.user_id)\n await member.remove_roles(role)", "async def hexcode(self, ctx, *, role):\n if isinstance(role, discord.Role):\n await ctx.send(role.color)\n else:\n role = discord.utils.get(ctx.guild.roles, name = role)\n await ctx.send(role.color)", "async def addrole(self, ctx: context.CustomContext):\n\n await ctx.send(\n f\"{config.USER_INTERACTION_REQUIRED} Reply with the name of the role you want to create.\"\n )\n\n role_name = await ctx.converted_input(converter=converter.CaseInsensitiveRole)\n\n if isinstance(role_name, str):\n await ctx.send(\n f\"{config.YES} I will **create a new role** on this server named `{role_name}` for this.\"\n )\n try:\n discord_role = await ctx.guild.create_role(name=role_name)\n except discord.Forbidden:\n raise exceptions.ForbiddenError(\n exceptions.ForbiddenTask.CREATE_ROLE, role_name\n )\n\n else:\n discord_role = role_name\n\n await ctx.send(\n f\"{config.YES} I'll use the **pre-existing role** named `{discord_role.name}` for this.\"\n )\n\n role_join_message = await ctx.input(\n f\"{config.USER_INTERACTION_REQUIRED} Reply with a short message the user should see when they get the role.\"\n )\n\n try:\n await self.bot.db.execute(\n \"INSERT INTO selfrole (guild_id, role_id, join_message) VALUES ($1, $2, $3) \"\n \"ON CONFLICT (guild_id, role_id) DO UPDATE SET join_message = $3\",\n ctx.guild.id,\n discord_role.id,\n role_join_message,\n )\n except asyncpg.UniqueViolationError:\n return await ctx.send(\n f\"{config.NO} `{discord_role.name}` is already a selfrole on this server.\"\n )\n\n await ctx.send(f\"{config.YES} `{discord_role.name}` was added as a selfrole.\")", "async def tod_revenge(self, ctx, *args):\n roles = [y.name.lower() for y in ctx.author.roles]\n self.revenge = not self.revenge\n if self.revenge:\n message = \"Revenges are now __on__.\"\n else:\n message = \"Revenges are now __off__.\"\n await ctx.channel.send(message)", "async def userrole(self, ctx, *, role=None):\n server = ctx.message.guild\n\n if not role:\n result = await self.bot.db.config.find_one({'_id': str(server.id)})\n if result and result.get('user_role'):\n await ctx.send(f'The user role restricts which users are able to create and manage their own polls. \\n'\n f'The current user role is `{result.get(\"user_role\")}`. 
'\n f'To change it type `{result.get(\"prefix\")}userrole <role name>`')\n else:\n await ctx.send(f'The user role restricts which users are able to create and manage their own polls. \\n'\n f'No user role set. '\n f'To set one type `{result.get(\"prefix\")}userrole <role name>`')\n elif role in [r.name for r in server.roles]:\n await self.bot.db.config.update_one({'_id': str(server.id)}, {'$set': {'user_role': str(role)}}, upsert=True)\n await ctx.send(f'Server role `{role}` can now create and manage their own polls.')\n else:\n await ctx.send(f'Server role `{role}` not found.')", "async def rolemenu(self, message, args):\n if \"-h\" in args or \"--help\" in args or \"--ahelp\" in args:\n await self._print_rolemenu_help(message.channel)\n return\n update_reactions = not is_key_in_args(args, \"--no-update\")\n allow_role_change = not is_key_in_args(args, \"--no-change\")\n remove_role_on_reaction_removal = not is_key_in_args(args, \"--no-removal\")\n max_number_of_reactions_per_user = get_number_in_args(args, \"--max-reactions\", None)\n max_users_with_role = get_number_in_args(args, \"--max-users\", None)\n no_required_role = is_key_in_args(args, \"--all\")\n role_ids = message.raw_role_mentions\n if len(role_ids) * 2 != len(args):\n await message.channel.send(\"Error with `rolemenu`: all roles must match an emoji\")\n return await self._print_rolemenu_help(message.channel)\n menu = {}\n for i, role_id in enumerate(message.raw_role_mentions):\n emoji = args.pop(0)\n if emoji.strip(\"<>@&\") == str(role_id): # emoji and roles are just exchanged: no problem\n emoji = args.pop(0)\n elif args.pop(0).strip(\"<>@&\") != str(role_id): # error: two adjacent args must be role_id and emoji\n await message.channel.send(\"Error with `rolemenu`: a role must match an emoji\")\n return await self._print_rolemenu_help(message.channel)\n menu.update({emoji: [message.guild.get_role(role_id)]})\n manager = RoleByReactionManager.get(self.guild)\n options = RoleMenuOptions(required_roles=None if no_required_role else [RoleCollection.VISITOR.value],\n ignored_roles=[RoleCollection.MASTER.value],\n update_reactions=update_reactions,\n allow_role_change=allow_role_change,\n max_number_of_reactions_per_user=max_number_of_reactions_per_user,\n max_users_with_role=max_users_with_role,\n remove_role_on_reaction_removal=remove_role_on_reaction_removal\n )\n await manager.add(message, menu, options)\n logger.debug(\"Rolemenu created. Now you can edit your post to make it prettier.\")", "def changeRoleInfo(self, role, info):", "async def verify_agree(self, ctx: commands.Context):\n author = ctx.author\n joined_at = author.joined_at\n member_joined, since_joined = (\n author.joined_at.strftime(\"%d %b %Y %H:%M\"),\n (ctx.message.created_at - joined_at).days,\n )\n member_created, since_created = (\n author.created_at.strftime(\"%d %b %Y %H:%M\"),\n (ctx.message.created_at - author.created_at).days,\n )\n created_on = \"{}\\n({} days ago)\".format(member_created, since_created)\n joined_on = \"{}\\n({} days ago)\".format(member_joined, since_joined)\n author_avatar = author.avatar_url_as(static_format=\"png\")\n\n data = await self.config.guild(ctx.guild).all()\n log_config = data[\"logs\"]\n\n if not data[\"temprole\"] and not data[\"autoroles\"]:\n await ctx.send(\n (\n \"Sorry, there is no role configuration set. Please contact the moderation \"\n \"team of this server.\"\n ),\n delete_after=60,\n )\n self.log.warning(\"No role set. 
Unable to process verification.\")\n return\n\n try:\n result = await self._handle_role(author)\n await ctx.message.delete()\n except discord.Forbidden:\n await ctx.send(\n \"Error: I am unable to remove your role, please contact the moderation team.\"\n )\n return self.log.warning(\"Error: No permissions to remove roles.\")\n except discord.HTTPException as e:\n return self.log.warning(\"HTTPException: {} - {}\".format(e.status, e.code))\n if log_config is not None:\n embed = discord.Embed(color=discord.Color.green())\n embed.title = \"{}#{} - Verified\".format(author.name, author.discriminator)\n embed.set_thumbnail(url=author_avatar)\n embed.set_footer(text=\"User ID: {}\".format(author.id))\n embed.add_field(name=\"Account Creation:\", value=created_on, inline=True)\n embed.add_field(name=\"Joined Date:\", value=joined_on, inline=True)\n embed.add_field(name=\"Status:\", value=result[1], inline=True)\n try:\n await ctx.bot.get_channel(log_config).send(embed=embed)\n except discord.Forbidden:\n return self.log.warning(\n \"Error: Unable to send log message to {}\".format(\n ctx.bot.get_channel(log_config)\n )\n )\n except discord.HTTPException as e:\n return self.log.warning(\"HTTPException: {} - {}\".format(e.status, e.code))", "async def roles(self, ctx):\n\n pass", "async def setjoinrole(self, ctx, role):\r\n guild = ctx.message.guild\r\n role = discord.utils.get(guild.roles, name=role)\r\n functions.updatesql(server=ctx.guild.id, joinrole=role.id)\r\n await ctx.send(embed=discord.Embed(title='Success!', color=discord.Colour.from_rgb(255, 0, 255)))", "async def ironman(self, ctx):\n if has_post_permission(ctx.guild.id, ctx.channel.id):\n out = (':tools: __**IRONMAN**__ :tools:\\n' \\\n 'If you want to become an ironman, please react to this post with a :thumbsup:. '\n 'This will **RESET** your account and give you the ironman role. '\n 'You will be unable to trade with other players or gamble. 
'\n 'In return, you will be able to proudly display your status as an ironman, '\n 'by the way.')\n msg = await ctx.send(out)\n\n if await self.confirm(ctx, msg, out):\n ctx.user_object.reset_account()\n ctx.user_object.is_ironman = True\n ctx.user_object.save()\n # ironman_role = discord.utils.get(ctx.guild.roles, name=\"Ironman\")\n # await ctx.author.add_roles(ironman_role, reason='Wanted to become an ironmeme.')\n name = get_display_name(ctx.author)\n await msg.edit(content=f':tools: __**IRONMAN**__ :tools:\\n'\n f'Congratulations, {name}, you are now '\n 'an ironman!')", "async def create(self, ctx):\n\n # get the model data for the role assigner object\n data = await self.get_objects(\n model=RoleAssigner, filter={\"bot__name\": str(self.bot_name)}\n )\n\n # role assigner object\n data = data[0]\n\n message = await ctx.send(\"_ _\", embed=self.create_message_embed(data))\n\n data.message.uid = message.id\n data.message.cuid = message.channel.id\n\n self.message_id = data.message.uid\n\n await self.update_reactions(message, data)\n\n await self.update_objects(model_instance=data)", "async def rolecolor(self, ctx, role: discord.Role):\n embed = discord.Embed(colour=role.colour, timestamp=datetime.datetime.utcnow())\n embed.add_field(name=\"Name\", value=role.name)\n embed.add_field(name=\"Hex Value\", value=role.color)\n await ctx.send(embed=embed)", "def clean_role():", "def comsume_msg(self, msg_type):", "async def on_raw_reaction_add(self, payload):\n\n # exclude all reactions which are not the original message\n if str(payload.message_id) != self.message_id:\n return\n\n # exclude the bot\n if payload.user_id == self.bot.user.id:\n return\n\n else:\n # get the model data for the role assigner object\n data = await self.get_objects(\n model=RoleAssigner, filter={\"bot__name\": str(self.bot_name)}\n )\n\n # role assigner object\n data = data[0]\n\n guild = self.get_guild(guild_id=payload.guild_id)\n\n user = self.get_user(guild=guild, user_id=payload.user_id)\n\n for db_role in data.roles.all():\n\n if db_role.emoji.startswith(\":\") and db_role.emoji.endswith(\":\"):\n\n ce = db_role.emoji[1:-1]\n\n else:\n ce = db_role.emoji\n\n if str(payload.emoji.name) == str(ce):\n\n role = self.get_role(guild, int(db_role.uid))\n\n if user not in role.members:\n\n await user.add_roles(role)\n\n print(\"Added \" + str(user) + \" to role: \" + str(role) + \"!\")\n\n else:\n print(\n \"User \" + str(user) + \" already in role: \" + str(role) + \"!\"\n )\n\n pass", "async def mute(self, ctx, user: discord.Member, time_and_unit=None, *, reason: str = None):\r\n server = ctx.message.guild\r\n channel = ctx.message.channel\r\n author = ctx.message.author\r\n if channel.permissions_for(user).administrator:\r\n await ctx.send(\"That user has administrator perms, why would I even try :no_entry:\")\r\n return\r\n if user.top_role.position >= author.top_role.position:\r\n if author == server.owner:\r\n pass\r\n else:\r\n await ctx.send(\"You can not mute someone higher than your own role :no_entry:\")\r\n return\r\n if not time_and_unit:\r\n time2 = 600\r\n time = \"10\"\r\n unit = \"minutes\"\r\n else:\r\n try:\r\n unit = time_and_unit[len(time_and_unit) - 1:len(time_and_unit)]\r\n except ValueError:\r\n await ctx.send(\"Invalid time unit :no_entry:\")\r\n return\r\n try:\r\n time = time_and_unit[0:len(time_and_unit) - 1]\r\n except ValueError:\r\n await ctx.send(\"Invalid time unit :no_entry:\")\r\n return\r\n if unit == \"s\":\r\n try:\r\n time2 = int(time)\r\n except ValueError:\r\n await 
ctx.send(\"Invalid time unit :no_entry:\")\r\n return\r\n if time == \"1\":\r\n unit = \"second\"\r\n else:\r\n unit = \"seconds\"\r\n elif unit == \"m\":\r\n try:\r\n time2 = int(time) * 60\r\n except ValueError:\r\n await ctx.send(\"Invalid time unit :no_entry:\")\r\n return\r\n if time == \"1\":\r\n unit = \"minute\"\r\n else:\r\n unit = \"minutes\"\r\n elif unit == \"h\":\r\n try:\r\n time2 = int(time) * 3600\r\n except ValueError:\r\n await ctx.send(\"Invalid time unit :no_entry:\")\r\n return\r\n if time == \"1\":\r\n unit = \"hour\"\r\n else:\r\n unit = \"hours\"\r\n elif unit == \"d\":\r\n try:\r\n time2 = int(time) * 86400\r\n except ValueError:\r\n await ctx.send(\"Invalid time unit :no_entry:\")\r\n return\r\n if time == \"1\":\r\n unit = \"day\"\r\n else:\r\n unit = \"days\"\r\n else:\r\n await ctx.send(\"Invalid time unit :no_entry:\")\r\n return\r\n action = \"Mute ({} {})\".format(time, unit)\r\n if str(server.id) not in self.d:\r\n self.d[str(server.id)] = {}\r\n dataIO.save_json(self.file, self.d)\r\n if str(user.id) not in self.d[str(server.id)]:\r\n self.d[str(server.id)][str(user.id)] = {}\r\n dataIO.save_json(self.file, self.d)\r\n if \"toggle\" not in self.d[str(server.id)][str(user.id)]:\r\n self.d[str(server.id)][str(user.id)][\"toggle\"] = False\r\n dataIO.save_json(self.file, self.d)\r\n if \"time\" not in self.d[str(server.id)][str(user.id)]:\r\n self.d[str(server.id)][str(user.id)][\"time\"] = None\r\n dataIO.save_json(self.file, self.d)\r\n if \"amount\" not in self.d[str(server.id)][str(user.id)]:\r\n self.d[str(server.id)][str(user.id)][\"amount\"] = None\r\n dataIO.save_json(self.file, self.d)\r\n role = discord.utils.get(server.roles, name=\"Muted - Sensei\")\r\n overwrite = discord.PermissionOverwrite()\r\n overwrite.send_messages = False\r\n perms = discord.PermissionOverwrite()\r\n perms.speak = False\r\n if not role:\r\n role = await server.create_role(name=\"Muted - Sensei\")\r\n for channels in ctx.guild.text_channels:\r\n await channels.set_permissions(role, overwrite=overwrite)\r\n for channels in ctx.guild.voice_channels:\r\n await channels.set_permissions(role, overwrite=perms)\r\n if role in user.roles:\r\n await ctx.send(\"**{}** is already muted :no_entry:\".format(user))\r\n return\r\n try:\r\n await user.add_roles(role)\r\n except:\r\n await ctx.send(\"I cannot add the mute role to the user :no_entry:\")\r\n return\r\n await ctx.send(f\"**{user}** has been muted for {time} {unit} {self.bot.get_emoji(470063310386233344)}\")\r\n try:\r\n await self._log(author, server, action, reason, user)\r\n except:\r\n pass\r\n self.d[str(server.id)][str(user.id)][\"toggle\"] = True\r\n self.d[str(server.id)][str(user.id)][\"amount\"] = time2\r\n self.d[str(server.id)][str(user.id)][\"time\"] = ctx.message.created_at.timestamp()\r\n dataIO.save_json(self.file, self.d)\r\n try:\r\n s = discord.Embed(title=\"You have been muted in {} :speak_no_evil:\".format(server.name), colour=0xfff90d,\r\n timestamp=__import__('datetime').datetime.utcnow())\r\n s.add_field(name=\"Moderator\", value=\"{} ({})\".format(author, str(author.id)), inline=False)\r\n s.add_field(name=\"Time\", value=\"{} {}\".format(time, unit), inline=False)\r\n if reason:\r\n s.add_field(name=\"Reason\", value=reason, inline=False)\r\n await user.send(embed=s)\r\n except:\r\n pass", "async def command_rolecall(self, context):\n print(self._fetch_category_roles(context))\n print(self._fetch_category_roles(context, COSMETIC_CATEGORY_NAME))", "async def on_guild_role_create(self, role):\n channel = 
self.client.get_channel(serverlogs.getChannel(role.guild.id, \"roles\"))\n if channel is not None:\n await self.log_role(role=role, type='Create', channel=channel, guild=role.guild)", "async def alumni(ctx):\n member = ctx.message.author\n div_a_role = discord.utils.get(member.guild.roles, name=ROLE_DIV_A)\n div_b_role = discord.utils.get(member.guild.roles, name=ROLE_DIV_B)\n div_c_role = discord.utils.get(member.guild.roles, name=ROLE_DIV_C)\n await member.remove_roles(div_a_role, div_b_role, div_c_role)\n role = discord.utils.get(member.guild.roles, name=ROLE_ALUMNI)\n if role in member.roles:\n await member.remove_roles(role)\n await ctx.send(\"Removed your alumni status.\")\n else:\n await member.add_roles(role)\n await ctx.send(f\"Added the alumni role, and removed all other division roles.\")", "def role_from_first_message( # pylint: disable=unused-argument\n message: Message, receiver_address: Address\n) -> BaseDialogue.Role:\n return BaseGymDialogue.Role.AGENT", "def role(self):\n return ['Server', 'Client'][self.is_client()]", "async def unlock(ctx):\n member = ctx.message.author\n channel = ctx.message.channel\n\n if (channel.category.name in [\"beta\", \"staff\", \"Pi-Bot\"]):\n return await ctx.send(\"This command is not suitable for this channel because of its category.\")\n\n if (channel.category.name == CATEGORY_SO or channel.category.name == CATEGORY_GENERAL):\n await ctx.send(\"Synced permissions with channel category.\")\n return await channel.edit(sync_permissions=True)\n\n member_role = discord.utils.get(member.guild.roles, name=ROLE_MR)\n if (channel.category.name != CATEGORY_STATES):\n await ctx.channel.set_permissions(member_role, add_reactions=True, send_messages=True, read_messages=True)\n else:\n await ctx.channel.set_permissions(member_role, add_reactions=True, send_messages=True)\n\n wiki_role = discord.utils.get(member.guild.roles, name=ROLE_WM)\n gm_role = discord.utils.get(member.guild.roles, name=ROLE_GM)\n aRole = discord.utils.get(member.guild.roles, name=ROLE_AD)\n bRole = discord.utils.get(member.guild.roles, name=ROLE_BT)\n await ctx.channel.set_permissions(wiki_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(gm_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(aRole, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(bRole, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.send(\"Unlocked the channel to Member access. 
Please check if permissions need to be synced.\")", "def acutalizeRolelabel (self, data):\n if data.has_key(StatusMsg.label_role):\n self.rolelabel.setText(self.roleDecoder.get(data.get(StatusMsg.label_role)))", "async def addrole(self, ctx, rolename, user: discord.Member=None):\n author = ctx.message.author\n channel = ctx.message.channel\n server = ctx.message.server\n\n if user is None:\n user = author\n\n role = self._role_from_string(server, rolename)\n\n if role is None:\n await self.bot.say('That role cannot be found.')\n return\n\n if not channel.permissions_for(server.me).manage_roles:\n await self.bot.say('I don\\'t have manage_roles.')\n return\n\n if author.id == settings.owner:\n pass\n elif not channel.permissions_for(author).manage_roles:\n raise commands.CheckFailure\n\n await self.bot.add_roles(user, role)\n await self.bot.say('Added role {} to {}'.format(role.name, user.name))", "async def on_raw_reaction_remove(self, payload):\n\n # exclude all reactions which are not the original message\n if str(payload.message_id) != self.message_id:\n return\n\n # exclude the bot\n if payload.user_id == self.bot.user.id:\n return\n\n else:\n # get the model data for the role assigner object\n data = await self.get_objects(\n model=RoleAssigner, filter={\"bot__name\": str(self.bot_name)}\n )\n\n # role assigner object\n data = data[0]\n\n guild = self.get_guild(guild_id=payload.guild_id)\n\n user = self.get_user(guild=guild, user_id=payload.user_id)\n\n for db_role in data.roles.all():\n\n if db_role.emoji.startswith(\":\") and db_role.emoji.endswith(\":\"):\n\n ce = db_role.emoji[1:-1]\n\n else:\n ce = db_role.emoji\n\n if str(payload.emoji.name) == str(ce):\n\n role = self.get_role(guild, int(db_role.uid))\n\n if user in role.members:\n\n await user.remove_roles(role)\n\n print(\"Removed \" + str(user) + \" from role: \" + str(role) + \"!\")\n\n else:\n print(\"User \" + str(user) + \" not in role: \" + str(role) + \"!\")\n\n pass", "def grant_role(self, role, principal_ids):", "async def cry(self,ctx,user: discord.Member=None):\n if user == None or user.id == ctx.author.id:\n await ctx.send(\"{}\".format(ctx.author.mention))\n else:\n await ctx.send(\"{} {}\".format(ctx.author.mention, user.mention))\n img = random.choice(self.getreaction(\"cry\", \"0\"))\n embed = discord.Embed(colour=ctx.guild.me.top_role.colour)\n embed.set_image(url=img)\n await ctx.send(embed=embed)", "async def rolemenu_create(self,\n interaction: discord.Interaction,\n name: str,\n channel: discord.TextChannel,\n mode: int,\n color: str,\n placeholder: str,\n description: str = None):\n doc = await self.db.find_one({\n \"guild_id\": interaction.guild.id,\n \"name\": name\n })\n if doc:\n return await interaction.response.send_message(\n \"Role menu with that name exists.\", ephemeral=True)\n if channel.guild != interaction.guild:\n return await interaction.response.send_message(\n \"This channel is not in this server.\")\n if not channel.permissions_for(interaction.guild.me).send_messages:\n return await interaction.response.send_message(\n \"I don't have permission to send messages in this channel.\")\n try:\n color = discord.Color.from_str(color)\n except ValueError:\n return await interaction.response.send_message(\"Invalid color.\")\n await self.db.insert_one({\n \"message_id\": None,\n \"name\": name,\n \"mode\": mode,\n \"channel_id\": channel.id,\n \"roles\": [],\n \"guild_id\": interaction.guild.id,\n \"color\": color.value,\n \"placeholder\": placeholder,\n \"description\": description\n })\n await 
interaction.response.send_message(\n \"Role menu created. It is currently empty, however, and \"\n \"you'll need to add roles with `/rolemenu role add.`\",\n ephemeral=True)", "async def rolemenu_add_role(self,\n interaction: discord.Interaction,\n name: str,\n role: discord.Role,\n emoji: str = None,\n description: str = None):\n doc = await self.db.find_one({\n \"guild_id\": interaction.guild.id,\n \"name\": name\n })\n if not doc:\n return await interaction.response.send_message(\n \"No role menu with that name exists.\", ephemeral=True)\n for role_doc in doc[\"roles\"]:\n if role_doc[\"id\"] == role.id:\n return await interaction.followup.send(\n \"Role is already in the menu.\", ephemeral=True)\n if len(doc[\"roles\"]) >= 25:\n return await interaction.response.send_message(\n \"This role menu is full.\", ephemeral=True)\n await interaction.response.defer(ephemeral=True)\n if role.guild != interaction.guild:\n return await interaction.response.send_message(\n \"This role is not in this server.\")\n if emoji:\n if emoji.startswith(\"<\") and emoji.endswith(\">\"):\n try:\n emoji = int(emoji[1:-1].split(\":\")[2])\n except ValueError:\n return await interaction.followup.send(\"Invalid emoji.\")\n else:\n try:\n message = await interaction.original_message()\n await message.add_reaction(emoji)\n except discord.HTTPException:\n return await interaction.followup.send(\"Invalid emoji.\")\n await self.db.update_one({\"_id\": doc[\"_id\"]}, {\n \"$push\": {\n \"roles\": {\n \"description\": description,\n \"id\": role.id,\n \"emoji\": emoji,\n \"date_added\": datetime.datetime.now(datetime.datetime.u)\n }\n }\n })\n doc = await self.db.find_one({\"_id\": doc[\"_id\"]})\n await interaction.followup.send(f\"Added {role.mention} to the menu.\")\n menu = Menu(self, interaction.guild, doc)\n await menu.update()", "async def warn(self, ctx, user: discord.Member, *, reason: str = None):\r\n author = ctx.message.author\r\n server = ctx.message.guild\r\n channel = ctx.message.channel\r\n if user == author:\r\n await ctx.send(\"You can not warn yourself :no_entry:\")\r\n return\r\n if user.top_role.position >= author.top_role.position:\r\n if author == server.owner:\r\n pass\r\n else:\r\n await ctx.send(\"You can not warn someone higher than your own role :no_entry:\")\r\n return\r\n if str(server.id) not in self.d:\r\n self.d[str(server.id)] = {}\r\n dataIO.save_json(self.file, self.d)\r\n if str(user.id) not in self.d[str(server.id)]:\r\n self.d[str(server.id)][str(user.id)] = {}\r\n dataIO.save_json(self.file, self.d)\r\n if \"muted\" not in self.d[str(server.id)][str(user.id)]:\r\n self.d[str(server.id)][str(user.id)][\"toggle\"] = False\r\n dataIO.save_json(self.file, self.d)\r\n if \"time\" not in self.d[str(server.id)][str(user.id)]:\r\n self.d[str(server.id)][str(user.id)][\"time\"] = None\r\n dataIO.save_json(self.file, self.d)\r\n if \"amount\" not in self.d[str(server.id)][str(user.id)]:\r\n self.d[str(server.id)][str(user.id)][\"amount\"] = None\r\n dataIO.save_json(self.file, self.d)\r\n role = discord.utils.get(server.roles, name=\"Muted - Sensei\")\r\n overwrite = discord.PermissionOverwrite()\r\n overwrite.send_messages = False\r\n perms = discord.PermissionOverwrite()\r\n perms.speak = False\r\n if not role:\r\n role = await server.create_role(name=\"Muted - Sensei\")\r\n for channels in server.text_channels:\r\n await channels.set_permissions(role, overwrite=overwrite)\r\n for channels in server.voice_channels:\r\n await channels.set_permissions(role, overwrite=perms)\r\n await 
self._create_warn(server, user)\r\n if reason:\r\n if reason not in self.data[str(server.id)][\"user\"][str(user.id)][\"reasons\"]:\r\n self.data[str(server.id)][\"user\"][str(user.id)][\"reasons\"][reason] = {}\r\n self.data[str(server.id)][\"user\"][str(user.id)][\"warnings\"] = self.data[str(server.id)][\"user\"][str(user.id)][\r\n \"warnings\"] + 1\r\n dataIO.save_json(self.JSON, self.data)\r\n if self.data[str(server.id)][\"user\"][str(user.id)][\"warnings\"] == 1:\r\n await ctx.send(\"**{}** has been warned :warning:\".format(user))\r\n s = discord.Embed(colour=000000, timestamp=__import__('datetime').datetime.utcnow())\r\n s.set_author(name=\"You have been warned in {}\".format(server.name), icon_url=server.icon_url)\r\n try:\r\n s.add_field(name=\"Reason\", value=reason, inline=False)\r\n except:\r\n s.add_field(name=\"Reason\", value=\"None Given\", inline=False)\r\n s.add_field(name=\"Moderator\", value=author)\r\n s.add_field(name=\"Next Action\", value=\"Mute\")\r\n action = \"Warn\"\r\n try:\r\n await self._log(author, server, action, reason, user)\r\n except:\r\n pass\r\n if self.data[str(server.id)][\"user\"][str(user.id)][\"warnings\"] == 2:\r\n try:\r\n await user.add_roles(role)\r\n self.d[str(server.id)][str(user.id)][\"toggle\"] = True\r\n self.d[str(server.id)][str(user.id)][\"amount\"] = 600\r\n self.d[str(server.id)][str(user.id)][\"time\"] = ctx.message.created_at.timestamp()\r\n dataIO.save_json(self.file, self.d)\r\n except:\r\n await ctx.send(\"I cannot add the mute role to the user :no_entry:\")\r\n return\r\n await ctx.send(\"**{}** has been muted due to their second warning :white_check_mark:\".format(user))\r\n s = discord.Embed(colour=000000, timestamp=__import__('datetime').datetime.utcnow())\r\n s.set_author(name=\"You have been muted in {}\".format(server.name), icon_url=server.icon_url)\r\n try:\r\n s.add_field(name=\"Reason\", value=reason, inline=False)\r\n except:\r\n s.add_field(name=\"Reason\", value=\"None Given\", inline=False)\r\n s.add_field(name=\"Moderator\", value=author)\r\n s.add_field(name=\"Next Action\", value=\"Kick\")\r\n action = \"Mute\"\r\n try:\r\n await self._log(author, server, action, reason, user)\r\n except:\r\n pass\r\n await asyncio.sleep(600)\r\n if role in user.roles:\r\n try:\r\n await user.remove_roles(role)\r\n except:\r\n pass\r\n self.d[str(server.id)][str(user.id)][\"toggle\"] = False\r\n dataIO.save_json(self.file, self.d)\r\n action = \"Unmute\"\r\n try:\r\n await self._log(author, server, action, reason, user)\r\n except:\r\n pass\r\n if self.data[str(server.id)][\"user\"][str(user.id)][\"warnings\"] == 3:\r\n try:\r\n await server.kick(user, reason=\"Kick made by {}\".format(author))\r\n except:\r\n await ctx.send(\"I'm not able to kick that user :no_entry:\")\r\n return\r\n await ctx.send(\"**{}** has been kicked due to their third warning :white_check_mark:\".format(user))\r\n s = discord.Embed(colour=000000, timestamp=__import__('datetime').datetime.utcnow())\r\n s.set_author(name=\"You have been kicked from {}\".format(server.name), icon_url=server.icon_url)\r\n try:\r\n s.add_field(name=\"Reason\", value=reason, inline=False)\r\n except:\r\n s.add_field(name=\"Reason\", value=\"None Given\", inline=False)\r\n s.add_field(name=\"Moderator\", value=author)\r\n s.add_field(name=\"Next Action\", value=\"Ban\")\r\n action = \"Kick\"\r\n try:\r\n await self._log(author, server, action, reason, user)\r\n except:\r\n pass\r\n if self.data[str(server.id)][\"user\"][str(user.id)][\"warnings\"] >= 4:\r\n try:\r\n await 
server.ban(user, reason=\"Ban made by {}\".format(author))\r\n except:\r\n await ctx.send(\"I'm not able to ban that user :no_entry:\")\r\n del self.data[str(server.id)][\"user\"][str(user.id)]\r\n dataIO.save_json(self.JSON, self.data)\r\n return\r\n await ctx.send(\"**{}** has been banned due to their fourth warning :white_check_mark:\".format(user))\r\n await server.ban(user, reason=\"Ban made by {}\".format(author))\r\n s = discord.Embed(colour=000000, timestamp=__import__('datetime').datetime.utcnow())\r\n s.set_author(name=\"You have been banned from {}\".format(server.name), icon_url=server.icon_url)\r\n try:\r\n s.add_field(name=\"Reason\", value=reason, inline=False)\r\n except:\r\n s.add_field(name=\"Reason\", value=\"None Given\", inline=False)\r\n s.add_field(name=\"Moderator\", value=author)\r\n s.add_field(name=\"Next Action\", value=\"None\")\r\n del self.data[str(server.id)][\"user\"][str(user.id)]\r\n dataIO.save_json(self.JSON, self.data)\r\n action = \"Ban\"\r\n try:\r\n await self._log(author, server, action, reason, user)\r\n except:\r\n pass\r\n try:\r\n await user.send(embed=s)\r\n except:\r\n pass", "async def command_assign_role(self, context, role: str):\n try:\n await context.author.add_roles(discord.utils.get(\n context.guild.roles, name=role))\n await context.message.add_reaction('👍')\n except Exception as e:\n await context.message.add_reaction('👎')\n await context.send('Role could not be assigned')\n print(f'Errored in command_assign_role.', e)", "def role(self):\n return self._role", "def role(self):\n return self._role", "def role(self):\n return self._role", "async def tagrole(self, ctx):\r\n\t\trole = self.settings.Get(ctx, 'role', self.settings.ServerConfig(ctx.guild.id, 'TagRole'))\r\n\t\tif not role: return await ctx.send('Can\\'t find role: {}'.format(role))\r\n\t\tawait ctx.send('Tag role set to: {}'.format(role))", "def role(self, role):\n\n self._role = int(role)", "async def _role_player(self, ctx: Context, *, role: discord.Role):\n\n msg = await ctx.send(\n _(\n \"Are you sure you want to set `{}` as player role?\"\n ).format(role.name)\n )\n start_adding_reactions(msg, ReactionPredicate.YES_OR_NO_EMOJIS)\n\n pred = ReactionPredicate.yes_or_no(msg, ctx.author)\n await ctx.bot.wait_for(\"reaction_add\", check=pred)\n\n if pred.result:\n await self.config.guild(ctx.guild).player_id.set(role.id)\n await ctx.send(_(\"Set `{}` as player role!\").format(role.name))\n else:\n await ctx.send(_(\"Aborted player role setup.\"))", "async def plaguedoctor(self, ctx):\n currency = await bank.get_currency_name(ctx.guild)\n await self.config.user(ctx.author).gameRole.set(\"Doctor\")\n await self.notify_user(ctx=ctx, user=ctx.author, notificationType=\"doctor\")\n await ctx.send(f\"{ctx.author} has spent 10,000 {currency} and become a Doctor.\")", "async def on_member_join(self, member):\n sid = member.server.id\n role = await self.get_role(member.server)\n try:\n muterole = self.riceCog2[server.id][\"muterole\"]\n except:\n muterole = default_muterole \n\n if 'poop' in self.riceCog2[sid]:\n if self.riceCog2[sid]['poop'] == True:\n if member.id in self.riceCog[sid]:\n if count >= 1:\n count = self.riceCog[sid][member.id][\"Count\"]\n poops = \"\\U0001f528\" * count\n role_name = \"Warning {}\".format(poops)\n is_there = False\n colour = 0xbc7642\n for role in member.server.roles:\n if role.name == role_name:\n poop_role = role\n is_there = True\n if not is_there:\n server = member.server\n poop_role = await self.bot.create_role(server)\n await 
self.bot.edit_role(role=poop_role,\n name=role_name,\n server=server)\n try:\n await self.bot.add_roles(member,\n poop_role)\n except discord.errors.Forbidden:\n await self.bot.say(\"No permission to add roles\")\n else:\n pass\n if member.id in self.norole[sid]:\n if self.norole[sid]['role'] == True:\n role = discord.utils.get(member.server.roles, name=\"NoBNL\")\n await self.bot.add_roles(member, role)\n \n if not role or not (sid in self.json and member.id in self.json[sid]):\n return\n\n duration = self.json[sid][member.id]['until'] - time.time()\n if duration > 0:\n role = discord.utils.get(member.server.roles, name=muterole)\n await self.bot.add_roles(member, role)\n\n reason = 'Punishment re-added on rejoin. '\n if self.json[sid][member.id]['reason']:\n reason += self.json[sid][member.id]['reason']\n\n if member.id not in self.handles[sid]:\n self.schedule_unpunish(duration, member, reason)", "async def perm_check(ctx,roles_list: List[int]):\n for n,role in enumerate(ctx.author.roles):\n # If authorized\n if role.id in roles_list:\n return \"pass\"\n # Not authorized\n if n == len(ctx.author.roles) - 1:\n return await ctx.send(embed=Embed(title=\"> **⚠ Attention !**\",description=\"Vous n'avez pas la permission d'éxecutez cette commande !\",color=Colour.from_rgb(255,255,0)).set_author(name=ctx.author.name,icon_url=ctx.author.avatar_url))", "async def build(self, ctx: Context):\n # Simulates that the bot is typing to visually show user command is being processed\n async with ctx.typing(): \n start_time = time()\n description= f'{emojis[\"discord\"]} Configuring `{ctx.guild.name}` for verification...\\n'\n\n guild = ctx.message.guild\n bot_role = get(ctx.guild.roles, name='AberLink')\n everyone_role = get(ctx.guild.roles, name='@everyone')\n verified_role = get(ctx.guild.roles, name='verified')\n verify_channel = get(guild.channels, name='verify')\n verify_perms = discord.PermissionOverwrite()\n verified_role_perms = discord.Permissions(\n send_messages=True, read_messages=True, read_message_history=True, \n change_nickname=True, embed_links=True, attach_files=True, \n add_reactions=True, external_emojis=True, \n connect=True, speak=True, stream=True, use_voice_activation=True\n )\n\n #make sure that the bots position is above that of verification\n bot_role.edit(position=2)\n\n # Change permissions on @everyone role\n await everyone_role.edit(reason='Configuring everyone role for verify', permissions=discord.Permissions())\n description += f'{int((time() - start_time) * 1000)}ms: `@everyone` removed all permissions\\n'\n # {int((end_time - start_time) * 1000)}\n \n # Create or modify verified role\n if verified_role is not None:\n await verified_role.edit(reason='Updating old verified role', permissions=verified_role_perms)\n description += f'{int((time() - start_time) * 1000)}ms: `verified` role already exists, updating to match permissions...\\n'\n else:\n verified_role = await guild.create_role(reason='Creating verified role', name='verified', permissions=verified_role_perms)\n verified_role.edit(position=1)\n description += f'{int((time() - start_time) * 1000)}ms: `verified` role created\\n'\n \n # Gives the bot the verified role\n bot = await guild.fetch_member(ctx.bot.user.id)\n await bot.add_roles(verified_role)\n\n # Create or modify verify channel\n if verify_channel is not None:\n description += f'{int((time() - start_time) * 1000)}ms: `verify` channel already exists, updating to match permissions...\\n'\n message = await verify_channel.send(f'Welcome to `{guild.name}`! 
If you are seeing this message then please type `!verify`')\n await message.pin()\n else:\n verify_channel = await guild.create_text_channel('verify')\n description += f'{int((time() - start_time) * 1000)}ms: `verify` channel created\\n'\n message = await verify_channel.send(f'Welcome to `{guild.name}`! If you are seeing this message then please type `!verify`')\n await message.pin()\n \n # Set permissions for roles in verify channel\n verify_perms.read_messages = True\n verify_perms.send_messages = True\n verify_perms.read_message_history = True\n await verify_channel.set_permissions(everyone_role, overwrite=verify_perms)\n verify_perms.read_messages = False\n verify_perms.send_messages = False\n await verify_channel.set_permissions(verified_role, overwrite=verify_perms)\n description += f'{emojis[\"aberlink\"]} This server is now setup for verification!'\n embed = Embed(description=description, colour=discord.Colour.green())\n await ctx.send(embed=embed)", "async def vouch(ctx, *, member_name=None):\n if ctx.message.channel.name.lower() not in bot_channels:\n return\n\n server = ctx.message.server\n member_roles = ctx.message.author.roles\n member_admin = discord.utils.find(lambda r: r.name.lower() in admin_roles, member_roles)\n if member_admin is not None:\n member = discord.utils.find(lambda c: c.name.lower() == member_name.lower(), server.members)\n roles = member.roles\n new_role = discord.utils.find(lambda r: r.name.lower() == required_role, server.roles)\n roles.append(new_role)\n await amor_manager.replace_roles(member, *roles)\n await amor_manager.say('{0} granted citizenship'.format(member.name))", "def getRoleInfo(self, role):", "def role(self):\n _DEPRECATION_ERROR_ATTRIBUTE(\n self, \"role\", \"Use attribute 'construct_type' instead\"\n ) # pragma: no cover", "def setRole(self, room, nick, role):\n if role not in ('moderator', 'participant', 'visitor', 'none'):\n raise TypeError\n query = ET.Element('{http://jabber.org/protocol/muc#admin}query')\n item = ET.Element('item', {'role':role, 'nick':nick}) \n query.append(item)\n iq = self.xmpp.makeIqSet(query)\n iq['to'] = room\n result = iq.send()\n if result is False or result['type'] != 'result':\n raise ValueError\n return True", "async def roleinfo(self, ctx, role: discord.Role):\n embed = discord.Embed(\n title=\"Role Info\", colour=role.colour, timestamp=datetime.datetime.utcnow()\n )\n embed.add_field(name=\"ID\", value=role.id)\n embed.add_field(name=\"Name\", value=role.name)\n embed.add_field(name=\"Members\", value=str(len(role.members)))\n embed.add_field(\n name=\"Created At\", value=role.created_at.strftime(\"%d/%m/%Y, %H:%M\")\n )\n embed.add_field(name=\"Hoisted\", value=role.hoist)\n embed.add_field(name=\"Mentionable\", value=role.mentionable)\n if role.name == \"@everyone\":\n embed.add_field(name=\"Mention\", value=role.name)\n else:\n embed.add_field(name=\"Mention\", value=role.mention)\n embed.add_field(name=\"Position\", value=role.position)\n await ctx.send(embed=embed)" ]
[ "0.71692383", "0.6773015", "0.67139935", "0.671385", "0.6711312", "0.6706758", "0.66896105", "0.6675753", "0.66679424", "0.6658029", "0.65519524", "0.6475669", "0.6427986", "0.64168537", "0.6382526", "0.6370835", "0.63680696", "0.63477904", "0.63449776", "0.63068354", "0.62869173", "0.62821645", "0.62748575", "0.6272479", "0.626945", "0.62662137", "0.625472", "0.6252803", "0.6225996", "0.62082267", "0.6189307", "0.6184422", "0.6183897", "0.61838645", "0.6181683", "0.6160095", "0.61575925", "0.6144737", "0.6141668", "0.6136102", "0.61148155", "0.6091519", "0.60777026", "0.6077406", "0.60770905", "0.6073275", "0.6066265", "0.60618734", "0.603372", "0.60270154", "0.6025445", "0.6021772", "0.6015925", "0.60148716", "0.6008455", "0.5981047", "0.5978516", "0.5964696", "0.59467757", "0.59433967", "0.59365565", "0.5935458", "0.5920586", "0.5907607", "0.59039444", "0.5897818", "0.5894194", "0.5888107", "0.58722454", "0.58698416", "0.58636874", "0.58561283", "0.5848557", "0.5846636", "0.5843869", "0.5829994", "0.5826023", "0.5823393", "0.5822004", "0.5809459", "0.58026487", "0.5802233", "0.57970536", "0.57936054", "0.57906955", "0.5790457", "0.5790457", "0.5790457", "0.5789", "0.57864404", "0.57859874", "0.5769734", "0.57623005", "0.5760832", "0.5760209", "0.57548743", "0.57499576", "0.5748509", "0.5748292", "0.5746673" ]
0.6386162
14
What is your weakness?
async def choose_your_poison(): return InteractionResponse(embed = Embed('Choose your poison'), components = CHOOSE_YOUR_POISON_ROW)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_weakness(self):\r\n return self.weakness", "def is_weak(self):\n return self.binding == 'STB_WEAK'", "def think(self):\n pass", "def set_weakness(self, weakness):\r\n self.weakness = weakness", "def _perturbInPlaceHard(self):\n die", "def weak(self):\n return self", "def weaknessChecker(self, hasWeakness):\n attack_penalty = 2 if hasWeakness else 0\n\n return attack_penalty", "def check_stability(self):", "def noCheck():\n dislin.nochek()", "def generateBroSafetyCheck(self):\n pass", "def problem_298():\n pass", "def untargeted(self):\n\t\tpass", "def life_critical():\n return True", "def is_not_used(self):\n pass", "def disarm(self):\n pass", "def weak_collision_breaking():\n # Weak Collision Resistance: Given an arbitrary x there exists no x' with x' != x so that h(x) = h(x')\n\n number_trials = 0 # variable to hold number of trials\n\n # Generate 1 random string of length 20 which is fixed for matching\n fixed_rand_str = generate_random_string(20)\n\n while True:\n # Generate a 2nd random strings of length 20\n non_fixed_rand_str = generate_random_string(20)\n\n # Make sure that the strings are not equal to each other\n if fixed_rand_str == non_fixed_rand_str:\n continue\n\n # Otherwise create hashes and see if the hash's match\n else:\n\n hash_string_1, hash_string_2 = generate_hash(fixed_rand_str, non_fixed_rand_str)\n\n # Keep adding to the number of trials\n number_trials += 1\n\n # If the 1st 24 bits of both has values are\n # the same, then break out of the while loop\n # as the hashes match\n if (hash_string_1[0:6] == hash_string_2[0:6]):\n break\n\n return number_trials", "def exercise_b2_53():\r\n pass", "def test_weakref(self):\n registry = ResultRegistry()\n er = EventualResult(None, None)\n registry.register(er)\n ref = weakref.ref(er)\n del er\n gc.collect()\n self.assertIdentical(ref(), None)", "def freeze(self,):\n pass", "def breakpointhook(*args, **kws): # real signature unknown; restored from __doc__\n pass", "def doubt_check(self):\n raise NotImplementedError()", "def exercise_b2_26():\r\n pass", "def lost(self):\r\n return None", "def fix_bug(self):\n self.bugged = False\n self.error_prob = 0.0", "def exercise_b2_106():\r\n pass", "def _optimise(self):\n pass", "def exercise_b2_27():\r\n pass", "def exercise_b2_113():\r\n pass", "def badness(self_):\n return self._badness(self_.time)", "def _disable_weakref(self) -> None:\n self._tx_weakref_disabled = True", "def getHardness(self, hardness):\n return self.__hardness", "def exercise_b2_69():\r\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def freeze_notify(self): # 
reliably restored by inspect\n pass", "def freeze_notify(self): # reliably restored by inspect\n pass", "def weakref_proxy(*args, **kwargs):\n\n pass", "def weakref_proxy(*args, **kwargs):\n\n pass", "def exercise_b2_56():\r\n pass", "def exercise_b2_107():\r\n pass", "def dead_end_value(self):\n pass", "def bad(self):\n raise NotImplementedError", "def bad(self):\n raise NotImplementedError", "def self_tracking(self):\n return self.tlwh()", "def i_am_locking(self):\r\n pass", "def get_spooled(self):\r\n return True", "def exercise_b2_43():\r\n pass", "def known_safes(self):\n self.safes", "def test_pm_Completeness(self):\n pass", "def exercise_b2_98():\r\n pass", "def violated(self) -> bool:\n ...", "def prove_R() -> Proof:\n # Optional Task 6.7g", "def exercise_b2_70():\r\n pass", "def just_died(self):\r\n self.dead = True", "def check_vulnerability(self):\n\t\tpass", "def _verify(self):\n pass", "def gc_collect_cycles():\n raise NotImplementedError()", "def probe(self):", "def exercise_b2_52():\r\n pass", "def _enable_weakref(self) -> None:\n self._tx_weakref_disabled = False", "def exercise_b2_95():\r\n pass", "def exercise_b2_82():\r\n pass", "def mechanism(self):", "def is_weak(self):\n\n # set the minimum number of keypooints\n keypoint_threshold = 20 if self.detector_method == 'FAST'\\\n else 5\n \n # check if the tracker has less than minimum keypoints to track\n c1 = self.old_points.shape[0] < keypoint_threshold\n \n x,y,w,h = self.bounding_box\n row, col = self.fg_mask.shape\n \n # check if the window is out of the frame\n c2 = x >= col-1 or x < 0\n c3 = y >= row-1 or y < 0\n c4 = x+w >= col-1\n c5 = y+h >= row-1\n \n return c1+c2+c3+c4+c5", "def test_kyc_post_legal_share_holder(self):\n pass", "def destantiate(self, memo):\n raise NotImplementedError()", "def test_does_not_die(self):\n self.herb.fitness = 1\n nt.assert_false(self.herb.death())", "def verify_and_freeze(self):\n if self._driver is None and not self._strobers:\n raise ValueError(\n 'internal %s is not driven by anything' % self._name)\n if not self._users:\n raise ValueError(\n 'internal %s is never used' % self._name)\n self._frozen = True", "def ComputeERecoverable(self):\r\n pass", "def is_inequality(self): \n return False", "def prove_NO() -> Proof:\n # Optional Task 6.9c", "def exercise_b2_86():\r\n pass", "def warn():\n pass", "def cannot_resolve ( self, *deps, **kw ):\n return self._do_resolve_weak_greedy ( deps, kw, greedy=True ) is None", "def test_for_leakage(self):\n src, trg = next(iter(self.validation_loader))\n trg_mem = trg.clone().detach()\n result = greedy_decode(self.model, src, 20, trg)\n self.assertNotEqual(result[0, 1, 0], trg_mem[0, 1, 0])\n self.assertEqual(result[0, 1, 1], trg_mem[0, 1, 1])\n self.assertEqual(result[0, 1, 2], trg_mem[0, 1, 2])\n loss = pytorch_criterion_dict[\"MSE\"](trg, trg_mem)\n\n self.assertNotEqual(result[0, 1, 0], result[0, 4, 0])\n self.assertGreater(loss, 0)", "def test_theft_and_stealing(self):", "def exercise_b2_93():\r\n pass", "def friction_model():\n return TimeWeakening()", "def new_strong_reference(self) -> int:\n pass", "def ApplicationEvidence(self) -> Evidence:", "def exercise_b2_39():\r\n pass", "def realsense():\n pass", "def test_flonum_unsafe(doctest):", "def freeze(self):\n raise NotImplementedError()", "def test_full_house_flush_ind(self):", "def broken_refers(one_experiment, storage):\n ensure_deterministic_id(\n \"test_single_exp\", storage, update=dict(refers={\"oups\": \"broken\"})\n )", "def prove_CM() -> Proof:\n # Optional Task 6.7f", "def 
testWithoutNoise(self):\n self.checkMatching(self.references)" ]
[ "0.6916549", "0.6191317", "0.6120857", "0.6110386", "0.61050045", "0.59646535", "0.5898822", "0.5773751", "0.5765894", "0.56700367", "0.5658522", "0.5572783", "0.5559927", "0.54725146", "0.54229736", "0.5415567", "0.53906286", "0.53847134", "0.5383323", "0.538081", "0.53763866", "0.5367584", "0.5365531", "0.53503186", "0.53496146", "0.5330566", "0.53160214", "0.5293715", "0.52888083", "0.528668", "0.5275317", "0.5272423", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52597314", "0.52526706", "0.52526706", "0.52497977", "0.52273476", "0.5224122", "0.5222532", "0.5222532", "0.5213563", "0.5171371", "0.5167962", "0.5160942", "0.51542217", "0.5146547", "0.5145443", "0.5145034", "0.51370597", "0.5135989", "0.51350385", "0.5134728", "0.5133959", "0.51255757", "0.5117622", "0.5114131", "0.51035047", "0.50882494", "0.50846493", "0.50812685", "0.5078208", "0.50774986", "0.5074169", "0.50733095", "0.50654024", "0.5060792", "0.5057449", "0.50531805", "0.5052782", "0.50447154", "0.50446934", "0.5043595", "0.50404876", "0.50311685", "0.50305396", "0.50303775", "0.5027961", "0.50268066", "0.5024797", "0.5015026", "0.4998008", "0.49958146", "0.49936736", "0.4992264", "0.49900058" ]
0.0
-1
Adds an emoji to the guild.
async def add_emoji( client, event, emoji: ('str', 'The emoji to add.'), name: ('str', 'Custom name to add the emoji with.') = None ): if not client.is_owner(event.user): abort('Owner only!') emoji = parse_emoji(emoji) if emoji is None: abort('That\'s not an emoji.') if emoji.is_unicode_emoji(): abort('Cannot add unicode emojis') if name is None: name = emoji.name else: if len(name) > 32: abort('Name length can be max 32.') embed = Embed('Are you sure to add this emoji?').add_field('Name:', name).add_image(emoji.url) message = yield InteractionResponse(embed = embed, components = ADD_EMOJI_COMPONENTS) try: component_interaction = await wait_for_component_interaction( message, timeout = 300.0, check = functools.partial(check_is_user_same, event.user) ) except TimeoutError: component_interaction = None cancelled = True else: if component_interaction.interaction == ADD_EMOJI_BUTTON_CANCEL: cancelled = True else: cancelled = False if cancelled: embed.title = 'Adding emoji has been cancelled.' else: embed.title = 'Emoji has been added!' async with client.http.get(emoji.url) as response: emoji_data = await response.read() await client.emoji_create(event.guild, name, emoji_data) yield InteractionResponse(embed = embed, components = None, message = message, event = component_interaction)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def emoji(self, new_emoji):\n self._manager._items[self.name][\"emoji\"] = new_emoji", "async def process_add_emoji(\n emoji,\n emoji_name,\n user_id,\n ctx: commands.Context = None,\n inter: AppCmdInter = None,\n allowed_mentions=None,\n):\n response_deferred = await defer_inter(inter)\n url = emoji if not isinstance(emoji, disnake.PartialEmoji) else emoji.url\n user = await User.get(user_id)\n args = tuple()\n key = \"add_emoji_fail\"\n if len(emoji_name) < 2:\n emoji_name = \"EmojiName\"\n\n if ctx:\n http_session = ctx.bot.http_session\n guild = ctx.guild\n else:\n http_session = inter.bot.http_session\n guild = inter.guild\n\n try:\n async with http_session.get(url) as r:\n if r.status == 200:\n await guild.create_custom_emoji(name=emoji_name, image=await r.read())\n key = \"add_emoji_success\"\n except aiohttp.InvalidURL:\n key = \"invalid_url\"\n except disnake.HTTPException as e:\n if e.code == 30008:\n key = \"max_emojis\"\n if e.code == 50035:\n key = \"emoji_size_reached\"\n args = (f\"https://ezgif.com/optimize?url={url}\",)\n except Exception as e:\n logger.error(\n f\"{e} - Processing AddEmoji command failed. \"\n f\"EMOJI: {emoji} -> EMOJI NAME: {emoji_name}, User ID: {user_id}\"\n )\n key = \"add_emoji_fail\"\n\n return await send_message(\n *args,\n key=key,\n user=user,\n inter=inter,\n ctx=ctx,\n allowed_mentions=allowed_mentions,\n response_deferred=response_deferred,\n )", "async def emoji(self, ctx):\n emb = discord.Embed(colour=self.color)\n emb.add_field(name='Usage', value=f'```{self.bot.command_prefix}emoji <emojiname>```')\n await ctx.message.edit(embed=emb)", "async def starboard_emoji(self, ctx, emoji):\n if emoji[0] == \"<\":\n # is custom emoji\n emoji_obj = await util.get_emoji(ctx, emoji)\n if emoji_obj is None:\n raise exceptions.Warning(\"I don't know this emoji!\")\n\n await self.bot.db.execute(\n \"\"\"\n INSERT INTO starboard_settings (guild_id, emoji_name, emoji_id, emoji_type)\n VALUES (%s, %s, %s, %s)\n ON DUPLICATE KEY UPDATE\n emoji_name = VALUES(emoji_name),\n emoji_id = VALUES(emoji_id),\n emoji_type = VALUES(emoji_type)\n \"\"\",\n ctx.guild.id,\n None,\n emoji_obj.id,\n \"custom\",\n )\n await util.send_success(\n ctx, f\"Starboard emoji is now {emoji} (emoji id `{emoji_obj.id}`)\"\n )\n else:\n # unicode emoji\n emoji_name = emoji_literals.UNICODE_TO_NAME.get(emoji)\n if emoji_name is None:\n raise exceptions.Warning(\"I don't know this emoji!\")\n\n await self.bot.db.execute(\n \"\"\"\n INSERT INTO starboard_settings (guild_id, emoji_name, emoji_id, emoji_type)\n VALUES (%s, %s, %s, %s)\n ON DUPLICATE KEY UPDATE\n emoji_name = VALUES(emoji_name),\n emoji_id = VALUES(emoji_id),\n emoji_type = VALUES(emoji_type)\n \"\"\",\n ctx.guild.id,\n emoji_name,\n None,\n \"unicode\",\n )\n await util.send_success(ctx, f\"Starboard emoji is now {emoji}\")\n await self.bot.cache.cache_starboard_settings()", "def _add_emoji_listener(self, chan_id: str, msg_id: str, emoji: str):\n if not emoji in self._messages[chan_id][msg_id][\"reactions\"]:\n self._messages[chan_id][msg_id][\"reactions\"][emoji] = {\n \"add_callbacks\": [],\n \"rm_callbacks\": []\n }\n else:\n raise ValueError(\"Tried to create space for an already existing listener!\")", "async def addreact(self, ctx, word, emoji):\n guild = ctx.message.guild\n message = ctx.message\n emoji = https://i.imgur.com/CWeQ620.jpg", "async def addemoji(self, ctx, url: str, emoji_name=None):\n org_emoji_name = emoji_name\n list_of_emojis = url.split(',')\n for emoji in list_of_emojis:\n await 
asyncio.sleep(0)\n try:\n url = await self.make_emoji(ctx, emoji)\n if isinstance(url, str):\n emoji_name = str(org_emoji_name)\n if isinstance(url, discord.partial_emoji.PartialEmoji) or isinstance(url, discord.PartialEmoji):\n if not org_emoji_name or len(list_of_emojis) > 1:\n emoji_name = f\"{url.name}\"\n url = f\"{url.url}\"\n if len(emoji_name) < 2:\n return await ctx.send(\"> **Please enter an emoji name more than two letters.**\")\n async with self.ex.session.get(url) as r:\n if r.status == 200:\n await ctx.guild.create_custom_emoji(name=emoji_name, image=await r.read())\n emojis = self.ex.client.emojis\n max_emoji_length = len(emojis)\n if emoji_name in str(emojis[max_emoji_length-1]):\n await ctx.send(emojis[max_emoji_length-1])\n elif emoji_name in str(emojis[0]):\n await ctx.send(emojis[0])\n else:\n await ctx.send(f\"> **Added :{emoji_name}:**\")\n elif r.status == 404:\n await ctx.send(\"> **That URL was not Found.**\")\n elif r.status == 403:\n await ctx.send(\"> **I do not have access to that site.**\")\n else:\n await ctx.send(\"> **I was not able to connect to that url**\")\n except discord.HTTPException as e:\n if e.code == 30008:\n err_msg = f\"Could not add emoji due to the maximum number of emojis reached.\"\n log.console(f\"{err_msg} Guild ID: {ctx.guild.id}\")\n return await ctx.send(f\"> **{err_msg}**\")\n if e.code == 50035:\n ezgif = f\"https://ezgif.com/optimize?url={url}\"\n log.console(\n f\"File cannot be larger than 256.0 kb. Please optimize the emoji here. {ezgif}\")\n return await ctx.send(\n f\">>> **File cannot be larger than 256.0 kb. Please optimize the emoji here.**\\n <{ezgif}>\")\n\n except aiohttp.InvalidURL:\n await ctx.send(f\"> **Invalid URL.**\")\n except Exception as e:\n log.console(e)", "def test__Emoji__guild():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_is(emoji.guild, None)\n \n emoji = Emoji()\n vampytest.assert_is(emoji.guild, None)\n \n emoji_id = 202301010069\n guild_id = 202301010070\n guild = Guild.precreate(guild_id)\n emoji = Emoji.precreate(\n emoji_id,\n guild_id = guild_id,\n )\n vampytest.assert_is(emoji.guild, guild)", "async def emoji(self, ctx, *, text):\n await ctx.message.delete()\n new_msg = \"\"\n for char in text:\n if char.isalpha():\n new_msg += char_to_emoji(char) + ' '\n elif char == ' ':\n new_msg += ' '\n elif char.isspace():\n new_msg += char\n\n if len(new_msg):\n await ctx.send(new_msg)", "async def _msgvote_upemoji(self, ctx, emoji):\n\n emoji = str(self.fix_custom_emoji(ctx.message.server, emoji))\n self.settings[\"up_emoji\"] = emoji\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"Upvote emoji set to: \" + emoji)", "async def emoji_create(client, emoji):\n channel = get_log_emoji_channel(emoji.guild_id)\n if (channel is None):\n return\n \n # We get the creator of the emoji.\n try:\n await client.emoji_get(emoji, force_update=True)\n except ConnectionError:\n # No internet connection\n return\n \n except DiscordException as err:\n # Sticker already deleted?\n if err.code != ERROR_CODES.unknown_emoji:\n raise\n \n await client.message_create(\n channel,\n embed = build_emoji_create_embed(emoji),\n allowed_mentions = None,\n )", "async def emojireact(self, ctx):\n if ctx.invoked_subcommand is None:\n guild = ctx.message.guild\n guild_emoji = await self.config.guild(guild).guild()\n unicode_emoji = await self.config.guild(guild).unicode()\n if ctx.channel.permissions_for(ctx.me).embed_links:\n em = discord.Embed(colour=discord.Colour.blue())\n em.title = _(\"Emojireact 
settings for \") + guild.name\n if guild_emoji:\n em.add_field(name=_(\"Server Emojis \"), value=str(guild_emoji))\n if unicode_emoji:\n em.add_field(name=_(\"Unicode Emojis \"), value=str(unicode_emoji))\n if len(em.fields) > 0:\n await ctx.send(embed=em)\n else:\n msg = _(\"Emojireact settings for \") + guild.name + \"\\n\"\n if guild_emoji:\n msg += _(\"Server Emojis \") + str(guild_emoji) + \"\\n\"\n if unicode_emoji:\n msg += _(\"Unicode Emojis \") + str(unicode_emoji) + \"\\n\"\n await ctx.send(msg)", "async def stealemoji(self, ctx, *, emojis):\n try:\n m = await commands.MessageConverter().convert(ctx, emojis)\n emojis = m.content\n except commands.MessageNotFound:\n pass\n\n emojis = [await commands.PartialEmojiConverter().convert(ctx, e) for e in\n re.findall(r'<a?:\\w+:\\d+>', emojis)]\n\n if not emojis:\n await ctx.send_help()\n return\n\n ae = list(ctx.guild.emojis) + emojis\n if len([e for e in ae if not e.animated]) > ctx.guild.emoji_limit:\n await ctx.send(\"Not enough emoji slots\")\n if len([e for e in ae if e.animated]) > ctx.guild.emoji_limit:\n await ctx.send(\"Not enough animated emoji slots\")\n\n async with ctx.typing():\n for emoji in emojis:\n if emoji.name in [e.name for e in ctx.guild.emojis]:\n continue\n await ctx.guild.create_custom_emoji(name=emoji.name, image=await emoji.url.read())\n await ctx.tick()", "async def _serveremoji(self, ctx):\n non_animated_list= [f'<:{i.name}:{i.id}>' for i in ctx.guild.emojis if not i.animated]\n animated_list= [f'<a:{i.name}:{i.id}>' for i in ctx.guild.emojis if i.animated]\n\n if len(non_animated_list)==0 and len(animated_list)==0:\n await ctx.send(f\"\"\":exclamation: {ctx.author.mention}\n```{random.choice(self.bot.SERVER_CONFIG['text_colors'])}\nNo custom emojis has been added in this Server.\n```\"\"\")\n else:\n #NON ANIMATED EMOJIS\n if len(non_animated_list)>0:\n await ctx.send(f'**{len(non_animated_list)} Server Emojis**')\n k=0\n non_animated=[]\n temp=''\n for i in range(ceil(len(non_animated_list)/5)):\n temp += ' '.join(non_animated_list[k:k+5])+'\\n'\n k+=5\n if k%25==0:\n non_animated.append(temp)\n temp=''\n non_animated.append(temp) if temp !='' else ''\n \n for i in non_animated:\n await ctx.send(i)\n\n\n #ANIMATED EMOJIS\n if len(animated_list)>0:\n await ctx.send(f'**{len(animated_list)} Server Animated Emojis**')\n k=0\n animated=[]\n temp=''\n for i in range(ceil(len(animated_list)/5)):\n temp += ' '.join(animated_list[k:k+5])+'\\n'\n k+=5\n if k%25==0:\n animated.append(temp)\n temp=''\n animated.append(temp) if temp !='' else ''\n \n for i in animated:\n await ctx.send(i)", "async def htc(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"htc\", emoji)", "async def getemoji(self, ctx):\n pass", "async def emoji_edit(client, emoji, old_attributes):\n channel = get_log_emoji_channel(emoji.guild_id)\n if (channel is None):\n return\n \n await client.message_create(\n channel,\n embed = build_emoji_edit_embed(emoji, old_attributes),\n allowed_mentions = None,\n )", "def init_emoji(self, client):\n for emoji in client.get_all_emojis():\n if emoji.name == self.emoji:\n self.emoji = str(emoji)\n return\n\n self.emoji = \":\" + self.emoji + \":\"", "def is_custom_emoji(self):\n ...", "async def emojis(self, ctx):\n\n\t\tawait self.message_leaderboard(ctx, \"emojis\")", "async def emojis(self, ctx):\n server = ctx.message.server\n await self.bot.say('This may take some time, generating list...')\n data = discord.Embed(description=\"Emojilist\")\n for ej in server.emojis:\n data.add_field(\n name=ej.name, 
value=str(ej) + \" \" + ej.id, inline=False)\n await self.bot.say(embed=data)", "def is_unicode_emoji(self):\n ...", "def save_emojicon(args):\n json_file = args.file[0]\n emoji_id = args.id[0]\n emojis = load_file(json_file, graceful=True)\n try:\n emoji = fetch_emojis(_config['route']['get'].format(id=emoji_id))[0]\n if emoji.get('id') not in [x.get('id') for x in emojis]:\n emojis.append(emoji)\n save_file(json_file, emojis)\n print(\"Emoji saved to '{0}'\".format(json_file))\n print_table([emoji])\n else:\n print(\"¯\\_(ツ)_/¯ Emoji with id '{0}' already saved!\".format(emoji_id))\n except IndexError:\n logging.error(\"¯\\_(ツ)_/¯ Couldn't find the emoji with id '{0}'!\"\n .format(emoji_id))\n sys.exit(3)", "async def openmoji(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"openmoji\", emoji)", "async def messenger(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"messenger\", emoji)", "async def emojidex(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"emojidex\", emoji)", "async def emoji_delete(client, emoji):\n channel = get_log_emoji_channel(emoji.guild_id)\n if (channel is None):\n return\n \n await client.message_create(\n channel,\n embed = build_emoji_delete_embed(emoji),\n allowed_mentions = None\n )", "def emoji(self):\n return self._manager.get_emoji(self.name)", "async def apple(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"apple\", emoji)", "async def mozilla(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"mozille\", emoji)", "async def get_emoji(self, guild_id: int, emoji_id: int) -> Emoji:\n if not guild_id:\n raise ValueError(\"Argument cannot be None: guild_id\")\n\n if not emoji_id:\n raise ValueError(\"Argument cannot be None: emoji_id\")\n\n emoji = await self._request(Route(\"GET\", f'/guilds/{guild_id}/emojis/{emoji_id}'))\n\n return Emoji(**emoji)", "def get_emoji(self, icon):\n emojis = self.config['emojis']\n emoji = emojis.get(icon, '')\n return emoji", "def add_mention(self, qid: str, mention: str, score: float):\n self._entity_symbols.add_mention(qid, mention, score)", "async def emojiboard(self, ctx, arg1: U = None, arg2: U = None, arg3: U = None):\n (channel, member, emoji) = self.resolve_arguments(arg1, arg2, arg3, types=get_args(U))\n\n await ctx.trigger_typing()\n\n member_id = member.id if member else None\n channel_id = channel.id if channel else None\n bot_ids = [bot.id for bot in filter(lambda user: user.bot, ctx.guild.members)]\n emoji_id = emoji.id if emoji else None\n\n data = await self.bot.db.emojiboard.select(ctx.guild.id, bot_ids, channel_id, member_id, emoji_id)\n\n embed =await self.display_emojiboard(ctx, data)\n await ctx.send(embed=embed)", "async def getemojiid(ctx, emoji: discord.Emoji):\n return await ctx.send(f\"{emoji} - `{emoji}`\")", "def test__Emoji__partial():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_false(emoji.partial)\n \n emoji = Emoji()\n vampytest.assert_true(emoji.partial)\n \n emoji_id = 202301010038\n guild_id = 202301010039\n emoji = Emoji.precreate(emoji_id, guild_id = guild_id)\n vampytest.assert_true(emoji.partial)\n \n \n emoji_id = 202301010040\n guild_id = 202301010041\n guild = Guild.precreate(guild_id)\n emoji = Emoji.precreate(emoji_id, guild_id = guild_id)\n guild.emojis[emoji_id] = emoji\n vampytest.assert_true(emoji.partial)\n \n \n client = Client(\n token = 'token_20230101_0000',\n )\n \n try:\n emoji_id = 202301010042\n guild_id = 202301010043\n guild = Guild.precreate(guild_id)\n guild.clients.append(client)\n emoji = Emoji.precreate(emoji_id, 
guild_id = guild_id)\n guild.emojis[emoji_id] = emoji\n vampytest.assert_false(emoji.partial)\n \n # Cleanup\n finally:\n client._delete()\n client = None\n clients = None", "async def _msgvote_downemoji(self, ctx, emoji):\n\n emoji = str(self.fix_custom_emoji(ctx.message.server, emoji))\n self.settings[\"dn_emoji\"] = emoji\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"Downvote emoji set to: \" + emoji)", "async def say(self, ctx, *, message):\n message = self.emojify(message)\n await ctx.send(message)", "async def show_emoji(\n emoji_name: str\n):\n emoji = parse_emoji(emoji_name)\n if emoji is None:\n abort('Please give an emoji')\n \n if emoji.is_unicode_emoji():\n abort('Cannot link unicode emojis.')\n \n return f'**Name:** {emoji} **Link:** {emoji.url}'", "async def addme(self, ctx):\n invite_url = discord.utils.oauth_url(self.bot.user.id, permissions=discord.Permissions(8))\n embed = self.bot.embeds.embed_builder(title='Add this bot to your own Discord server',\n description=invite_url,\n has_footer=False)\n await ctx.send(embed=embed)", "async def delete_emoji(self, guild_id: int, emoji_id: int):\n if not guild_id:\n raise ValueError(\"Argument cannot be None: channel_id\")\n\n if not emoji_id:\n raise ValueError(\"Argument cannot be None: emoji_id\")\n\n await self._request(Route('DELETE', f'/guilds/{guild_id}/emojis/{emoji_id}'))", "def test__Emoji__as_emoji():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_instance(emoji.as_emoji, str)\n \n emoji = Emoji()\n vampytest.assert_instance(emoji.as_emoji, str)", "async def facebook(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"facebook\", emoji)", "def test__Emoji__is_custom_emoji():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_false(emoji.is_custom_emoji())\n \n emoji = Emoji()\n vampytest.assert_true(emoji.is_custom_emoji())", "async def google(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"google\", emoji)", "async def whatsapp(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"whatsapp\", emoji)", "def test_emoji(self):\n name = u\"Emoji \\U0001F604\"\n elem = createElement(\"test\", text=name)\n xmlString1 = elementToXML(elem)\n parsed = readXMLString(xmlString1)[1]\n xmlString2 = elementToXML(parsed)\n self.assertEquals(xmlString1, xmlString2)", "def _handle_emojis(self, emojis: typing.List[dict]):\n for emoji in emojis:\n emoji_obj = dt_emoji.Emoji(**emoji, client=self._bot)\n emoji_obj.guild_id = self.id\n self._emojis[emoji_obj.id] = emoji_obj", "async def _unicode(self, ctx):\n if await self.config.guild(ctx.guild).unicode():\n await self.config.guild(ctx.guild).unicode.set(False)\n msg = _(\"Okay, I will not react to messages \" \"containing unicode emojis!\")\n await ctx.send(msg)\n else:\n await self.config.guild(ctx.guild).unicode.set(True)\n msg = _(\"Okay, I will react to messages \" \"containing unicode emojis!\")\n await ctx.send(msg)", "async def register_reaction_role(self, ctx, target_role_id: int, emoji_name: str):\n\n await self.bot.wait_until_ready()\n\n if emoji_name[0] == \"<\":\n emoji_name = emoji_name[1:-1]\n\n if target_role_id in self.bot.config.staff_role_ids:\n return await ctx.send(\"Error: Dangerous role found!\")\n\n target_role = ctx.guild.get_role(target_role_id)\n\n if target_role is None:\n return await ctx.send(\"Error: Role not found!\")\n\n target_role_name = target_role.name\n\n for key in self.reaction_config[\"reaction_roles_emoji_map\"]:\n value = self.reaction_config[\"reaction_roles_emoji_map\"][key]\n if type(value) is str 
and target_role_name == value:\n return await ctx.send(f\"Error: {target_role_name}: already registered.\")\n\n self.reaction_config[\"reaction_roles_emoji_map\"][emoji_name] = target_role_name\n self.save_reaction_config(self.reaction_config)\n await self.reload_reaction_message(False)\n\n await ctx.send(f\"{target_role_name}: registered.\")", "async def twitter(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"twitter\", emoji)", "def post(self, message, emoji=None):\n if not emoji:\n emoji = self.cfg['slack_emoji']\n response = self.slack.api_call(\n \"chat.postMessage\", channel=self.cfg['slack_channel'], text=message,\n username=self.cfg['slack_username'], icon_emoji=emoji\n )\n if 'ok' in response:\n return True\n logging.error(\"Error sending message: %s\", response['error'])\n return False", "async def on_raw_reaction_add(self, payload):\n emoji = str(payload.emoji)\n member = payload.member\n\n if member.bot:\n return\n\n channel = await self.bot.fetch_channel(payload.channel_id)\n message = await channel.fetch_message(payload.message_id)\n\n if emoji != settings.get_ticket_create_emoji():\n return\n \n if len(message.embeds) == 0 or message.embeds[0].title != settings.get_ticket_panel_embed().title:\n return\n \n await message.remove_reaction(emoji, member)\n await self.create_ticket(member,message.guild)", "def select_emoji():\n Selects\n try:\n emoji_icon = driver.find_element_by_xpath('//*[@id=\"content\"]/div/div/div/div[2]/div/div/div[2]/div[3]/div/div/div/div/div[5]/div[3]')\n emoji_icon.click()\n emojis = driver.find_elements_by_class_name('emoji-mart-emoji')\n emojis[0].click()\n click_on('type here')\n wait()\n except Exception as e:\n return \"Error: \" + str(e)\n return \"Success\"", "async def joypixels(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"joypixels\", emoji)", "async def microsoft(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"microsoft\", emoji)", "async def emote(self, ctx: Context, *, urls=None):\n if urls is None:\n urls = \"\"\n\n if ctx.message.attachments:\n for attachment in ctx.message.attachments:\n urls += attachment.url + \" \"\n\n custom_emojis = re.findall(r\"<a?:(\\w+):(\\d+)>\", urls)\n\n try:\n if re.findall('https?://(?:[-\\\\w.]|(?:%[\\\\da-fA-F]{2}))+', urls):\n # removing duplicate spaces\n urls = \" \".join(urls.split())\n url_list = urls.split(\" \")\n names = [link.split(\"/\")[-1] for link in url_list]\n names = [name[:name.find(\".\") + 1].replace(\".\", \"\") for name in names]\n responses = []\n\n for url in url_list:\n async with self.session.get(url) as response:\n responses.append(await response.read())\n\n images = list(response for response in responses)\n\n for i, name in enumerate(names):\n image = images[i]\n emoji = await ctx.guild.create_custom_emoji(name=name, image=image, reason=None)\n await ctx.send(f\"{emoji.url} \\nemoji {emoji.name} was created\")\n\n if custom_emojis:\n for emote in custom_emojis:\n url = f\"https://cdn.discordapp.com/emojis/{emote[1]}.png?v=1\"\n name = emote[0]\n\n async with self.session.get(url) as response:\n image = await response.read()\n\n emoji = await ctx.guild.create_custom_emoji(name=name, image=image, reason=None)\n await ctx.send(f\"{emoji.url} \\nemoji {emoji.name} was created\")\n\n except discord.errors.HTTPException as e:\n if e.status == 400:\n await ctx.send(f\":no_entry: | an error occurred during the emote process ```{e.text}```.\")", "def test__put_partial_emoji_inline_data_into__1():\n data = {}\n emoji = BUILTIN_EMOJIS['heart']\n \n 
put_partial_emoji_inline_data_into(emoji, data)\n \n vampytest.assert_in('emoji_name', data)", "async def suggest(self, ctx, *, suggestion: str):\n emojis = [\"โœ…\", \"โŒ\"]\n author = ctx.author\n guild = ctx.guild\n embed = Embed(color=Color.blurple(), timestamp=datetime.utcnow())\n embed.add_field(name=\"Suggestion\", value=suggestion)\n embed.set_author(name=f\"Suggestion by - {author}\", icon_url=author.avatar_url)\n msg = await ctx.send(embed=embed)\n await ctx.message.delete()\n for i in range(len(emojis)):\n await msg.add_reaction(emojis[i])", "def test__Emoji__is_unicode_emoji():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_true(emoji.is_unicode_emoji())\n \n emoji = Emoji()\n vampytest.assert_false(emoji.is_unicode_emoji())", "async def create(self, *,\n name: str, image_data: typing.Union[str, bytes],\n roles: 'typing.List[role.Role]' = None) -> 'dt_emoji.Emoji':\n if isinstance(image_data, bytes):\n image_data = base64ify(image_data)\n\n if roles is not None:\n roles = [r.id for r in roles]\n\n emoji_data = await self._guild._bot.http.create_guild_emoji(self._guild.id,\n name=name,\n image_data=image_data,\n roles=roles)\n emoji = dt_emoji.Emoji(**emoji_data, client=self._guild._bot)\n return emoji", "def isUnicodeEmoji(c : str) -> bool:\n return c in UNICODE_EMOJI", "def test__parse_emoji__coloned_builtin_name():\n emoji = BUILTIN_EMOJIS['heart']\n text = f':{emoji.name}:'\n\n parsed_emoji = parse_emoji(text)\n vampytest.assert_is(emoji, parsed_emoji)", "async def rolemenu_add_role(self,\n interaction: discord.Interaction,\n name: str,\n role: discord.Role,\n emoji: str = None,\n description: str = None):\n doc = await self.db.find_one({\n \"guild_id\": interaction.guild.id,\n \"name\": name\n })\n if not doc:\n return await interaction.response.send_message(\n \"No role menu with that name exists.\", ephemeral=True)\n for role_doc in doc[\"roles\"]:\n if role_doc[\"id\"] == role.id:\n return await interaction.followup.send(\n \"Role is already in the menu.\", ephemeral=True)\n if len(doc[\"roles\"]) >= 25:\n return await interaction.response.send_message(\n \"This role menu is full.\", ephemeral=True)\n await interaction.response.defer(ephemeral=True)\n if role.guild != interaction.guild:\n return await interaction.response.send_message(\n \"This role is not in this server.\")\n if emoji:\n if emoji.startswith(\"<\") and emoji.endswith(\">\"):\n try:\n emoji = int(emoji[1:-1].split(\":\")[2])\n except ValueError:\n return await interaction.followup.send(\"Invalid emoji.\")\n else:\n try:\n message = await interaction.original_message()\n await message.add_reaction(emoji)\n except discord.HTTPException:\n return await interaction.followup.send(\"Invalid emoji.\")\n await self.db.update_one({\"_id\": doc[\"_id\"]}, {\n \"$push\": {\n \"roles\": {\n \"description\": description,\n \"id\": role.id,\n \"emoji\": emoji,\n \"date_added\": datetime.datetime.now(datetime.datetime.u)\n }\n }\n })\n doc = await self.db.find_one({\"_id\": doc[\"_id\"]})\n await interaction.followup.send(f\"Added {role.mention} to the menu.\")\n menu = Menu(self, interaction.guild, doc)\n await menu.update()", "def addReactionGlyph(self, *args):\n return _libsbml.Layout_addReactionGlyph(self, *args)", "def test__Emoji__as_reaction():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_instance(emoji.as_reaction, str)\n \n emoji = Emoji()\n vampytest.assert_instance(emoji.as_reaction, str)", "def label_to_emoji(label):\n return emoji.emojize(emoji_dictionary[str(label)], use_aliases=True)", "def 
addCustom(self, word, guildId):\n flag = self.con.addCustomWord(word, guildId)\n\n if flag:\n self.custom[str(guildId)].append(word)\n\n return flag", "async def docomo(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"docomo\", emoji)", "async def mask(self, ctx, *, message):\n message = self.emojify(message)\n await ctx.message.delete()\n await ctx.send(message)", "async def on_raw_reaction_add(self, payload):\n\n\t\tguild = self.bot.get_guild(payload.guild_id)\n\t\tif guild is not None:\n\t\t\tchannel = guild.get_channel(payload.channel_id)\n\t\t\tmessage = await channel.fetch_message(payload.message_id)\n\t\t\tuser = guild.get_member(payload.user_id)\n\n\t\t\t# Update cached leaderboards\n\t\t\tif not payload.member.bot:\n\t\t\t\tif payload.message_id in self.cachedMessages:\n\t\t\t\t\tif payload.emoji.name == \"โžก๏ธ\":\n\t\t\t\t\t\tawait self.update_leaderboard_message(message, 1)\n\t\t\t\t\t\tawait message.remove_reaction(\"โžก๏ธ\", user)\n\t\t\t\t\telif payload.emoji.name == \"โฌ…๏ธ\":\n\t\t\t\t\t\tawait self.update_leaderboard_message(message, -1)\n\t\t\t\t\t\tawait message.remove_reaction(\"โฌ…๏ธ\", user)\n\n\t\t\t# Update reaction leaderboards\n\t\t\tif not payload.member.bot:\n\t\t\t\treactionLeaderboard = self.leaderboards[str(payload.guild_id)][\"reactionLeaderboard\"]\n\n\t\t\t\tif payload.emoji.id is not None:\n\t\t\t\t\tfor guildEmoji in guild.emojis:\n\t\t\t\t\t\tif payload.emoji.id == guildEmoji.id:\n\t\t\t\t\t\t\tif (\"<:\" + str(payload.emoji.name) + \":\" + str(payload.emoji.id) + \">\") not in reactionLeaderboard:\n\t\t\t\t\t\t\t\treactionLeaderboard[\"<:\" + str(payload.emoji.name) + \":\" + str(payload.emoji.id) + \">\"] = 1\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\treactionLeaderboard[\"<:\" + str(payload.emoji.name) + \":\" + str(payload.emoji.id) + \">\"] += 1\n\n\n\n\t\t\t\t\t\t\tbreak\n\t\t\t\telse:\n\t\t\t\t\tif payload.emoji.name not in reactionLeaderboard:\n\t\t\t\t\t\treactionLeaderboard[str(payload.emoji.name)] = 1\n\t\t\t\t\telse:\n\t\t\t\t\t\treactionLeaderboard[str(payload.emoji.name)] += 1\n\n\t\t\t\tif str(payload.emoji.id) in self.leaderboards[str(payload.guild_id)][\"emojiLeaderboard\"]:\n\t\t\t\t\tself.leaderboards[str(payload.guild_id)][\"emojiLeaderboard\"][str(payload.emoji.id)] += 1", "def label_to_emoji(label):\n return emoji.emojize(emoji_dictionary[str(label)], use_aliases=True)", "def label_to_emoji(label):\n return emoji.emojize(emoji_dictionary[str(label)], use_aliases=True)", "def label_to_emoji(label):\n return emoji.emojize(emoji_dictionary[str(label)], use_aliases=True)", "async def add(self, ctx, command_type, command, *output):\n command = command.lower()\n\n if command_type in (\"0\", \"no_prefix\", \"no prefix\"):\n command_type = 0\n elif command_type in (\"1\", \"prefix\"):\n command_type = 1\n elif command_type in (\"2\", \"embed\"):\n command_type = 2\n if len(output) < 2:\n raise roxbot.UserError(self.ERROR_EMBED_VALUE)\n try:\n output = self._embed_parse_options(output)\n except ValueError:\n raise roxbot.UserError(self.ERROR_OUTPUT_TOO_LONG)\n else:\n raise roxbot.UserError(self.ERROR_INCORRECT_TYPE)\n\n with db_session:\n\n if ctx.message.mentions or ctx.message.mention_everyone or ctx.message.role_mentions:\n raise roxbot.UserError(self.ERROR_AT_MENTION)\n elif len(output) > 1800:\n raise roxbot.UserError(self.ERROR_OUTPUT_TOO_LONG)\n elif command in self.bot.all_commands.keys() and command_type == 1:\n raise roxbot.UserError(self.ERROR_COMMAND_EXISTS_INTERNAL)\n elif select(c for c in CCCommands if c.name == 
command and c.guild_id == ctx.guild.id).exists():\n raise roxbot.UserError(self.ERROR_COMMAND_EXISTS)\n elif len(command.split(\" \")) > 1 and command_type == \"1\":\n raise roxbot.UserError(self.ERROR_PREFIX_SPACE)\n\n\n CCCommands(name=command, guild_id=ctx.guild.id, output=output, type=command_type)\n\n return await ctx.send(self.OUTPUT_ADD.format(command, output if len(output) > 1 or isinstance(output, dict) else output[0]))", "def fix_emoji(val):\n def _emoji_debugger(val):\n s = val.replace('<span class=\"emoji emoji1f450\"></span',\n '<span class=\"emoji emoji1f450\"></span>')\n\n def __fix_miss_match(m):\n return '<span class=\"emoji emoji%s\"></span>' % ({\n '1f63c': '1f601', '1f639': '1f602', '1f63a': '1f603',\n '1f4ab': '1f616', '1f64d': '1f614', '1f63b': '1f60d',\n '1f63d': '1f618', '1f64e': '1f621', '1f63f': '1f622',\n }.get(m.group(1), m.group(1)))\n return WeChatMeta.RE['emoji'].sub(__fix_miss_match, s)\n\n def _emoji_formatter(m):\n s = m.group(1)\n if len(s) == 6:\n return ('\\\\U%s\\\\U%s'%(s[:2].rjust(8, '0'), s[2:].rjust(8, '0')))\\\n .encode('utf8').decode('unicode-escape', 'replace')\n elif len(s) == 10:\n return ('\\\\U%s\\\\U%s'%(s[:5].rjust(8, '0'), s[5:].rjust(8, '0')))\\\n .encode('utf8').decode('unicode-escape', 'replace')\n else:\n return ('\\\\U%s'%m.group(1).rjust(8, '0'))\\\n .encode('utf8').decode('unicode-escape', 'replace')\n val = _emoji_debugger(val)\n val = WeChatMeta.RE['emoji'].sub(_emoji_formatter, val)\n return val", "async def enable(self, ctx: Context, *, guild: int = None):\n\n if guild is None:\n guild = ctx.guild\n else:\n guild = self.bot.get_guild(guild)\n\n if not guild:\n return await ctx.message.add_reaction(\"โš \")\n\n self._create_guild_config(guild)\n\n await ctx.message.add_reaction(\"โœ…\")", "def what_means_emoji(emoji: str) -> str:\n try:\n return unicodedata.name(emoji)\n except TypeError:\n return \"Not found\"", "async def _guild(self, ctx):\n if await self.config.guild(ctx.guild).guild():\n await self.config.guild(ctx.guild).guild.set(False)\n msg = _(\"Okay, I will not react to messages \" \"containing server emojis!\")\n await ctx.send(msg)\n else:\n await self.config.guild(ctx.guild).guild.set(True)\n msg = _(\"Okay, I will react to messages \" \"containing server emojis!\")\n await ctx.send(msg)", "def use_external_emojis(_) -> int:\n return 1 << 18", "def use_external_emojis(_) -> int:\n return 1 << 18", "async def invite(self, ctx):\n await ctx.send(f'๐ŸฑYou can invite me to your server using the following url:\\n{self.invite_url}'\n '\\n\\nYou will need the **Manage Server** permission to add me to a server. 
'\n f'Run `{self.heleus.command_prefix[0]}help` to see what you can customise!')", "def join(self) -> str:\n\n return _ZWJ.join(e.emoji for e in self.emojis)", "def isemoji(c):\n if type(c) == str:\n c = c.encode('utf-8')\n c = bytes(c)\n return c.decode() in UNICODE_EMOJI", "async def lg(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"lg\", emoji)", "def replace_emojis(text, replace_with=\"_EMOJI_\"):\n return RE_EMOJI.sub(replace_with, text)", "def addGuild(self, guildId):\n flag = False\n\n if self.con.recordGuild(guildId):\n self.serverSettings[str(guildId)] = '1'\n self.custom[str(guildId)] = []\n self.ignored[str(guildId)] = []\n self.whitelist[str(guildId)] = []\n flag = True\n\n return flag", "def test__parse_emoji():\n unicode_emoji = BUILTIN_EMOJIS['heart']\n custom_emoji = Emoji.precreate(202305240006, name = 'Yukari')\n \n for input_data, expected_output in (\n ({}, None),\n ({'emoji_name': None}, None),\n ({'emoji_name': unicode_emoji.unicode}, unicode_emoji),\n ({'emoji_name': None, 'emoji_id': str(custom_emoji.id)}, custom_emoji),\n ):\n output = parse_emoji(input_data)\n vampytest.assert_is(output, expected_output)", "def adduser(self, user, **kwargs):\r\n uid = user.id\r\n oldchar = None\r\n if uid in self.usercharacters:\r\n oldchar = self.usercharacters[uid]\r\n newchar = Character(self, user, **kwargs)\r\n self.usercharacters[uid] = newchar\r\n message = []\r\n message.append(f'New character: {newchar.format()}')\r\n if oldchar:\r\n message.append(f'Replaces: {oldchar.format()}')\r\n return '\\n'.join(message)", "def get_random_emoji():\n return (random.choice(get_emoji_list())).encode('utf-8').decode('utf-8')", "async def add_starboard(self, ctx):\n channel = await ctx.get_text_channel(embed=CustomEmbeds.add(author=\"Channel\",\n description=\"Send a channel to add it to the starboard!\"))\n emotes = await ctx.get_emotes(embed=CustomEmbeds.add(author=\"Emotes\",\n description=\"React with emotes and then click โœ… to add them to the starboard.\"))\n threshold = await ctx.get_int(embed=CustomEmbeds.add(author=\"Add a Threshold\",\n description=\"Send message with the minimum number of reactions for it to be added to the starboard.\"))\n\n guild_starboards = await self.starboards_collection.find_one({\"_id\": ctx.guild.id})\n if guild_starboards is None:\n starboard_len = 0\n else:\n starboard_len = len(guild_starboards.get(\"starboards\"))\n\n starboard = Starboard(index=starboard_len,\n channel=channel,\n emotes=emotes,\n threshold=threshold)\n\n await self.db_add_starboard(ctx.guild, starboard.serialize())\n await ctx.send(embed=CustomEmbeds.confirm(author=\"Starboard Added\", description=f\"ID: {starboard_len}\\n\"\n f\"Channel: {channel.mention}\\n\"\n f\"Emotes: {' '.join(emotes)}\\n\"\n f\"Threshold: {threshold}\"))", "async def quote_add(self,ctx, *message: str):\n await self.bot.type()\n num = self.add([' '.join(message), ctx.message.author.id, ctx.message.channel.id])\n await self.bot.say(\"Quote #{} has been added.\".format(num))", "async def samsung(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"samsung\", emoji)", "async def guild_icon(\n event,\n choice: (GUILD_ICON_CHOICES, 'Which icon of the guild?' 
) = 'icon',\n):\n guild = event.guild\n if (guild is None) or guild.partial:\n return Embed('Error', 'The command unavailable in guilds, where the application\\'s bot is not in.')\n \n if choice == 'icon':\n name = 'icon'\n url = guild.icon_url_as(size = 4096)\n hash_value = guild.icon_hash\n \n elif choice == 'banner':\n name = 'banner'\n url = guild.banner_url_as(size = 4096)\n hash_value = guild.banner_hash\n \n elif choice == 'discovery_splash':\n name = 'discovery splash'\n url = guild.discovery_splash_url_as(size = 4096)\n hash_value = guild.discovery_splash_hash\n \n else:\n name = 'invite splash'\n url = guild.invite_splash_url_as(size = 4096)\n hash_value = guild.invite_splash_hash\n \n if url is None:\n color = (event.id >> 22) & 0xFFFFFF\n return Embed(f'{guild.name} has no {name}', color = color)\n \n color = hash_value & 0xFFFFFF\n return Embed(f'{guild.name}\\'s {name}', color = color, url = url).add_image(url)", "async def emojiapiurl(self, ctx, url: str):\n await self.config.url.set(url)\n await ctx.tick()", "def add_command(self, message, db_session):\n user = self.ts.get_user(message)\n msg_list = self.ts.get_human_readable_message(message).split(' ')\n for index, word in enumerate(msg_list[1:]): # exclude !add_user_command\n if word[0] == '!':\n command = word.lower()\n users = msg_list[1:index + 1]\n response = ' '.join(msg_list[index + 2:])\n break\n else:\n self._add_to_whisper_queue(user, 'Sorry, the command needs to have an ! in it.')\n return\n db_commands = db_session.query(db.Command).all()\n if command[1:] in [db_command.call for db_command in db_commands]:\n self._add_to_whisper_queue(user, 'Sorry, that command already exists. Please delete it first.')\n else:\n db_command = db.Command(call=command[1:], response=response)\n if len(users) != 0:\n users = [user.lower() for user in users]\n permissions = []\n for user in users:\n permissions.append(db.Permission(user_entity=user))\n db_command.permissions = permissions\n db_session.add(db_command)\n self._add_to_whisper_queue(user, 'Command added.')\n my_thread = threading.Thread(target=self.update_command_spreadsheet,\n kwargs={'db_session': db_session})\n my_thread.daemon = True\n my_thread.start()", "def _add_message(self, chan_id: str, msg_id: str):\n if not msg_id in self._messages[chan_id]:\n self._messages[chan_id][msg_id] = {\n \"reactions\": {}\n }\n else:\n raise ValueError(\"ReactionListener tried to create space for an already listened message!\")", "def append_moar_button(self,message,user_id):\n\n data = self.app.bot.hash.hash_b64encode(f'COMMAND {user_id} send_new')\n button = {\n 'text': 'MOAR!',\n 'type': {\n '@type': 'inlineKeyboardButtonTypeCallback',\n 'data': str(data)\n }\n }\n \n if not message.get('reply_markup',None):\n message['reply_markup'] = {\n '@type': 'replyMarkupInlineKeyboard',\n 'rows': []\n }\n\n message['reply_markup']['rows'].append([button])\n return message", "def add_nickname(self, name):\n if not(name in self.nicknames):\n self.nicknames.append(name)", "def send_as_nick(self, command, msg):\n self._write(f':{self.ident.nick} {command} {msg}')" ]
[ "0.7171064", "0.71063775", "0.6896429", "0.68556017", "0.6616149", "0.6526354", "0.6435637", "0.64106715", "0.6385405", "0.6366206", "0.6337566", "0.6321024", "0.6158689", "0.60777974", "0.60739106", "0.6055111", "0.6034358", "0.6017389", "0.5974898", "0.5964543", "0.586553", "0.57447183", "0.5741229", "0.5694492", "0.5681976", "0.5675551", "0.56348896", "0.56328875", "0.5631169", "0.5579233", "0.5568828", "0.55299973", "0.55224794", "0.5515982", "0.55004597", "0.54337215", "0.54323965", "0.5414849", "0.5392664", "0.5391823", "0.53723365", "0.53687024", "0.5364022", "0.53171086", "0.53070885", "0.5303615", "0.53000104", "0.52758247", "0.52661353", "0.5248416", "0.5247558", "0.5244756", "0.52188975", "0.519344", "0.5168909", "0.51467234", "0.5135001", "0.51251835", "0.5114565", "0.5103178", "0.50878286", "0.5084615", "0.50741744", "0.50717795", "0.50509155", "0.50281113", "0.5002828", "0.49893138", "0.4983253", "0.49804968", "0.49710557", "0.49593607", "0.49593607", "0.49593607", "0.4921426", "0.49013656", "0.48866737", "0.4884013", "0.488024", "0.48794007", "0.48794007", "0.48631173", "0.48587924", "0.48559096", "0.48495787", "0.48260576", "0.48230925", "0.4819968", "0.48139572", "0.4809863", "0.48071754", "0.48038456", "0.48004302", "0.47912097", "0.4784839", "0.47832125", "0.47775954", "0.47766697", "0.47732806", "0.4772927" ]
0.75170183
0
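Each record in this dump follows the same schema: a natural-language `query`, the positive `document`, a `metadata` dict whose `objective.triplet` names the (query, document, negatives) training triple, a pool of mined `negatives`, their `negative_scores`, the positive's `document_score`, and its `document_rank` within the scored pool. In the record above the positive outscored every negative (0.75170183 against a best negative of 0.7171064), hence rank 0; in the record that follows, the positive lands at rank 21. A minimal loader sketch, assuming the records are stored as JSON Lines — the file name is hypothetical, and only the field names are taken from the records themselves:

```python
import json

def load_rows(path='retrieval_pairs.jsonl'):
    # Assumed storage: one JSON object per line; the path is a placeholder.
    with open(path, encoding='utf-8') as fh:
        for line in fh:
            yield json.loads(line)

def summarize(row):
    # Scores and ranks are serialized as strings in the dump, so cast them.
    scores = [float(score) for score in row['negative_scores']]
    return {
        'query': row['query'][:60],
        'negative_count': len(row['negatives']),
        'document_score': float(row['document_score']),
        'best_negative_score': max(scores),
        'document_rank': int(row['document_rank']),
    }

for row in load_rows():
    print(summarize(row))
```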
Picks who I like the most from the attendees.
async def pick(client, event): users = [event.user] message = yield InteractionResponse(render_joined_users(users), allowed_mentions = None, components = BUTTON_ATTEND) try: async for component_interaction in iter_component_interactions( message, timeout = 60.0, check = functools.partial(check_is_user_unique, users) ): users.append(component_interaction.user) # limit the amount of users to 10. if len(users) == 10: break yield InteractionResponse( render_joined_users(users), allowed_mentions = None, event = component_interaction ) except TimeoutError: component_interaction = None most_liked = pick_most_liked(client, users) content_parts = ['From:'] for user in users: content_parts.append('\n') content_parts.append(user.mention) content_parts.append('\n\nI like ') content_parts.append(most_liked.mention) content_parts.append(' the most.') content = ''.join(content_parts) yield InteractionResponse( content, allowed_mentions = most_liked, components = None, message = message, event = component_interaction )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def choose_winner(): \r\n max_health = Titan.max_health()\r\n winners = tuple((titan.name for titan in Titan.titans if titan.health == max_health))\r\n return winners", "def get_most_popular_merchants(self):\n if self.model:\n return self.model.wv.index_to_key[: self.num_rec]\n else:\n print(\"train the model before performing this step\")\n return None", "def most_positive_user(self):\n rating_max = 0\n rating_max_user = \"\"\n for user in self.users.values():\n rating = user.get_average_rating()\n if rating > rating_max:\n rating_max = rating\n rating_max_user = user\n else:\n continue\n return rating_max_user", "def mostHighlyRatedCastMembers (movies, count, minAppearances):\n def getAverage (allMovies, castMovies):\n # return average rating for all movies in list castMovies\n ratings = [ float(allMovies[key][8]) for key in castMovies if float(allMovies[key][8]) < 10 ]\n numMovies = len(ratings)\n if numMovies > 0:\n return sum(ratings) / numMovies\n else:\n return 0\n # OR:\n # totalRating = 0\n # numMovies = 0\n # for key in castMovies: \n # rating = float(allMovies[key][8]) # rating or profit\n # if rating < 10: # is a rating\n # totalRating += rating\n # numMovies += 1\n # if numMovies > 0:\n # return totalRating / numMovies\n # else:\n # return 0\n\n cast = castFilmography(movies, minAppearances)\n ratings = [ (getAverage(movies, castInfo[1:]), castInfo[0]) for castInfo in cast ]\n return sorted(ratings, reverse=True)[:count]", "def most_popular_gender(data):\n answer = \"\"\n genders = count_gender(data)\n if genders[0] == genders[1]:\n answer = \"Equal\"\n elif genders[0] > genders[1]:\n answer = \"Male\"\n else:\n answer = \"Female\"\n return answer", "def get_best_five(self):\n return sorted(self.speakers.iteritems(),\n key=lambda (key, val): (val, key),\n reverse=True)[:5]", "def get_meals_user_liked(username):\n meals_user_liked = []\n user_liked = Rating.objects.filter(member__username=username, like=True)\n for ratting in user_liked:\n meals_user_liked.append(ratting.meal)\n return meals_user_liked", "def most_powerful_weapon(self):\n # sets inital damge to 0\n max_damage = 0\n # sets the best weapon to nothing\n best_weapon = None\n # Loop for each item in inventory\n for item in self.inventory:\n # Code adapted from Make Your own Python Text Based Adventure\n # tries to see if the item damage is greator than the current max\n # damage and then replaces the best weapon in inventory\n try:\n if item.damage > max_damage:\n best_weapon = item\n max_damage = item.damage\n except AttributeError:\n pass\n # sends the best weapon to function\n return best_weapon", "def get_most_popular(self):\n\t\tpopular_rated = self.data_final[self.data_final['Rating'] == 10]\n\t\tpopular_jokes = popular_rated.groupby('JokeID').count().reset_index()\n\t\tpopular_jokes = popular_jokes[['JokeID','Rating']]\n\t\tpopular_jokes.columns = ['JokeID','Number_rated10']\n\t\ttop_joke = popular_jokes.sort_values(by=['Number_rated10'], ascending=False).head(1)\n\t\ttop_joke_val = top_joke['JokeID'].values[0]\n\t\tjokes_list = sorted(set(self.data_final['JokeID']))\n\t\tjoke_num = jokes_list.index(top_joke_val)\n\t\ttop_joke_desc = self.data_jokes[self.data_jokes['JokeID'] == top_joke_val].values[0][1]\n\n\t\treturn top_joke_desc, joke_num", "def current_mood(self):\n if not self.recent_results:\n log.msg(\"Short-circuiting tally results since there aren't any.\")\n return None, None, None, None\n try:\n good = reduce(lambda x, y: x + 1 if (y is True) else x, self.recent_results, 0)\n except TypeError:\n 
log.msg(\"Error reducing: %s\" % str(self.recent_results))\n raise\n total = len(self.recent_results)\n percentage = float(good) / float(total)\n choices=[v for a,v in self.MOOD_CHOICES if percentage >= a][0]\n mood=random.choice(choices)\n\n return mood, good, total, percentage", "def list_favor(self):\n if \"all\" in self.switches:\n favors = Reputation.objects.exclude(favor=0).order_by(\"-date_gossip_set\")\n self.msg(\"Characters with favor: %s\" % \", \".join(str(ob) for ob in favors))\n return\n org = self.get_organization(check_perm=False)\n favors = org.reputations.filter(Q(favor__gt=0) | Q(favor__lt=0)).order_by(\n \"-favor\"\n )\n msg = \"{wThose Favored/Disfavored by %s{n\\n\" % org\n msg += \"\\n\\n\".join(\n \"{c%s{w (%s):{n %s\" % (ob.player, ob.favor, ob.npc_gossip) for ob in favors\n )\n self.msg(msg)", "def most_read_book(self):\n reading_max = 0\n most_reads = \"\"\n for book in self.books.keys():\n rating = book.get_average_rating()\n if rating > reading_max:\n most_reads = book\n reading_max = rating\n else:\n continue\n return most_reads", "def more_popular(twitter_data, a, b):\r\n \r\n a_popularity = len(all_followers(twitter_data, a)) \r\n b_popularity = len(all_followers(twitter_data, b))\r\n if a_popularity > b_popularity:\r\n return -1\r\n if a_popularity < b_popularity:\r\n return 1\r\n return username_first(twitter_data, a, b)", "def random_item(self):\n if self.sample_negative_items_empirically:\n # just pick something someone rated!\n u = self.uniform_user()\n i = random.choice(self.dataModel.getItemIDsFromUid(u))\n else:\n i = random.randint(0,self.num_items-1)\n return i", "def __call__(self):\n return random.choice(self.fakers)", "def _select_attribute(self, attributes_list, df):\r\n entropy_all_data = self._entropy(df)\r\n\r\n mAtt = random.sample(attributes_list, math.ceil(math.sqrt(len(attributes_list)))) # select m random attributes\r\n best = 0\r\n \r\n for attr in mAtt:\r\n entropy_attribute = 0\r\n\r\n ## atributos numรฉricos: discretiza para acima da mรฉdia ou abaixo da mรฉdia\r\n if pd.api.types.is_numeric_dtype(df[attr]):\r\n mean = df[attr].mean()\r\n df_attribute_below_average = df.loc[df[attr] < mean]\r\n df_attribute_above_average = df.loc[df[attr] >= mean]\r\n entropy_attribute = len(df_attribute_below_average)/len(df)*self._entropy(df_attribute_below_average) + \\\r\n len(df_attribute_above_average)/len(df)*self._entropy(df_attribute_above_average)\r\n\r\n else:\r\n for value in df[attr].unique():\r\n df_attribute = df.loc[df[attr] == value]\r\n entropy_attribute += len(df_attribute)/len(df)*self._entropy(df_attribute)\r\n\r\n gain = entropy_all_data - entropy_attribute\r\n\r\n if gain >= best:\r\n best = gain\r\n chosen = attr\r\n return chosen", "def top_girls(self):\n return [girl for girl in self._db.girls.find().sort('rating', pymongo.DESCENDING).limit(5)]", "def more_popular(twitter_data, a, b):\n\n a_popularity = len(all_followers(twitter_data, a))\n b_popularity = len(all_followers(twitter_data, b))\n if a_popularity > b_popularity:\n return -1\n if a_popularity < b_popularity:\n return 1\n return username_first(twitter_data, a, b)", "def most_common_mutants(self):\n highest_readcount = max([mutant.read_info(self.dataset_name).total_read_count for mutant in self.dataset])\n highest_readcount_mutants = [mutant for mutant in self.dataset \n if mutant.read_info(self.dataset_name).total_read_count==highest_readcount]\n return highest_readcount_mutants", "def recommend(user_id):\n\n df = pd.read_sql(DATABASE_URL, index_col=\"id\", 
columns=[\"sex\", \"age\", \"haversine_distance\"])\n\n k = 5\n similarity = get_demographic_similarity(df, user_id)\n similarity = similarity.sort()[::-1]\n\n users = similarity[1:1 + k]\n\n # Get the charities then select the most common\n charity_counts = {}\n for user in users:\n charity_counts.ad", "def random_item(self):\n if self.sample_negative_items_empirically:\n # just pick something someone rated!\n # TODO: choose a user randomly\n u = self.uniform_user()\n i = random.choice(self.data[u].indices)\n else:\n i = random.randint(0, self.num_items - 1)\n return i", "def youngest():\n def get_age(person_list):\n return person_list['age']\n return sorted(PEOPLE_LIST, key = get_age)", "def select_leader(self):\n\n if self.leaders.size() == 1:\n return self.leaders.rand_choice()\n\n candidates = self.leaders.rand_sample(2)\n\n # randomly favourize one of them\n # best_global = choice(candidates)\n\n # should select those which has bigger fitness\n # # if one of them dominates, it will be selected as global best\n # dom = self.dominance.compare(candidates[0].costs_signed, candidates[1].costs_signed)\n #\n # if dom == 1:\n # best_global = candidates[0]\n #\n # if dom == 2:\n # best_global = candidates[1]\n\n if candidates[1].features['crowding_distance'] > candidates[0].features['crowding_distance']:\n best_global = candidates[1]\n else:\n best_global = candidates[0]\n return best_global", "def select_leader(self):\n\n if self.leaders.size() == 1:\n return self.leaders.rand_choice()\n\n candidates = self.leaders.rand_sample(2)\n\n # randomly favourize one of them\n # best_global = choice(candidates)\n\n # should select those which has bigger fitness\n # # if one of them dominates, it will be selected as global best\n # dom = self.dominance.compare(candidates[0].costs_signed, candidates[1].costs_signed)\n #\n # if dom == 1:\n # best_global = candidates[0]\n #\n # if dom == 2:\n # best_global = candidates[1]\n\n if candidates[1].features['crowding_distance'] > candidates[0].features['crowding_distance']:\n best_global = candidates[1]\n else:\n best_global = candidates[0]\n return best_global", "def selector(self, dataset, attributes, target_attr):\n\n best_gain = 0.0\n best_attr = None\n \n for attr in attributes:\n gain = self.splitmetric(dataset, attr, target_attr)\n if (gain >= best_gain and attr != target_attr):\n best_gain = gain\n best_attr = attr\n \n return best_attr", "def career_choice (upp): #input upp list\r\n\tif upp[4]==max(upp):\r\n\t\tcareer=Navy\r\n\telif upp[0]==max(upp):\r\n\t\tcareer=stellagama.random_choice([Scouts, Marines])\r\n\telif upp[2]==max(upp):\r\n\t\tcareer=Army\r\n\telif upp[3]==max(upp):\r\n\t\tcareer=Merchants\r\n\telse:\r\n\t\tcareer=Other\r\n\treturn career #outputs the chatacter's career\r", "def get_best_friends(self):\n query = read_query('content exploration/best_friends')\n response = self._submit_query(query)\n return [(elem['name']['value'], elem['num_chat']['value'].split('/')[-1]) for elem in response]", "def get_personal_best(self):\n return self._personal_best", "def get_best_speaker(self):\n max_val = -33.0\n try:\n self.value = max(self.speakers.values())\n except ValueError:\n self.value = -100\n _speaker = 'unknown'\n distance = self.get_distance()\n \n if len(self.speakers.values()) >1:\n mean_distance = self.get_m_distance()\n else:\n mean_distance = .5\n \n thres = 0\n \n if distance > -1:\n thres = max_val - distance\n else: thres = max_val\n \n if self.value >= thres and mean_distance > .49:\n for spk in self.speakers:\n if 
self.speakers[spk] == self.value:\n _speaker = spk\n break\n \n if distance > -1 and distance < .07:\n _speaker = 'unknown'\n \n return _speaker", "def female_name():\n return dice.choice(names.woman)", "def _get_max_estimated_bandit(self)->Bandit:\n # print(\"mus - \", self.mu)\n # print(\"actions - \", np.argmax(self.mu))\n unique, counts = np.unique(self.mu, return_counts=True)\n lens = counts[np.argmax(unique)] \n if lens>1: # if two actions have same argmax\n # then return arbitrarily from those max ones\n maxs = list(np.array(self.bandits)[self.mu==unique[np.argmax(unique)]])\n return np.random.choice(maxs)\n # otherwise return the max one\n return self.bandits[np.argmax(self.mu)]", "def genders(self):\n\n return Client.gender_choices", "def youngest():\n # fill it out\n newlist = sorted(PEOPLE_LIST, key=itemgetter('age'))\n return newlist", "def showBestBetUse(self) :\n bestBetUse = 0\n for level in self.level_history :\n bestBetUse = level.bet if bestBetUse < level.bet else bestBetUse\n Scenario.messageGetBestBetUse(bestBetUse)", "def get_meals_user_disliked(username):\n meals_user_disliked = []\n user_disliked = Rating.objects.filter(member__username=username, like=False)\n for ratting in user_disliked:\n meals_user_disliked.append(ratting.meal)\n return meals_user_disliked", "def min_max_rating(self):\n return self.interaction_data.label.min(), self.interaction_data.label.max()", "def _select_heuristic(self):\n\n # take a sample of rewards from the current prior of heuristics\n sample_rewards = np.random.normal(self.prior_mus, self.prior_sigmas)\n\n # select the heuristic that has the highest reward sample value\n self.best_heuristic_idx = np.argmax(sample_rewards)\n self.best_heuristic = self.heuristics[self.best_heuristic_idx]\n self.heuristic_selection.append(self.best_heuristic_idx)", "def get_targeted_insult(botnames, targeted):\n insult = random.choice(targeted)\n insult = insult.replace(\"_user_name_\", random.choice(botnames))\n return insult", "def random_item(self):\n if self.sample_negative_items_empirically:\n # just pick something someone rated!\n u = self.uniform_user()\n i = random.choice(self.data[u].indices)\n else:\n i = random.randint(0,self.num_items-1)\n return i", "def best_match(beer):\n # get a list of donuts that match sugar content for beer\n candidates = get_candidates(beer)\n span = tracer.current_span()\n span.set_tag('donuts.candidates', candidates)\n\n # send the remaining candidates to our taster and pick the best\n max_score = -1\n best_match = None\n\n for candidate in candidates:\n try:\n resp = requests.get(\n \"http://taster:5001/taste\",\n params={\"beer\": beer.name, \"donut\": candidate},\n timeout=2,\n )\n except requests.exceptions.Timeout:\n continue\n\n score = resp.json()[\"score\"]\n if score > max_score:\n max_score = score\n best_match = candidate\n\n return best_match", "def pick_steal_target(self, callback=None):\n players_copy = copy.copy(self.game.players)\n players_copy.remove(self) # don't pick yourself!\n wealthy_player = max(self.game.players, key=lambda x: x.coins)\n callback(wealthy_player)", "def find_reader_relations():\n for reader in readers:\n d100 = random.randint(1, 100)\n if d100 <= 50:\n reader_favourite_book[readers[reader]] = random.choice(list(book_ids.values()))\n\n d100 = random.randint(1, 100)\n if d100 <= 5:\n #TODO: fix so that you cannot be friend of yourself\n reader_knows[readers[reader]] = [random.choice(list(authors.values()))] + [random.choice(list(readers.values()))]\n elif d100 > 5 and d100 <= 
10:\n reader_knows[readers[reader]] = [random.choice(list(authors.values()))]\n elif d100 > 10 and d100 <= 25:\n reader_knows[readers[reader]] = [random.choice(list(readers.values()))] + [random.choice(list(readers.values()))]\n elif d100 > 25 and d100 <= 50:\n reader_knows[readers[reader]] = [random.choice(list(readers.values()))]", "def female_first():\r\n cursor.execute('SELECT name FROM female order by RANDOM() limit 1')\r\n return cursor.fetchone()[0]", "def get_best(self):\n scores, ids = self.sort_best()\n return scores[1], ids[1]", "def most_popular(user_to_tweet: Dict[str, List[tuple]], date1: int, date2: int)\\\n -> str:\n user_to_pop = {}\n most_popular_user = ''\n count = 0\n \n \n for user in user_to_tweet:\n popularity = 0\n for i in range(len(user_to_tweet[user])):\n if user_to_tweet[user][i][TWEET_DATE_INDEX] >= date1 and\\\n user_to_tweet[user][i][TWEET_DATE_INDEX] <= date2:\n popularity = popularity + \\\n user_to_tweet[user][i][TWEET_FAVOURITE_INDEX] +\\\n user_to_tweet[user][i][TWEET_RETWEET_INDEX]\n user_to_pop[user] = popularity\n \n for users in user_to_pop:\n if user_to_pop[users] == \\\n max(user_to_pop.values()):\n most_popular_user = most_popular_user + users\n count = count + 1\n \n if count > 1:\n return 'tie'\n return most_popular_user", "def top_boys(self):\n return [boy for boy in self._db.boys.find().sort('rating', pymongo.DESCENDING).limit(5)]", "def pick_disaster():\n num = random.randint(0, 4)\n disasters = disaster_list.keys()\n current_disaster = disasters[num]\n disaster_count[current_disaster] += 1\n return current_disaster", "def male_name():\n return dice.choice(names.man)", "def name():\r\n return _random.choice([male_first(), female_first()])", "def answersuggestion(self,character,room,weapon,suggester):\n # TODO: suggester is always self.ixHotSeat? 
Could do away with param\n #\n # Set of showable card IDs\n setShowables = self.myCardSet & set((character,room,weapon))\n # List of showable card IDs\n showables = list(setShowables)\n # List of indices in myCards of showable card IDs\n ixShowables = [self.myCards.index(card) for card in showables]\n # List of indices in myCards of showable and already shown (to this \n # suggester) card IDs\n ixShownShowables = np.array([ixShowables[ix] for ix in \n range(len(ixShowables)) \n if self.myCardsShownTo[suggester-1,ixShowables[ix]]])\n # ixShowables is to be a numpy array too - but has to be transformed \n # after the list comprehension lest an error be thrown on empty list\n ixShowables = np.array(ixShowables)\n if len(ixShowables):\n # We have one or more of the suggested cards and need a strategy to\n # show the least useful one to our opponent!\n if len(ixShownShowables):\n # If we have cards we have shown this player before; select the\n # one of these we have shown the most times previously.\n ixCard = ixShownShowables[\n np.argmax(self.myCardsShownCounts[ixShownShowables])]\n self.myCardsShownCounts[ixCard] += 1\n self.event_seenresponse(self.myCards[ixCard],0,suggester)\n return self.myCards[ixCard]\n else:\n # Otherwise, select the card included in the suggestion we have\n # shown the most times previously\n ixCard = ixShowables[\n np.argmax(self.myCardsShownCounts[ixShowables])]\n self.myCardsShownTo[suggester-1,ixCard] = True\n self.myCardsShownCounts[ixCard] += 1\n self.event_seenresponse(self.myCards[ixCard],0,suggester)\n return self.myCards[ixCard]\n else:\n # We have none of the suggested cards - must pass\n self.event_pass(character,room,weapon,0)\n return game.NULLCARD", "def highest_rated_book(self):\n rating_max = 0\n best_rated_book = \"\"\n for book in self.books.keys():\n rating = book.get_average_rating()\n if rating > rating_max:\n rating_max = rating\n best_rated_book = book\n else:\n continue\n return best_rated_book", "def attributeSelection(data, attributes, class_label, indices=None):\n\tbest = 0\n\tbestIndex = 0\n\tcounter = 0\n\tfor i in attributes:\n\t\tinfoG = informationGain(data, class_label, i, indices)\n\t\tif infoG > best:\n\t\t\tbest = infoG\n\t\t\tbestIndex = counter\n\t\tcounter += 1 \n\t\n\treturn bestIndex", "def getMostUsedCount( self, limit ):\n cur = self.__conn.cursor()\n cur.execute( \"\"\"SELECT Data, COUNT(Data) AS UseCount\n FROM PrivilegeUse\n GROUP BY Data\n ORDER BY UseCount DESC\n LIMIT %d\"\"\", limit )\n class Use:\n def __init__( self, faq, count ):\n self.faq = faq\n self.count = count\n \n return [ Use(row[0], row[1]) for row in cur.fetchall() ]", "def get_best_fitness(self):\n f = max(self.characters, key=operator.attrgetter('fitness'))\n self.best_fitness = round(f.fitness, 3)\n self.best_candidate = f", "def recommend_me_team(self, user_id: int, user_ids_per_group: list, n_recommendations: int):\n res_arr = np.array([self.user_similarity.loc[user_id, group_ids].mean() for group_ids in user_ids_per_group])\n res_inds = np.argsort(res_arr)[::-1][:n_recommendations]\n return res_inds, res_arr[res_inds]", "def recommend(r ,username, users):\r\n # first find nearest neighbor\r\n nearest = computeNearestNeighbor(r, username, users)[0][1]\r\n recommendations = []\r\n # now find bands neighbor rated that user didn't\r\n neighborRatings = users[nearest]\r\n userRatings = users[username]\r\n for artist in neighborRatings:\r\n if not artist in userRatings:\r\n recommendations.append((artist, neighborRatings[artist]))\r\n # using the fn 
sorted for variety - sort is more efficient\r\n return sorted(recommendations, key=lambda artistTuple: artistTuple[1], reverse = True)", "def _best_individual(self):\n return max(self._population, key=attrgetter(\"fitness\"))", "def gender():\n return random.choice((GENDER_FEMALE, GENDER_MALE))", "def get_food_most_calories(df=df):\r\n max_calories_row = df.loc[df['Calories'].idxmax()]\r\n return max_calories_row['Item']", "def most_likes(data):\r\n max_likes = 0\r\n for key in data:\r\n num_likes = len(data[key])\r\n if num_likes >= max_likes:\r\n max_likes = num_likes\r\n most_likes_users = []\r\n for key in data:\r\n if key[-1] != \"$\":\r\n num_likes = len(data[key])\r\n if num_likes == max_likes:\r\n most_likes_users += [key]\r\n return most_likes_users", "def get_lucky(self):\n lucky_animal = random.choice(self.adoption_pool)\n return self.finish_successful_adoption(lucky_animal)", "def beer_reccomendations(data: pd.DataFrame):\n # Add written review polarity and subjectivity using TextBlob sentiment analysis\n data = utils.add_review_polarity_subjectivity(data)\n\n # Get best beeres by indexing beer ID with top review polarity and review overall\n best_beers = data['beer_beerId'].loc[ (data['review_polarity'] >= 0.85) & (data['review_overall']==5) ]\n\n print(f\"These three beer reccomendations have 5 star reviews and top positive scores based on written reviews: {best_beers[0:3]}\")", "def random_pick (self, checkfn=None):\n tweight = self.total_weight(checkfn=checkfn)\n if tweight == 0:\n return None, None\n\n n = random.uniform(0, tweight)\n\n for num, item in enumerate(self):\n if checkfn is not None and not checkfn(item):\n continue\n\n if item.weight >= n:\n return num, item\n n = n - item.weight\n\n return None, None", "def client_with_most_rented_genre(self, genre):\n self.__validator.validate_genre(genre)\n genre = self.__formatter.format_genre(genre)\n\n rentals = self.get_list()\n number_of_rented_movies_genre = dict.fromkeys([rental.client for rental in rentals], 0)\n for rental in rentals:\n if genre in rental.movie.genre:\n number_of_rented_movies_genre[rental.client] += 1\n clients = []\n for client, rentals in number_of_rented_movies_genre.items():\n clients.append(ClientDTO(client, rentals))\n client = sorted(clients, key = lambda client: client.no_rentals, reverse=True)[0]\n if client.no_rentals == 0:\n raise ValueError(\"Nu exista clienti care sa fi inchiriat filme avand genul \" + str(genre))\n return client", "def select_random_meme(self):\n cursor = self.conn.cursor()\n cursor.execute(f\"select meme_id from memes where include_random and not blacklisted order by random() limit 1\")\n result = cursor.fetchone()\n cursor.close()\n return result[0]", "def best(self, side):\n return Library.functions.best(self._book, side)", "def carnivore_eats(self):\n self.order_by_fitness()\n for carn in self.fauna_list['Carnivore']:\n food_required = carn.parameters['F']\n amount_to_eat = 0\n not_eaten_animals = []\n for i, herb in enumerate(self.fauna_list['Herbivore']):\n if food_required <= amount_to_eat:\n not_eaten_animals.extend(self.fauna_list['Herbivore'][i:])\n break\n elif np.random.random() < carn.probability_of_kill(herb):\n if food_required - amount_to_eat < herb.weight:\n amount_to_eat += herb.weight\n elif food_required - amount_to_eat > herb.weight:\n amount_to_eat += food_required - amount_to_eat\n else:\n not_eaten_animals.append(herb)\n carn.animal_eats(amount_to_eat)\n self.fauna_list['Herbivore'] = not_eaten_animals", "def 
favorite_beer_based_on_written_reviews(data: pd.DataFrame):\n # Add written review polarity and subjectivity using TextBlob sentiment analysis\n data = utils.add_review_polarity_subjectivity(data)\n\n # Get top beer styles by selecting reviews with polarity >= 0.65\n top_styles = data['beer_style'].loc[data['revew_polarity'] >= 0.65].value_counts()\n\n print(f\"The favorite beer style based on written reviews is {top_styles.index[0]}\")", "def get_best_matching(self):\r\n from django.db.models import Q, Avg\r\n import copy\r\n import operator\r\n if self.user is None:\r\n raise User.DoesNotExist\r\n users = User.objects.all()\r\n if self.type == FRIENDS_ONLY:\r\n friends = Friends.objects.filter(Q(user_one_id=self.user) | Q(user_two_id=self.user))\r\n if len(friends) > 2:\r\n users = users.filter(Q(id=friends.values('user_one_id')) | Q(id=friends.values('user_two_id')))\r\n\r\n user_sims = {}\r\n prefs = {}\r\n for user in users:\r\n tab = {}\r\n scores = Score.objects.values('game_id', 'score').filter(user_id=user)\r\n for score in scores:\r\n tab.update({score['game_id']: score['score']})\r\n prefs.update({copy.deepcopy(user.id): copy.deepcopy(tab)})\r\n\r\n for user in users:\r\n sim = self.pearson(prefs, self.user, user.id)\r\n user_sims.update({user.id: sim})\r\n\r\n print(user_sims)\r\n del user_sims[self.user] # deletion of user for whom the analysis is beeing performed\r\n user_sims = sorted(user_sims.items(), key=operator.itemgetter(1), reverse=True) # dictionary containing user_ids and users' similarities\r\n if len(user_sims) < 3:\r\n return self.get_most_popular()\r\n\r\n games_f = Score.objects.values('game_id', 'score').filter(user_id=user_sims[0][0]).order_by('-score')[:3]\r\n games_s = Score.objects.values('game_id', 'score').filter(user_id=user_sims[1][0]).order_by('-score')[:3]\r\n\r\n recommended_games = {}\r\n grd = {}\r\n games_f_dict = dict([(g['game_id'], g['score']) for g in games_f])\r\n recommended_games.update(dict(sorted(games_f_dict.items(), key=operator.itemgetter(1), reverse=True)))\r\n\r\n games_s_dict = dict([(g['game_id'], g['score']) for g in games_s])\r\n recommended_games.update(dict(sorted(games_s_dict.items(), key=operator.itemgetter(1), reverse=True)))\r\n\r\n for game in recommended_games:\r\n scores = list(Score.objects.values('game_id').filter(game_id=game).annotate(Avg('score')))\r\n idn = scores[0]['game_id']\r\n avg = scores[0]['score__avg']\r\n grd.update({idn: avg})\r\n\r\n return grd", "def personal_best(scores):\n return max(scores)", "def how_popular_most_popular(data):\r\n #list of artists\r\n artists = []\r\n for key in data:\r\n if key[-1] != \"$\":\r\n for x in data[key]:\r\n artists += [x]\r\n sorted_artists = selection_sort(artists)\r\n count = 1\r\n max_count = 1\r\n max_artists = []\r\n for i in range(len(sorted_artists)-1):\r\n #ends at second to last index because I use i and i + 1\r\n if sorted_artists[i] == sorted_artists[i+1]:\r\n count += 1\r\n else:\r\n if count == max_count:\r\n max_artists += [sorted_artists[i]]\r\n count = 1\r\n elif count > max_count:\r\n max_artists = []\r\n max_artists += [sorted_artists[i]]\r\n max_count = count\r\n count = 1\r\n return max_count", "def _get_friends_random_list(self, citizen):\r\n \r\n number_friends = int(random.uniform(len(citizen.friends)*0.05, len(citizen.friends)*0.2))\r\n return random.sample(citizen.friends, number_friends)", "def get_friend_with_most_friends(friendships=friendships):\n friend_list = defaultdict(list)\n for i, j in friendships:\n friend1 = users[i]\n friend2 = 
users[j]\n friend_list[friend1].append(friend2)\n friend_list[friend2].append(friend1)\n user_name = sorted(friend_list, key=lambda friend: len(friend_list[friend]), reverse=True)[0]\n user_friends = friend_list[user_name]\n return user_name, user_friends", "def someone():\n return random.choice(list(filter(lambda member: member != ADMIN, CHANNEL_MEMBERS)))", "def get_champion(self):\n return max(self.genomes)", "def male_first():\r\n cursor.execute('SELECT name FROM male order by RANDOM() limit 1')\r\n return cursor.fetchone()[0]", "def suggested(max: int = None):\n for user_dict in client.suggested(max=max):\n print(json.dumps(user_dict))", "def get_youngest_five():\n\n try:\n from models import User\n\n users = session.query(User).order_by(asc(User.age)).limit(5)\n user_list = []\n for user in users:\n user_list.append(user)\n\n return sorted(user_list, key=lambda u: u.name)\n except Exception as e:\n logger.error(e)", "def get_five_random(self):\r\n if self.get_length() > 5:\r\n random_selection = []\r\n\r\n from random import randrange\r\n\r\n for i in range(0, 5):\r\n while True:\r\n rnd = randrange(0, self.get_length())\r\n if self.get_tweet(rnd) not in random_selection:\r\n random_selection.append(self.get_tweet(rnd))\r\n break\r\n return random_selection\r\n else:\r\n return self.tweets", "def _name_champion(self):\n # TODO BREAK TIES\n return max(self.teams, key=lambda team: len(team.wins))", "def recommend_for_new_user(titles=False, n_max=10):\n return reader.UserList().get_most_popular_articles(titles=titles)[: n_max]", "def get_recommended_games(user):\n games = list(Game.objects.all())\n games.sort(key = lambda x: abs(x.level-user.level))\n return games[0:5]", "def get_recommendations(name, data):\r\n #sorts preferences in alphabetical order\r\n #do this to make it easier to compare\r\n for key in data:\r\n data[key] = selection_sort(data[key])\r\n most_similar_key = \"\"\r\n max_matches = 0\r\n for key in data:\r\n if not(key[-1] == \"$\" or data[key] == data[name]):\r\n \"\"\"if the person is not private or does not have the same data\"\"\"\r\n matches = num_matches(data[key], data[name])\r\n if matches > max_matches:\r\n most_similar_key = key\r\n max_matches = matches\r\n if most_similar_key == \"\":\r\n print(\"No recommendations available at this time\")\r\n return 1\r\n else:\r\n final_recommendations = []\r\n for x in data[most_similar_key]:\r\n if x not in data[name]:\r\n final_recommendations += [x]\r\n return final_recommendations", "def fingers(self):\n\n return Finger.name_choices", "def showWorstBetUse(self) :\n worstBetUse = self.level_history[0].bet\n for level in self.level_history :\n worstBetUse = level.bet if worstBetUse > level.bet else worstBetUse\n Scenario.messageGetWorstBetUse(worstBetUse)", "def friends(max_shyness, audience):\r\n\tcurrently_standing = 0\r\n\taudience_needed = 0\r\n\tfor i in range(max_shyness+1):\r\n\t\tif currently_standing < i:\r\n\t\t\taudience_needed += 1\r\n\t\t\tcurrently_standing = i\r\n\t\tcurrently_standing += int(audience[i])\r\n\treturn audience_needed", "def get_fortune():\n data_file = get_data_file()\n fortunes=get_fortunes(data_file)\n return random.choice(fortunes)", "def choose_target(self, agents):\n\n number_of_suspects = [0]*(len(agents))\n number_of_suspects_per_agent = []\n\n index = 0\n for a1 in agents:\n if not a1.is_impostor():\n for a2 in agents:\n if self.km.suspects(a1.agent_id, a2.agent_id):\n number_of_suspects[index] = number_of_suspects[index] + 1\n else:\n number_of_suspects[index] = 999999\n 
number_of_suspects_per_agent.append((a1.agent_id,number_of_suspects[index]))\n index = index + 1\n\n self.target = min(number_of_suspects_per_agent, key = lambda t: t[1])[0]", "def _filter_to_most_specific(self, graph, classlist):\n candidates = {}\n for brickclass in classlist:\n sc_query = f\"SELECT ?subclass WHERE {{ ?subclass rdfs:subClassOf+ <{brickclass}> }}\"\n subclasses = set([x[0] for x in graph.query(sc_query)])\n # if there are NO subclasses of 'brickclass', then it is specific\n if len(subclasses) == 0:\n candidates[brickclass] = 0\n continue\n # 'subclasses' are the subclasses of 'brickclass'. If any of these appear in\n # 'classlist', then we know that 'brickclass' is not the most specific\n intersection = set(classlist).intersection(subclasses)\n if len(intersection) == 1 and brickclass in intersection:\n candidates[brickclass] = 1\n else:\n candidates[brickclass] = len(intersection)\n most_specific = None\n mincount = float(\"inf\")\n for specific, score in candidates.items():\n if score < mincount:\n most_specific = specific\n mincount = score\n return most_specific", "def get_food_most_calories(df=df):\n return df[df.Calories == df.Calories.max()][\"Item\"].values[0]", "def most_popular(self, n):\n return popular_tags", "def assign_popularity_to_tweet(self, influencer, tweet):\n twNoLike = self.userTweetsStat[influencer][0][tweet]['like']\n twNoRt = self.userTweetsStat[influencer][0][tweet]['RT']\n twNoFlwr = self.userTweetsStat[influencer][0][tweet]['follower']\n twPopularity = (twNoLike + 2*twNoRt)/twNoFlwr\n \n return twPopularity", "def gender():\r\n\r\n return _random.choice(['Male', 'Female'])", "def getHighestRank_Toilet(self):\n\n # filter out low confidences\n #maxConfidence = max(self.Predictors, key=operator.attrgetter('confidence'))\n #p = [p for p in self.Predictors if p.confidence == maxConfidence]\n \n \n p = self.Predictors\n \n if len(p) == 1:\n # only one predictor has high confidence\n chosenPredictor = p[0]\n elif len(p) > 1:\n random.shuffle(p, random = rps.randomRange)\n \n # drop the first 37% and grab the best \n drop = round(len(p) * 0.37) - 1\n initial = p[:drop]\n maxConfidence = max(initial, key=operator.attrgetter('confidence'))\n maxConfidence = maxConfidence.confidence\n \n toCheck = p[drop:]\n for p in toCheck:\n if p.confidence >= maxConfidence:\n chosenPredictor = p\n break\n else:\n chosenPredictor = toCheck[-1]\n \n rankConfidence = chosenPredictor.confidence\n return chosenPredictor, rankConfidence", "def __choose_best_matching_candidate(candidates, artist):\n\n artist_names = set()\n for match in candidates:\n artist_names.add(match[1])\n\n # If there is more than 1 matched artist:\n if len(artist_names) > 1:\n \n best_distance = 10000\n best_artist = \"\"\n\n # Calculate the levenshtein edit distance between the searched artist name and the artist names in the search results.\n for matched_artist in artist_names:\n distance = editdistance.eval(matched_artist, artist)\n if distance < best_distance:\n best_distance = distance\n best_artist = matched_artist\n\n # Then exclude from candidates all matches that are NOT from the best artist\n candidates = [candidate for candidate in candidates if candidate[1] == best_artist]\n else:\n best_artist = artist_names.pop()\n best_distance = editdistance.eval(best_artist, artist)\n\n # Threshold candidate name to the artist name\n ratio = best_distance/len(artist)\n # Allow ~15% difference\n if ratio > 0.15:\n raise MatchNotFoundError(\"Closest artist is too far of the queried artist\")\n\n # 
Descending list\n sort_on_num_ratings = sorted(candidates, key=lambda cand: cand[2], reverse=True)\n\n # Take the one with the most votes\n selected = sort_on_num_ratings[0]\n\n # Unless it has a rating lower than 4.\n if selected[3] < 4:\n\n sort_on_rating = sorted(candidates, key=lambda cand: cand[3], reverse=True)\n\n # If there is one with a rating higher than 4, select that one. \n if sort_on_rating[0][3] > 4:\n selected = sort_on_rating[0]\n\n return selected", "def find_male_adj(novel):\n return find_gender_adj(novel, False)", "def get_most_popular_annotations(ambiguous_entity, k=2):\n freq = [(key, len(value)) for key, value in ambiguous_entity.annotated_corpus.items()]\n freq = sorted(freq, key=lambda x: x[1], reverse=True)\n return [x[0] for x in freq[:k]]", "def most_popular_artist(our_data):\n counter_dict = {}\n for artist in all_artists(our_data):\n if artist in counter_dict:\n counter_dict[artist] += 1\n else:\n counter_dict[artist] = 1\n maximum_albums = max(counter_dict.values())\n artist_lists = []\n for keys, values in counter_dict.items():\n if values == maximum_albums:\n artist_lists.append(keys) \n return artist_lists", "def max_gain(self):\n if self.val1:\n val1_gain_tuple, val0_gain_tuple = self.val1.max_gain(), self.val0.max_gain()\n if val1_gain_tuple.gain > val0_gain_tuple.gain:\n return val1_gain_tuple\n else:\n return val0_gain_tuple\n elif self.attributes:\n filtered_data = filter_data(self.data,self.ancestors)\n max_attribute, max_gain = max([(attribute,\n self.heuristic(self,attribute)) for attribute in self.attributes],\n key = lambda x: x[1])\n return gain_tuple(self, max_attribute, max_gain)\n return gain_tuple(None, '', 0)", "def _choose_best_option(self):" ]
[ "0.5782652", "0.5748125", "0.57463026", "0.57423043", "0.57265854", "0.5470534", "0.54608333", "0.54510725", "0.5380608", "0.5367481", "0.5348207", "0.53333825", "0.5278549", "0.52687156", "0.5235468", "0.52225065", "0.52134794", "0.5213257", "0.52034616", "0.5179194", "0.5176852", "0.51672465", "0.51575154", "0.51575154", "0.5134499", "0.51328886", "0.5127388", "0.51221085", "0.510155", "0.50624454", "0.50610495", "0.5050896", "0.50392234", "0.4996129", "0.49941596", "0.49906772", "0.499013", "0.49740967", "0.4974036", "0.49697903", "0.49635625", "0.49467522", "0.49466652", "0.49396548", "0.49361476", "0.49268118", "0.4921792", "0.4914612", "0.4907743", "0.49046278", "0.49030557", "0.4900437", "0.4898707", "0.48913226", "0.48861405", "0.48857743", "0.4882785", "0.4866731", "0.4856215", "0.48502716", "0.48499933", "0.4849565", "0.48432374", "0.4828343", "0.48266858", "0.48228374", "0.48217037", "0.4817616", "0.48161757", "0.4814744", "0.4814221", "0.48015544", "0.47972292", "0.47956884", "0.47947598", "0.4789165", "0.47885355", "0.47844273", "0.47835794", "0.47737393", "0.47723833", "0.477206", "0.47703728", "0.4759389", "0.47581872", "0.47581795", "0.47566223", "0.47526547", "0.47511786", "0.47499612", "0.47455227", "0.47391874", "0.4726806", "0.47250533", "0.47119144", "0.47112137", "0.4709117", "0.47022662", "0.47014955", "0.46970943" ]
0.5174069
21
Creates an introduction embed after filling a form.
async def introduce_myself():
    return INTRODUCTION_FORM
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def render_intro():\n return render_template(\"survey_start.html\", survey_title = survey.title, \n survey_instructions = survey.instructions)", "def _build_about_embed(self) -> discord.Embed:\n with self.about_aoc_filepath.open(\"r\", encoding=\"utf8\") as f:\n embed_fields = json.load(f)\n\n about_embed = discord.Embed(title=self._base_url, colour=Colours.soft_green, url=self._base_url)\n about_embed.set_author(name=\"Advent of Code\", url=self._base_url)\n for field in embed_fields:\n about_embed.add_field(**field)\n\n about_embed.set_footer(text=f\"Last Updated (UTC): {datetime.utcnow()}\")\n\n return about_embed", "def IntroFieldWidget(field, request):\n return introFieldWidgetFactory(field, request)", "def add_oopsy_form_new_description():\n form = AddOopsyForm({'description': 'Add new', 'new_description': 'New description', 'points': 3})\n return form", "def add_oopsy_form_set_description():\n form = AddOopsyForm({'description': 'Left mess', 'new_description': '', 'points': 3})\n return form", "def add_smiley_form_new_description():\n form = AddSmileyForm({'description': 'Add new', 'new_description': 'New description', 'points': 3})\n return form", "def intro_slide(prs):\n # pylint: disable=too-many-locals\n slide = prs.slides.add_slide(prs.slide_layouts[BLANK_SLIDE])\n slide = slide_title_header(\n slide, 'Explanation of Analysis', include_time=False)\n\n explanation_file_path = os.path.join(\"resources\", \"metric_explanation.txt\")\n\n if os.path.exists(explanation_file_path):\n with open(explanation_file_path, 'r', encoding='utf-8') as filer:\n content = filer.readlines()\n\n content2 = []\n for cont in content:\n cropped = cont.split('\\r\\n')[0]\n content2.append(cropped)\n content = content2\n filer.close()\n\n top = Inches(0.81)\n left = Inches(0.42)\n width = Inches(11)\n height = Inches(6)\n txt_box = slide.shapes.add_textbox(left, top, width, height)\n text_frame = txt_box.text_frame\n text_frame.word_wrap = True\n\n paragraph = text_frame.paragraphs[0]\n paragraph.text = content[0]\n paragraph.font.size = Pt(12)\n paragraph.font.bold = True\n\n for i in range(1, len(content)):\n paragraph = text_frame.add_paragraph()\n paragraph.text = content[i]\n\n if i == 5:\n paragraph.font.size = Pt(12)\n paragraph.font.bold = True\n else:\n paragraph.font.size = Pt(10)\n paragraph.font.bold = False\n\n else:\n print(f\"{WARNING}WARNING - file 'metric_explanation.txt' not found.{NORMAL}\")\n\n return prs", "def add_intro(self):\n page = lambda x: pkgutil.get_data(\n 'pyscp_ebooks',\n 'resources/wanderers_library/{}.xhtml'.format(x)).decode('UTF-8')\n self.add_page('Cover Page', page('cover'))\n self.add_page('Introduction', page('intro'))\n license = parser.bs(page('license'))\n license.find(class_='footer').string = arrow.now().format('YYYY-MM-DD')\n self.add_page('License', license.div.prettify())\n self.add_page('Title Page', page('title'))", "def form(update, context):\n update.message.reply_text(\"\"\"Fill out the form 👇 👇 👇\n https://forms.gle/VREhdtCNqJ6rZNfQ7\"\"\")", "def introFieldWidgetFactory(field, request):\n return widget.FieldWidget(field, IntroWidget(request))", "def show_form():\n\n prompts = story.prompts\n\n return render_template(\"base.html\", prompts = prompts )", "async def prepembed(ctx, channel:discord.TextChannel, *, jsonInput):\n jso = json.loads(jsonInput)\n title = jso['title'] if 'title' in jso else \"\"\n desc = jso['description'] if 'description' in jso else \"\"\n titleUrl = jso['titleUrl'] if 'titleUrl' in jso else \"\"\n hexcolor = 
jso['hexColor'] if 'hexColor' in jso else \"#2E66B6\"\n webcolor = jso['webColor'] if 'webColor' in jso else \"\"\n thumbnailUrl = jso['thumbnailUrl'] if 'thumbnailUrl' in jso else \"\"\n authorName = jso['authorName'] if 'authorName' in jso else \"\"\n authorUrl = jso['authorUrl'] if 'authorUrl' in jso else \"\"\n authorIcon = jso['authorIcon'] if 'authorIcon' in jso else \"\"\n if 'author' in jso:\n authorName = ctx.message.author.name\n authorIcon = ctx.message.author.avatar_url_as(format=\"jpg\")\n fields = jso['fields'] if 'fields' in jso else \"\"\n footerText = jso['footerText'] if 'footerText' in jso else \"\"\n footerUrl = jso['footerUrl'] if 'footerUrl' in jso else \"\"\n imageUrl = jso['imageUrl'] if 'imageUrl' in jso else \"\"\n embed = assemble_embed(\n title=title,\n desc=desc,\n titleUrl=titleUrl,\n hexcolor=hexcolor,\n webcolor=webcolor,\n thumbnailUrl=thumbnailUrl,\n authorName=authorName,\n authorUrl=authorUrl,\n authorIcon=authorIcon,\n fields=fields,\n footerText=footerText,\n footerUrl=footerUrl,\n imageUrl=imageUrl\n )\n await channel.send(embed=embed)", "def get_intro_message() -> str:\n return \"\"\"You are about to begin a new record.\nType the text sample you want to record.\nThis first sample MUST be typed by the real user (no impostor data).\"\"\"", "def render_intro(self, ctx, data):\n\t\tfor key in [\"_intro\", \"description\"]:\n\t\t\tif self.service.getMeta(key, default=None) is not None:\n\t\t\t\tintroKey = key\n\t\t\t\tbreak\n\t\telse:\n\t\t\tintroKey = None\n\t\tif introKey is None:\n\t\t\treturn ctx.tag[\"\"]\n\t\telse:\n\t\t\treturn ctx.tag[T.xml(self.metaCarrier.buildRepr(introKey, \n\t\t\t\tcommon.HTMLMetaBuilder(self.macroPackage),\n\t\t\t\traiseOnFail=False))]", "def introduction_function(self):\n pass", "def add_smiley_form_set_description():\n form = AddSmileyForm({'description': 'Folded washing', 'new_description': '', 'points': 3})\n return form", "def madlib_form():\n story_id = request.args[\"story_id\"]\n story = stories[story_id]\n prompts = story.prompts\n\n return render_template(\"madlib_form.html\", story_id = story_id, title = story.title, prompts = prompts)", "def register_adhocs(self):\n aboutform = self.plugin['xep_0004'].makeForm('form', \"About SleekBot\")\n aboutform.addField('about', 'fixed', value= self.__doc__)\n self.plugin['xep_0050'].addCommand('about', 'About Sleekbot', aboutform)\n pluginform = self.plugin['xep_0004'].makeForm('form', 'Plugins')\n plugins = pluginform.addField('plugin', 'list-single', 'Plugins')\n for key in self.cmd_plugins:\n plugins.addOption(key, key)\n plugins = pluginform.addField('option', 'list-single', 'Commands')\n plugins.addOption('about', 'About')\n #plugins.addOption('config', 'Configure')\n self.plugin['xep_0050'].addCommand('plugins', 'Plugins', pluginform, self.form_plugin_command, True)", "def get_form(self):\n # setup request layer\n self.request = TestRequest()\n # get add view\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newEnsemble\")\n # update the form once to initialise all widgets\n form.update()\n # go through all widgets on the form and update the request with default values\n data = {}\n for widget in form.widgets.values():\n data[widget.name] = widget.value\n data.update({\n 'form.widgets.IDublinCore.title': u\"My EN Experiment\",\n 'form.widgets.IDublinCore.description': u'This is my experiment description',\n 'form.widgets.experiment_type': ISDMExperiment.__identifier__,\n 'form.widgets.datasets.count': '1',\n 'form.widgets.datasets.experiment.0': 
unicode(self.sdmexp.UID()),\n 'form.widgets.datasets.dataset.0': [unicode(self.sdmproj.UID())],\n })\n self.request.form.update(data)\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newEnsemble\")\n return form", "def display_form():\n return render_template(\"form.html\",\n title=\"Welcome Form\",\n heading=\"Please fill in this form\",)", "async def about(self, ctx):\n embed = discord.Embed(title = f\"About {self.bot.user.name}\", color = discord.Color.blurple())\n embed.set_thumbnail(url = self.bot.user.avatar_url)\n embed.add_field(name = \"Developers\", value = \"Kowlin#4417 & A Trash Coder#0981\", inline = False)\n embed.add_field(name = \"Library\", value = \"discord.py rewrite\", inline = False)\n embed.add_field(name = \"Source Code\", value = \"[Click here](https://github.com/kowlintechnologies/DHB)\", inline = False)\n embed.add_field(name = \"Links\", value = \"[Docs](https://dhb-documentation.readthedocs.io/en/latest/index.html) | [Support](https://discord.gg/KEkwrwd) | [Invite](https://discordapp.com/api/oauth2/authorize?client_id=592811241756688405&permissions=2080762998&scope=bot)\")\n await ctx.send(embed = embed)", "async def setup_embed(self):\n\n # init\n embed = Embed()\n embed.colour = 0xF54719\n\n # setting up\n if(self.title != None):\n embed.title = self.title\n \n if(self.description != None):\n embed.description = self.description\n \n if(self.colour != None):\n embed.colour = self.colour\n \n if(self.footer != None):\n embed.set_footer(text = self.footer, icon_url = self.client.user.avatar_url)\n \n else:\n embed.set_footer(text = f\"v{Bot_config.version} - {Bot_config.phase} | Credit : DrLarck & DrMegas\", icon_url = self.client.user.avatar_url)\n \n if(self.thumb != None):\n embed.set_thumbnail(url = self.thumb)\n\n embed.set_author(name = self.client.user.name, icon_url = self.client.user.avatar_url)\n \n return(embed)", "def embed():", "async def _create_embed(self, event, info):\n\n e = discord.Embed(url=info.get(\"url\"))\n e.title = \"%s %s!\" % (info.get(\"streamer\"), info.get(\"live_status\"))\n e.add_field(name=\"Stream title\", value=info.get(\"title\"), inline=False)\n e.add_field(name=\"Begin:\", value=event.begin.format(\"HH:mm:ss ZZZ\") + \" (\" + event.begin.humanize() + \")\", inline=False)\n e.add_field(name=\"Duration: \", value=str(event.duration), inline=False)\n #e.add_field(name=\"Link\", value=info.get(\"url\"), inline=False)\n e.set_image(url=info.get(\"thumbnail\") or e.Empty)\n return e", "def embed(ctx=None, title=None, description=None, fields=None, customFooter=False, customThumbnail=None, customColor=None, image=None):\n\n e = discord.Embed(title=title, description=description)\n if customColor is None:\n e.color = color()\n else:\n e.color = color(customColor)\n \n if fields != None:\n index = 0\n # Please fix the code below, There's nothing wrong with it, it's just messy and I'm sure that's not the right way to do it.\n for field in fields:\n session = []\n for key, value in field.items():\n session.append(key)\n\n if key == \"n\":\n name = value \n \n if key == \"v\":\n xValue = value \n \n if key == \"inline\":\n inline = value \n \n if not \"inline\" in session:\n inline = False\n \n e.add_field(name=f\"{name}\", value=xValue, inline=inline)\n \n if not customFooter:\n footer(e, ctx)\n \n if image is None:\n try:\n if customThumbnail is None:\n e.set_thumbnail(url=ctx.author.avatar_url)\n else:\n e.set_thumbnail(url=customThumbnail)\n except:\n pass \n else:\n e.set_image(url=image)\n return e", "def 
introduction(self):\n \n self.display_message('Press Space')\n\n if self.intro_played:\n \n if self.input_control == 'space':\n self.game_state = 'play_game'\n# self.update_letters_in_play()\n self.get_new_prompt()\n\n else:\n self.play_sound('press_space_when', self.standard_voice)\n self.intro_played = True", "async def faq(self, ctx):\n embed = discord.Embed(title='FAQ',\n color=self.bot.color)\n entries = {'How do I add this bot to my server?':\n 'Use `invite` or click the link in `help` (you must have Manage Server permissions).',\n 'Hey, can you add (some feature)?':\n 'Use `suggest`.',\n 'None of the commands are working!':\n 'The bot may be missing permissions or you may have been automatically blacklisted for spam. '\n 'If the problem persists, report it.',\n 'What character is that in the profile picture?':\n '[Shiro from Sewayaki Kitsune no Senko-san!](https://myanimelist.net/character/167062/Shiro)'}\n for name, value in entries.items():\n embed.add_field(name=name, value=value, inline=False)\n embed.set_footer(text='Have other questions? Join the support discord or PM me @Trackpad#1234.')\n\n await ctx.send(embed=embed)", "def intro_tag(body):\n contents = creole_parser.generate(body)\n return tag.intro(contents).generate()", "def build_initial() :\r\n titleframe = T.Frame(ROOT)\r\n TITLE = T.Label(titleframe, text = \"Welcome to Microgp!\")\r\n var = T.StringVar()\r\n INSTRUCTIONS = T.Message(titleframe, textvariable = var, width = 100)\r\n var.set(\"By Erik and Sam\")\r\n instruct_b = T.Button(titleframe, text = \"Instructions\",\r\n command = get_instructions)\r\n instruct_b.pack(side = T.BOTTOM)\r\n TITLE.pack(side = T.TOP)\r\n INSTRUCTIONS.pack(side = T.BOTTOM)\r\n titleframe.pack()", "def render_form(self, title=\"\", body=\"\", error=\"\"):\n self.render(\"newpost.html\", title=title, body=body, error=error)", "def reply_embed(self, message: str):\n embed = discord.Embed(color=discord.Color.blurple())\n embed.title = \"\"\n embed.description = message\n return embed", "async def seal_embed(self): \n\n if self.embed_message is not None:\n\n self.embed.title = f\"Played: {self.current_level.name}\"\n self.embed.color = discord.Color.default()\n\n self.embed.clear_fields()\n\n self.embed.add_field(name=\"Attempts:\",value=self.current_level.attempts,inline=True)\n self.embed.add_field(name=\"Total Attempts:\",value=self.current_level.attempts,inline=True)\n self.embed.add_field(name=\"Total Jumps:\",value=self.current_level.jumps,inline=True)\n self.embed.add_field(name=\"Best Session %:\",value=f\"{self.session.best}%\\n{self._get_progress_bar(self.session.best)}\",inline=False)\n self.embed.add_field(name=\"Best Lifetime %:\",value=f\"{self.current_level.best_percent}%\\n{self._get_progress_bar(self.current_level.best_percent)}\",inline=False)\n\n await self.embed_message.edit(embed=self.embed)\n self.embed = discord.Embed()", "def intro_page():\n logger.info('At introductory app page.')\n return render_template('first.html')", "def create_form_html():\n data_file = os.path.join('data', 'data.csv')\n data = pd.read_csv(data_file, index_col=0)\n example1 = data.iloc[0, :178]\n example2 = data.iloc[4340, : 178]\n placeholder = ', '.join(example1.astype(str))\n example_str1 = textwrap.fill(placeholder, 80)\n example_str2 = textwrap.fill(', '.join(example2.astype(str)), 80)\n form_html = ('''\n <html><body>\n <h1>Binary classifier for Epileptic Seizure Recognition Data \n Set</h1>\n <h2>Please enter features for classification</h1>\n (178 integers, separated by commas)\n 
<form method=\"post\" action=\"\">\n <textarea name=\"query\" cols=\"80\" rows=\"10\">'''\n + placeholder\n + ''' </textarea>\n <input type=\"submit\">\n </form>\n <p> Example non-seizure data point:\n '''\n + example_str1\n + '''<p> Example seizure data point: '''\n + example_str2\n + '''</body></html>''')\n return form_html", "def introduction(self, id, names=None):\n method.apply_method(self, {\n 'method_name': 'introduction',\n 'goal_id': id, 'fact_ids': [], 'names': names\n })", "def get_intro_message(self):\n self.speak_dialog(\"thank.you\")\n return None", "def intro():\r\n\r\n print(term.home + term.on_white + term.clear)\r\n Joueur_1 = input(term.black + 'Pseudo joueur 1 : ')\r\n Joueur_2 = input(term.black + 'Pseudo joueur 2 : ')\r\n print (term.black + 'Que le meilleur gagne!')\r\n print (term.black + '**********************')", "def intro():\n os.system('cls')\n print(\"-------------------------\")\n print(\" MOON PHASE CALENDAR\")\n print(\"-------------------------\")", "def new():\n data = {}\n bai = request.values.get('bai', u\"\", type=unicode)\n if bai:\n # Add BAI information to form in order to keep connection between\n # a HEPName and an author profile.\n data[\"bai\"] = bai\n\n form = AuthorUpdateForm(data=data)\n ctx = {\n \"action\": url_for('.submitnew'),\n \"name\": \"authorUpdateForm\",\n \"id\": \"authorUpdateForm\",\n }\n\n return render_template('authors/forms/new_form.html', form=form, **ctx)", "def introduction_html(self) -> str:\n if self._introduction_md is None:\n return self._introduction_html\n else:\n return mistune.markdown(self._introduction_md)", "def addPublication():\n preloaded = [\n {\"description\": \"bortaS <b>bIr</b> jablu'DI' reH QaQqu' nay'!\"},\n {\"language\": \"en\"},\n {\"country\": \"usa\"}\n ]\n return render_template(\"addPublication.html\", msg=\"\", preloaded=preloaded)", "def error_embed(self, message: str):\n embed = discord.Embed(color=discord.Color.red())\n embed.title = \"\"\n embed.description = message\n return embed", "def add():\n form = forms.JournalForm()\n if form.validate_on_submit():\n models.Journal.create(\n title=form.title.data,\n date=form.date.data,\n time_spent=form.time_spent.data,\n learnt=form.learnt.data,\n resources=form.resources.data)\n flash('Entry has been created', 'success')\n return redirect(url_for('index'))\n return render_template('add.html', form=form)", "def create_dummy_form(title,text,fill_choice=[],choice_length=[]):\n # fill it with blank for dummy choices\n count=0\n choices=[]\n while count < 8:\n choices.append(None)\n count+=1\n \n # fill choices based on value on fill_choice\n for i in fill_choice:\n try :\n length = choice_length[i]\n except IndexError :\n length = 10\n choices[i] = create_random_string(length)\n\n dummy_form=CreatePollQuestion(\n {\"question_title\":title,\n \"question_text\" :text,\n \"choice_1\":choices[0],\n \"choice_2\":choices[1],\n \"choice_3\":choices[2],\n \"choice_4\":choices[3],\n \"choice_5\":choices[4],\n \"choice_6\":choices[5],\n \"choice_7\":choices[6],\n \"choice_8\":choices[7],\n })\n\n return dummy_form", "def consentForm(request, experiment_id):\n experiment = get_object_or_404(Experiment, pk=experiment_id)\n form = ConsentForm(experiment=experiment)\n t = Template(experiment.introduction_page_tpl)\n c = RequestContext(request, {'consent_form': form, 'experiment': experiment})\n return HttpResponse(t.render(c))", "def CreatePresentation(self, event):\n pass", "def prepare_embed(self, entry: dict, page: int, *, first: bool = False) -> None:\n if 
self.maximum_pages > 1:\n title = f'{entry[\"word\"]}: {page} out of {self.maximum_pages}'\n else:\n title = entry[\"word\"]\n\n self.embed = e = discord.Embed(colour=0xE86222, title=title, url=entry[\"permalink\"])\n e.set_footer(text=f'Author : {entry[\"author\"]}')\n e.description = self.cleanup_definition(entry[\"definition\"])\n\n try:\n date = discord.utils.parse_time(entry[\"written_on\"][0:-1])\n except (ValueError, KeyError):\n pass\n else:\n e.timestamp = date", "def create_message_embed(self, data):\n\n # get the language object\n lang = data.bot.lang\n\n embed = self.generate_embed()\n\n if lang == \"en\":\n help_text = \"Role Selection\"\n\n elif lang == \"de\":\n help_text = \"Rollenvergabe\"\n\n embed.add_field(name=help_text, value=data.message_text, inline=False)\n\n for role in data.roles.all():\n embed.add_field(name=role.name, value=role.emoji, inline=False)\n\n return embed", "def intro(self, community, now):\n self._get_or_create_timestamps(community).last_intro = now", "def intro():\n introAnswer = input(\n \"Would you like to board the story telling journey?: Yes or no?: \")\n if introAnswer == \"yes\" or introAnswer == \"Yes\":\n print(\"\\n\")\n print(\"GREAT! Let us begin!\")\n print(\"\")\n choose_story()\n elif introAnswer == \"no\" or introAnswer == \"No\":\n print(\"\")\n print(\"How unfortunate! Very well my friend, farewell and stay swell\",\n name + '!')\n else:\n print(\"\")\n print(\"Invalid entry. Please try again.\")\n print(\"\")\n intro()", "def create(self, section, text):\n payload = {}\n payload['introeditor[text]'] = text\n return self._create(section, payload)", "def Wraith_Form(self):\t\t\n\t\tprint(self.name.Title() + \"Wraith\")", "def meme_form():\n return render_template('meme_form.html')", "def meme_form():\n return render_template('meme_form.html')", "def display_project(self):\n form = self.form\n if form:\n msg = \"|wEvent you're creating:|n\\n\" + form.display()\n else:\n msg = \"|wYou are not currently creating an event.|n\"\n self.msg(msg, options={\"box\": True})", "def get_form(self):\n # setup request layer\n self.request = TestRequest()\n # get add view\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newBiodiverse\")\n # update the form once to initialise all widgets\n form.update()\n # go through all widgets on the form and update the request with default values\n data = {}\n for widget in form.widgets.values():\n data[widget.name] = widget.value\n data.update({\n 'form.widgets.IDublinCore.title': u\"My BD Experiment\",\n 'form.widgets.IDublinCore.description': u'This is my experiment description',\n 'form.widgets.projection.count': '1',\n 'form.widgets.projection.experiment.0': unicode(self.sdmexp.UID()),\n 'form.widgets.projection.dataset.0.count': 1,\n 'form.widgets.projection.dataset.0.0.uuid': unicode(self.sdmproj.UID()),\n 'form.widgets.projection.dataset.0.0.threshold': '0.0',\n 'form.widgets.cluster_size': '5000',\n })\n self.request.form.update(data)\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newBiodiverse\")\n return form", "def show_form():\n\n story_title = request.args[\"madlib\"]\n for story in stories.values():\n if story.title == story_title:\n story_for_form = story\n \n return render_template('form.html', s=story_for_form, story_title=story_title)", "def introduction(state_object, nlg_object):\n # intents = [\"greet\", \"goodbye\", \"deny\", \"exclaim_neg\", \"clarification_request\", \"ask_if_ended\", \"feedback_prompt\"]\n if state_object.previous_intent == \"\":\n 
templates = nlg_object.first_templates\n return random.choice(templates[\"intro\"])\n else:\n if state_object.previous_intent == \"goodbye\" or state_object.previous_intent == \"deny\" \\\n or state_object.previous_intent == \"exclaim_neg\":\n templates = nlg_object.goodbye_templates\n return random.choice(templates[\"goodbye\"])\n else:\n templates = nlg_object.intro_templates\n return random.choice(templates[state_object.intent])", "def form_plugin_command(self, form, sessid):\n value = form.getValues()\n option = value['option']\n plugin = value['plugin']\n if option == 'about':\n aboutform = self.plugin['xep_0004'].makeForm('form', \"About SleekBot\")\n aboutform.addField('about', 'fixed', value=getattr(self.cmd_plugins[plugin], 'about', self.cmd_plugins[plugin].__doc__))\n return aboutform, None, False\n elif option == 'config':\n pass", "def showAbout(self):\n about = QtGui.QWidget()\n\n layout = QtGui.QVBoxLayout()\n\n labelTitle = QtGui.QLabel(\"Electronic Wallpaper 2.0\")\n layout.addWidget(labelTitle)\n\n labelDescription = QtGui.QLabel(\"This software is designed to create new video editing experience. Etc...\")\n layout.addWidget(labelDescription)\n\n labelContact = QtGui.QLabel(\"For any question please contact : cecilia.lejeu@gmail.com\")\n layout.addWidget(labelContact)\n\n layout.setStretchFactor(labelTitle, 3)\n layout.setStretchFactor(labelDescription, 1)\n layout.setStretchFactor(labelContact, 3)\n\n about.setLayout(layout)\n about.show()", "def new_from_post():\n # If you make a post request with a question_id we will assume you want a new question editor\n # we will prepopulate the question new page with data from that question (if it is a valid question id)\n question_id = request.form['question_id'] if request.form['question_id'] else ''\n\n return render_template('questionNew.html', question_id=question_id)", "def create_about():\n meta_desc = (\n 'Expected values and probability per lap of step-up banners'\n ' in Final Fantasy Brave Exvius (FFBE)')\n template_vars = {\n 'title' : 'About - ' + sitesettings.SITE_NAME,\n 'siteurl' : sitesettings.SITE_URL,\n 'sitename' : sitesettings.SITE_NAME,\n 'meta_desc' : meta_desc,\n 'last_four_banners' : nav.get_last_four_banners('all'),\n }\n\n about_path = os.path.join(sitesettings.LOCAL_FILE_PATH, 'about')\n\n if not os.path.exists(about_path):\n os.makedirs(about_path)\n\n template_file = 'about.html'\n html_file_loc = os.path.join(about_path, 'index.html')\n generatehtml.generate_html(\n html_file_loc, template_file, template_vars, os.path.join(os.getcwd(), 'templates'))", "async def about(self, ctx):\n self.log_command_call(\"about\", ctx.message)\n embed = create_embed(description=ABOUT_TEXT)\n await ctx.send(embed=embed)", "async def info(self, ctx):\n\n level = await self.get_player_level(ctx.author)\n embed = discord.Embed()\n embed.colour = discord.Colour.blurple()\n embed.set_author(name=str(ctx.author), icon_url=ctx.author.avatar_url)\n\n embed.title = f'Your current level : {level}'\n\n embed.add_field(name='Question', value=f'{self.enigmas[level][\"question\"]}')\n\n embed.set_footer(text='I love Ducks')\n\n await ctx.send(embed=embed)", "def make_form(self):", "def show_form():\n\n return render_template(\"form.html\")", "async def _info(self, ctx: Context):\n\n embed = discord.Embed(colour=await ctx.embed_colour())\n\n perm_int = discord.Permissions(268494928)\n\n data = await self.bot.application_info()\n invite_url = discord.utils.oauth_url(data.id, permissions=perm_int)\n\n embed.description = (\n \"TvM 
Assistant is a Discord bot with utility commands to make hosting TvMs easier.\"\n \"\\n\\nSome of the bot features include:\"\n \"\\n\\n- Setup roles and channel creation\"\n \"\\n- Management of sign-ups, sign-outs, spectators and replacements\"\n \"\\n- In-built logging to detect and ignore private channels\"\n \"\\n- Quick creation of player, mafia and spectator chats\"\n \"\\n- Vote counts and time since day/night started\"\n )\n\n links = (\n f\"\\n- [Invite to your server]({invite_url})\"\n f\"\\n- [Quickstart]({QUICKSTART})\"\n f\"\\n- [Commands Reference]({COMMANDS_REFERENCE})\"\n f\"\\n- [Source Code]({SOURCE_CODE})\"\n )\n\n embed.add_field(name=\"\\u200b\\nQuick Links\", value=links)\n embed.set_author(name=f\"About {ctx.me.name}\", icon_url=ctx.me.avatar_url)\n\n await ctx.send(embed=embed)", "def renderEdit(self):\n if self.discussion.isNone:\n return \"\"\n html = common.textInput(\"topic\" + self.id, self.discussion.topic)\n html += common.elementInstruc(self.discussion.instruc)\n html += u\"<br/>\\n\"\n html += common.richTextArea(\"dIntro\" + self.id, self.discussion.intro)\n return html", "def display_form():\n\n return render_template('add_new_student.html')", "def show_add_actor(self):\n\t\tformulario = view_form_actor.Form(self)\n\t\tformulario.exec_()\n\t\tself.load_data()", "def get_form(self):\n # setup request layer\n self.request = TestRequest()\n # get add view\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newProjection\")\n # update the form once to initialise all widgets\n form.update()\n # go through all widgets on the form and update the request with default values\n data = {}\n for widget in form.widgets.values():\n data[widget.name] = widget.value\n data.update({\n 'form.widgets.IDublinCore.title': u\"My CC Experiment\",\n 'form.widgets.IDublinCore.description': u'This is my experiment description',\n 'form.widgets.species_distribution_models': unicode(self.sdmexp.UID()),\n 'form.widgets.species_distribution_models.model': [unicode(self.sdmmodel.UID())],\n 'form.widgets.future_climate_datasets': [unicode(self.future.UID())]\n })\n self.request.form.update(data)\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newProjection\")\n return form", "async def about(self, ctx):\n embed = Embed(color=self.bot.main_color, timestamp=datetime.utcnow())\n embed.set_author(\n name=\"Modmail - About\",\n icon_url=self.bot.user.avatar_url,\n url=\"https://discord.gg/F34cRU8\",\n )\n embed.set_thumbnail(url=self.bot.user.avatar_url)\n\n desc = \"This is an open source Discord bot that serves as a means for \"\n desc += \"members to easily communicate with server administrators in \"\n desc += \"an organised manner.\"\n embed.description = desc\n\n embed.add_field(name=\"Uptime\", value=self.bot.uptime)\n embed.add_field(name=\"Latency\", value=f\"{self.bot.latency * 1000:.2f} ms\")\n embed.add_field(name=\"Version\", value=f\"`{self.bot.version}`\")\n embed.add_field(name=\"Author\", value=\"[`kyb3r`](https://github.com/kyb3r)\")\n\n changelog = await Changelog.from_url(self.bot)\n latest = changelog.latest_version\n\n if parse_version(self.bot.version) < parse_version(latest.version):\n footer = f\"A newer version is available v{latest.version}\"\n else:\n footer = \"You are up to date with the latest version.\"\n\n embed.add_field(\n name=\"GitHub\", value=\"https://github.com/kyb3r/modmail\", inline=False\n )\n\n embed.add_field(\n name=\"Discord Server\", value=\"https://discord.gg/F34cRU8\", inline=False\n )\n\n 
embed.add_field(\n name=\"Donate\",\n value=\"Support this bot on [`Patreon`](https://patreon.com/kyber).\",\n )\n\n embed.set_footer(text=footer)\n await ctx.send(embed=embed)", "def create_wizard(mod):\n wiz = WC.Wizard(u\"Isotropic linear elastic study\", mod)\n WC.add_model_page(wiz, [\n WC.Mode3D,\n WC.PlaneStress,\n WC.PlaneStrain,\n WC.AxisSymmetric,\n ])\n\n exp_store = WC.ExpStore()\n exp_store.register(WC.ExpStore.smesh, SMeshExp())\n exp_store.register(WC.ExpStore.geom, GeomExp())\n WC.add_mesh_page(wiz, mod, exp_store)\n title = u\"Young's modulus and Poisson ratio definitions\"\n WC.add_material_page(wiz, title, [\n WC.YoungModulus(),\n WC.PoissonRatio(),\n ])\n WC.add_boundaries_page(wiz)\n add_pressure_page(wiz)\n WC.add_command_file_page(wiz, FinalPage(mod))\n return wiz", "def introduction():\r\n intro_answer = input(\"Would you like to play a game? \\nType yes or no? \\n\")\r\n if intro_answer == \"yes\":\r\n print(\"Welcome to Duck Hunt Where you hunt ducks\")\r\n elif intro_answer == \"no\":\r\n print(\"Welcome to Duck Hunt Any Way!\")\r\n else:\r\n print(\"Incorrect input. Please answer by typing yes or no\")\r\n introduction()", "def intro(self):\n return self['intro']", "def def_to_embed(dfn):\n embed = discord.Embed(\n title=dfn[\"meta\"][\"id\"].split(\":\")[0],\n description=dfn[\"hwi\"].get(\"prs\", ({},))[0].get(\"mw\", \"\"),\n type=\"rich\",\n )\n\n text = []\n for d in dfn[\"def\"]:\n senses = get_all_senses(d)\n for _, sense in senses:\n dt = sense[\"dt\"]\n dt_text = [txt for (kw, txt) in dt if kw == \"text\"][0]\n if (\n \"sn\" in sense and sense[\"sn\"][0].isdigit() and text\n ): # starts new definition\n embed.add_field(name=\"Definition\", value=\"\\n\".join(text), inline=False)\n text = []\n text.append(\"**\" + sense.get(\"sn\", \"1\") + \"** \" + mw_to_markdown(dt_text))\n\n if text:\n embed.add_field(name=\"Definition\", value=\"\\n\".join(text), inline=False)\n\n try:\n et = mw_to_markdown(dfn['et'][0][1])\n embed.add_field(name=\"Etymology\", value=et)\n\n except KeyError as e:\n logging.error(e)\n\n return embed", "def hlp(self):\r\n \r\n self.menubar.entryconfig(\"Help\", state = 'disabled')\r\n self.menubar.entryconfig(\"File\", state = 'disabled')\r\n self.hlpframe = LabelFrame(self.master, text = \"Help & About\", font = ('impact', 10), width = 580, height = 662)\r\n self.hlpframe.place(x = 5, y = 5)\r\n self.abt = LabelFrame(self.hlpframe, width = 566, height = 111)\r\n self.abt.place(x = 5)\r\n self.abtproj = Label(self.abt, text = \"A Program for citating web pages using APA 6th edition, commonly used in social sciences:\")\r\n self.abtproj.place(x=35, y = 20)\r\n self.projtitle = Label(self.abt, text = \"APA 6th Edition Web Citation Generator\", font = ('impact', 25))\r\n self.projtitle.place(x = 16, y = 40)\r\n self.how2use = LabelFrame(self.hlpframe, text = \"How to use?\", font = ('impact', 8), width = 566, height = 400)\r\n self.how2use.place(x = 5, y = 110)\r\n self.inst1 = Label(self.how2use, text = \"◾ Enter the required details first and click \\\"Create\\\":\", justify = LEFT)\r\n self.inst1.place(x = 1, y = 1)\r\n self.inst1auth = Label(self.how2use, text = \"▸ You can choose if the article's author is one or more author or a group of authors.\", justify = LEFT)\r\n self.inst1auth.place(x = 25, y = 20)\r\n self.inst1auth2 = Label(self.how2use, text = \"▸ By adding another author, enter the name of the first author to activate the \\\"+\\\" button to add \\n another one.\", justify = LEFT)\r\n self.inst1auth2.place(x=25, y 
= 40)\r\n self.inst1dates = Label(self.how2use, text = \"▸ Click the dropdown menus of Months, Days, and Years to choose for the article's date of \\n publication and the date of when did you retrieved it.\",justify = LEFT)\r\n self.inst1dates.place(x=25, y= 75)\r\n self.inst1datestip = Label(self.how2use, text = \"▸ If the article does not have a date of publication, just leave it blank or you can set it to none, \\n same goes for retrieval date if you don't want to include it.\",justify = LEFT)\r\n self.inst1datestip.place(x = 25, y = 113)\r\n self.inst2edit = Label(self.how2use, text = \"◾ In Editable result you can make changes on your created citation:\", justify = LEFT)\r\n self.inst2edit.place(x = 1, y = 160)\r\n self.inst2conf = Label(self.how2use, text = \"▸ Click \\\"Confirm\\\" to save changes and move on to Final result or \\\"Discard\\\" to go back to creating \\n a new citation.\",justify = LEFT)\r\n self.inst2conf.place(x=25, y = 180)\r\n self.inst3final = Label(self.how2use, text = \"◾ The Final result adds indents and handles the citations alphabetically:\",justify = LEFT)\r\n self.inst3final.place(x = 1, y = 225)\r\n self.inst3add = Label(self.how2use, text = \"▸ To add another citation to the reference list click \\\"Add another citation!\\\".\",justify = LEFT)\r\n self.inst3add.place(x = 25, y = 245)\r\n self.inst3dis = Label(self.how2use, text = \"▸ If you clicked \\\"Clear all\\\" it'll prompt you to clear the reference list, once confirmed it cannot \\n be undone.\", justify = LEFT)\r\n self.inst3dis.place(x = 25, y = 265)\r\n self.inst4sav = Label(self.how2use, text = \"◾ Saving the reference list by \\\".txt\\\" or \\\".doc\\\" type:\",justify = LEFT)\r\n self.inst4sav.place(x = 1, y = 310)\r\n self.inst4how = Label(self.how2use, text = \"▸ You can save the reference list by going to \\\"File\\\", \\\"Save as...\\\".\", justify=LEFT)\r\n self.inst4how.place(x = 25, y = 330)\r\n self.inst4note = Label(self.how2use, text = \"▸ Once the file has been saved, you can't make changes on it using the program.\", justify = LEFT)\r\n self.inst4note.place(x = 25, y = 350)\r\n self.lbl = LabelFrame(self.hlpframe, width = 566, height = 55)\r\n self.lbl.place(x = 5, y = 515)\r\n self.built = Label(self.lbl, text = \"Built using Python\", font = 'impact')\r\n self.built.place(x = 225)\r\n self.jp = Label(self.lbl, text = \"John Paul G. 
Zoleta Bachelor of Science in Computer Science College of Mary Immaculate\")\r\n self.jp.place(x =25, y = 25)\r\n\r\n self.ok = Button(self.hlpframe, text = \"Okay ⌐■_■\",activebackground = 'gray', command = self.hlpframeclear, width = 79, height = 3)\r\n self.ok.place(x = 6, y = 575)", "def newExperiment(self):\n experiment = Experiment()\n newtitle = 'Untitled ' + self.getNextUntitled()\n experimentFrame = SequenceFrame(self, experiment, True, newtitle)\n experiment.setInteractionParameters(parentFrame=experimentFrame,\n graphManagerClass=StandardGraphManager)\n self.frames.append(experimentFrame)\n self.names.append(newtitle)\n log.info('Created experiment ' + newtitle)\n experimentFrame.Show()\n testFrame = tf.TestingFrame(experimentFrame, experiment)\n testFrame.Show()\n self.Show(False)", "def _create_content(self, response_text, submit_url=None):\r\n if submit_url:\r\n submit_form = textwrap.dedent(\"\"\"\r\n <form action=\"{submit_url}/grade\" method=\"post\">\r\n <input type=\"submit\" name=\"submit-button\" value=\"Submit\">\r\n </form>\r\n <form action=\"{submit_url}/lti2_outcome\" method=\"post\">\r\n <input type=\"submit\" name=\"submit-lti2-button\" value=\"Submit\">\r\n </form>\r\n <form action=\"{submit_url}/lti2_delete\" method=\"post\">\r\n <input type=\"submit\" name=\"submit-lti2-delete-button\" value=\"Submit\">\r\n </form>\r\n \"\"\").format(submit_url=submit_url)\r\n else:\r\n submit_form = ''\r\n\r\n # Show roles only for LTI launch.\r\n if self.post_dict.get('roles'):\r\n role = '<h5>Role: {}</h5>'.format(self.post_dict['roles'])\r\n else:\r\n role = ''\r\n\r\n response_str = textwrap.dedent(\"\"\"\r\n <html>\r\n <head>\r\n <title>TEST TITLE</title>\r\n </head>\r\n <body>\r\n <div>\r\n <h2>IFrame loaded</h2>\r\n <h3>Server response is:</h3>\r\n <h3 class=\"result\">{response}</h3>\r\n {role}\r\n </div>\r\n {submit_form}\r\n </body>\r\n </html>\r\n \"\"\").format(response=response_text, role=role, submit_form=submit_form)\r\n\r\n # Currently LTI module doublequotes the lis_result_sourcedid parameter.\r\n # Unquote response two times.\r\n return urllib.unquote(urllib.unquote(response_str))", "def create_overlay(\n nom_act: str = \"nombre de la actividad\",\n desc: str = \"descripción\", # max 90 caracteres\n data_ini: str = \"fecha de inicio\",\n data_fin: str = \"fecha de finalización\",\n h11: str = \"12:00\",\n h12: str = \"19:00\",\n h21: str = \"15:00\",\n h22: str = \"18:00\",\n nombre: str = \"Joan\",\n nif: str = \"47182736 N\",\n tit: str = \"profesor\",\n):\n c = canvas.Canvas(\"static/pdfs/simple_form_overlay.pdf\")\n\n c.drawString(170, 421, nom_act)\n c.drawString(50, 358, desc)\n c.drawString(176, 311, \"X\") # acte obert public\n c.drawString(64, 248, \"X\") # act puntual\n c.drawString(424, 252, data_ini)\n c.drawString(424, 236, data_fin)\n c.drawString(450, 210, h11)\n c.drawString(500, 210, h12)\n c.drawString(450, 185, h21)\n c.drawString(500, 185, h22)\n c.drawString(50, 118, nombre)\n c.drawString(400, 118, nif)\n c.drawString(50, 90, tit)\n\n c.save()", "async def sayembed(self, ctx, text_channel: typing.Union[discord.TextChannel, str] = None, *, embed_format=None):\n embed_creator_url = \"https://embedbuilder.nadekobot.me/\"\n if isinstance(text_channel, str):\n if isinstance(embed_format, str):\n embed_format = text_channel + embed_format\n text_channel = ctx.channel\n try:\n if not embed_format or not text_channel:\n return await ctx.send(f\"> **This command follows the format from {embed_creator_url}**\")\n else:\n author_name = 
None\n author_icon_url = None\n embed_footer_text = None\n embed_footer_url = None\n embed_format = json.loads(embed_format)\n embed_image = embed_format.get('image')\n embed_footer = embed_format.get('footer')\n embed_thumbnail = embed_format.get('thumbnail')\n embed_author = embed_format.get('author')\n if embed_author:\n author_name = embed_author.get(\"name\")\n author_icon_url = embed_author.get(\"icon_url\")\n if embed_footer:\n embed_footer_text = embed_footer.get('text')\n embed_footer_url = embed_footer.get('icon_url')\n author_url = embed_format.get('url')\n\n if author_icon_url or author_url:\n embed_format.pop('author')\n if embed_footer_url:\n embed_format.pop('footer')\n if embed_image:\n embed_format.pop('image')\n if embed_thumbnail:\n embed_format.pop('thumbnail')\n\n embed = discord.Embed.from_dict(embed_format)\n\n if embed_image:\n embed.set_image(url=embed_image)\n if embed_footer_url:\n embed.set_footer(text=embed_footer_text, icon_url=embed_footer_url)\n if embed_thumbnail:\n embed.set_thumbnail(url=embed_thumbnail)\n if author_url and author_icon_url:\n embed.set_author(name=author_name, url=author_url, icon_url=author_icon_url)\n elif not author_icon_url and author_url:\n embed.set_author(name=author_name, url=author_url)\n elif not author_url and author_icon_url:\n embed.set_author(name=author_name, icon_url=author_icon_url)\n\n plain_body = embed_format.get('plainText')\n if plain_body:\n return await text_channel.send(plain_body, embed=embed)\n else:\n return await text_channel.send(embed=embed)\n except Exception as e:\n await ctx.send(f\"ERROR - {e}.\\nFollow the format from {embed_creator_url}\")\n log.console(e)", "def _help_embed(bot: util.CustomBot, index: int) -> discord.Embed:\r\n if index == 0: # first page -> general information\r\n embed = discord.Embed(\r\n title=f'Help page {index + 1}\\nGeneral information',\r\n description=f'Some bot\\'s description I can\\'t think of now'\r\n f'\\n\\nBot\\'s prefix: \\'{bot.config[\"prefix\"]}\\''\r\n f'\\n\\nAuthors:'\r\n '\\n- Astolfo_for_life (original bot)'\r\n '\\n- TrapinchO (original and new bot)',\r\n colour=bd.embed_colors['info'],\r\n )\r\n embed.set_thumbnail(url=bot.config['embed_image_url'])\r\n return embed\r\n\r\n all_cogs = util.get_cog_name_list(bot)\r\n cog_name = all_cogs[index-1] # get the cog's name\r\n\r\n embed = discord.Embed( # change the embed\r\n title=f'Help page {index + 1}\\nCategory: {cog_name}',\r\n description=_help_cog(bot.get_cog(cog_name)), # get the cog help\r\n colour=bd.embed_colors['info']\r\n )\r\n embed.set_thumbnail(url=bot.config['embed_image_url'])\r\n return embed", "def inscription():\n f = None\n f = InscriptionForm()\n return render_template(\n \"inscription.html\",\n form = f,\n title = \"Inscription\")", "def create_embedding(skills):\n corpus = list(skills[\"description\"].values)\n embedder = SentenceTransformer(config[\"sentence_transformer\"][\"model\"])\n embedding = embedder.encode(corpus, show_progress_bar=True)\n return embedding", "def intro():\n print(\" ___ _ _ _ ____ \")\n print(\"|_ _|_ __ __| (_) __ _ _ __ __ _ | | ___ _ __ ___ ___ |___ \\\\ \")\n print(\" | || '_ \\\\ / _` | |/ _` | '_ \\\\ / _` | _ | |/ _ \\\\| '_ \\\\ / _ \\\\/ __| __) |\")\n print(\" | || | | | (_| | | (_| | | | | (_| | | |_| | (_) | | | | __/\\\\__ \\\\ / __/ \")\n print(\"|___|_| |_|\\\\__,_|_|\\\\__,_|_| |_|\\\\__,_| \\\\___/ \\\\___/|_| |_|\\\\___||___/ |_____|\")\n print('and his Great Python Adventure'.center(80))\n print()", "def practices_create():\n practice = 
Practice()\n form = PracticeCreateForm()\n if form.validate_on_submit():\n\n form.populate_obj(practice)\n db.session.add(practice)\n db.session.commit()\n return redirect(url_for('practices.home'))\n return render_template('practices/create.html', form=form)", "def get_form(self):\n # setup request layer\n self.request = TestRequest()\n # get add view\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newSpeciesTraits\")\n # update the form once to initialise all widgets\n form.update()\n # go through all widgets on the form and update the request with default values\n data = {}\n for widget in chain(\n form.widgets.values(),\n # skip standard plone groups\n #chain.from_iterable(g.widgets.values() for g in form.groups),\n chain.from_iterable(g.widgets.values() for g in form.param_groups)):\n data[widget.name] = widget.value\n data.update({\n 'form.widgets.IDublinCore.title': u\"My ST Experiment\",\n 'form.widgets.IDublinCore.description': u'This is my experiment description',\n 'form.widgets.algorithm': [self.algorithm.UID()],\n 'form.widgets.formula': u'Z ~ X + Y',\n 'form.widgets.data_table': [unicode(self.traitsds.UID())]\n })\n self.request.form.update(data)\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newSpeciesTraits\")\n return form", "def submitnew():\n form = AuthorUpdateForm(formdata=request.form)\n visitor = DataExporter()\n visitor.visit(form)\n\n workflow_object = workflow_object_class.create(\n data={},\n id_user=current_user.get_id(),\n data_type=\"authors\"\n )\n workflow_object.extra_data['formdata'] = copy.deepcopy(visitor.data)\n workflow_object.data = formdata_to_model(workflow_object, visitor.data)\n workflow_object.save()\n db.session.commit()\n\n # Start workflow. delayed=True will execute the workflow in the\n # background using, for example, Celery.\n start.delay(\"author\", object_id=workflow_object.id)\n\n ctx = {\n \"inspire_url\": get_inspire_url(visitor.data)\n }\n\n return render_template('authors/forms/new_success.html', **ctx)", "def show_add_feedback(username):\n\n if \"username\" not in session or username != session['username']:\n flash(\"You do not have permission to view this content.\")\n return redirect(\"/\")\n else:\n form = AddFeedbackForm()\n \n \n \n if form.validate_on_submit():\n title = form.title.data\n content = form.content.data\n \n \n post = Feedback(title=title, content=content, username=username)\n db.session.add(post)\n db.session.commit()\n flash(f\"Feedback Posted!\", \"success\")\n return redirect(f\"/users/{username}\")\n \n else:\n \n return render_template(\n \"add_feedback.html\", form=form)", "def add_Capteur():\n f = None\n f = CapteurForm()\n return render_template(\n \"addCapteur.html\",\n form = f,\n title = \"Nouveau capteur\",\n param = \"create\")", "def show_add_student_form():\n\n return render_template(\"add_student_form.html\")", "def get_form(self):\n # setup request layer\n self.request = TestRequest()\n # get add view\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newSpeciesDistribution\")\n # update the form once to initialise all widgets\n form.update()\n # go through all widgets on the form and update the request with default values\n data = {}\n for widget in chain(\n form.widgets.values(),\n # skip standard plone groups\n #chain.from_iterable(g.widgets.values() for g in form.groups),\n chain.from_iterable(g.widgets.values() for g in form.param_groups)):\n data[widget.name] = widget.value\n data.update({\n 'form.widgets.IDublinCore.title': u\"My 
Experiment\",\n 'form.widgets.IDublinCore.description': u'This is my experiment description',\n 'form.widgets.functions': [self.algorithm.UID()], # BIOCLIM\n 'form.widgets.species_occurrence_dataset': [unicode(self.occur.UID())], # ABT\n 'form.widgets.species_absence_dataset': [unicode(self.absen.UID())],\n 'form.widgets.species_pseudo_absence_points': [],\n 'form.widgets.resolution': ('Resolution2_5m', ),\n # FIXME: shouldn't be necessary to use unicode here,... widget converter should take care of it\n 'form.widgets.environmental_datasets.item.0': unicode(self.current.UID()),\n 'form.widgets.environmental_datasets.item.0.item': [u'B01'],\n 'form.widgets.environmental_datasets.item.1': unicode(self.current.UID()),\n 'form.widgets.environmental_datasets.item.1.item': [u'B02'],\n 'form.widgets.environmental_datasets.count': '2',\n })\n self.request.form.update(data)\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newSpeciesDistribution\")\n return form", "def addDemographics(self):\n p = self.p\n demographics_data = {\n 'dob': p.dob,\n 'gender': p.gender,\n 'email': p.email,\n 'fname': p.fname,\n 'lname': p.lname,\n 'hphone': p.home,\n 'cphone': p.cell,\n 'country': p.country,\n 'city': p.city,\n 'pcode': p.pcode,\n 'region': p.region,\n 'street': p.street,\n }\n self.demographics_doc = DEMOGRAPHICS.sub(demographics_data).done()", "def add_talk(self):\r\n presentation = self.create_presentation(self.newTalkWidget.talkDetailsWidget)\r\n\r\n if presentation:\r\n self.db.insert_presentation(presentation)\r\n self.newTalkWidget.accept() # Close the dialog\r", "def show_new_tag_form():\n\n return render_template('create_tag.html')", "async def intro_step(step_context: WaterfallStepContext) -> DialogTurnResult:\n\n # Ask what to do\n message = (\n str(step_context.options)\n if step_context.options\n else \"What can I help you with today?\"\n )\n\n # TextPromp - How can I help you ?\n return await step_context.prompt(\n \"ActPrompt\",\n PromptOptions(\n prompt=MessageFactory.text(message)\n ),\n )", "def __init__(self, form: dict, help: str, parent=None, is_multi_cif=False):\n super().__init__(parent)\n self.is_multi_cif = is_multi_cif\n self.setParent(parent)\n self.form = form\n # self.setMinimumWidth(400)\n self.mainVLayout = QVBoxLayout(self)\n self.setLayout(self.mainVLayout)\n # self.setStyleSheet('QWidget { border: 2px solid black }')\n self.mainVLayout.setContentsMargins(0, 0, 0, 0)\n self.mainVLayout.setSpacing(0)\n self.mainVLayout.addWidget(QHLine())\n # The button to get help for the respective alert:\n self.helpbutton = QPushButton('Help')\n self.helpbutton.clicked.connect(self.show_help)\n self.response_text_edit = QTextEdit()\n self.alert_label_box()\n self.problem_label_box()\n self.response_label_box()\n self.setAutoFillBackground(False)\n self.help = help\n #\n self.show()", "def _error_embed_helper(title: str, description: str) -> discord.Embed:\n return discord.Embed(title=title, description=description, colour=discord.Colour.red())", "def print_intro(self):\n \n print('Did you know that birds hold the record for longest animal migrations?')", "async def helps(ctx):\n embed = discord.Embed(title='**Help....**', description=\"The prefix for the bot is 'qq'.\\\nYah cuz you know _less qq, more pew pew_ ...\", colour=discord.Color.purple())\n embed.set_footer(text='For full list of commands with complete functions do _cmds')\n embed.add_field(name='Core', value='ping, help, cmds, botinfo')\n embed.add_field(name='Economy', value='cry, vaultoftears, tear shop', 
inline=False)\n embed.add_field(name='Entertainment', value='roast, flirt, compliment, geek, nerdystuff, quote, fortune,\\\n8ball, coffee, wannagrabacoffee, book, dadjoke', inline=False)\n embed.add_field(name='Utility', value='purge, ban, kick, unban', inline=False)\n embed.add_field(name='Games', value='diceroll, guessing_game', inline=False)\n await ctx.send(embed=embed)" ]
[ "0.6209566", "0.612537", "0.61086917", "0.6101209", "0.6089631", "0.58237666", "0.5739323", "0.57388747", "0.56776726", "0.56432605", "0.56215453", "0.5612619", "0.56114054", "0.5608273", "0.56033593", "0.55977595", "0.554897", "0.5476212", "0.54698044", "0.54592586", "0.5456238", "0.54393333", "0.542803", "0.5423795", "0.5419773", "0.539797", "0.53904283", "0.5312272", "0.527494", "0.52623767", "0.52358997", "0.52356416", "0.5223637", "0.52203965", "0.52153635", "0.5204028", "0.5200039", "0.5198479", "0.5186098", "0.51826316", "0.5164694", "0.51575875", "0.5148246", "0.51310927", "0.51296854", "0.51229393", "0.5122911", "0.5119962", "0.50865567", "0.50830615", "0.5079089", "0.50620025", "0.5057544", "0.5057544", "0.5049399", "0.5047977", "0.5039313", "0.5036368", "0.5030183", "0.5029548", "0.50237674", "0.5020581", "0.5001444", "0.50009906", "0.49962538", "0.49911457", "0.49766558", "0.49579427", "0.49518892", "0.49327093", "0.49297398", "0.49251077", "0.49218842", "0.49193016", "0.49164233", "0.49106076", "0.49085352", "0.49072146", "0.4905614", "0.48905042", "0.48889282", "0.48885554", "0.48842832", "0.48769176", "0.48691386", "0.48674282", "0.48615408", "0.48567232", "0.48513773", "0.4848026", "0.48430663", "0.48412353", "0.48334473", "0.48276114", "0.4827541", "0.48077828", "0.48028168", "0.4798455", "0.47952384", "0.47898385" ]
0.5328627
27
Add role to a user.
async def add_role(
    client,
    event,
    user: ('user', 'User to add role to'),
    role: ('role', 'The role to give'),
):
    # Check for permissions
    if not event.user_permissions.can_manage_roles:
        abort('You need `manage roles` permission to invoke this command.')

    if not event.guild.cached_permissions_for(client).can_manage_roles:
        abort('I need `manage roles` permission to execute this command.')

    if not event.user.has_higher_role_than(role):
        abort('You must have higher role than the role you are trying to give.')

    if not client.has_higher_role_than(role):
        abort('I must have higher role than the role you are trying to give.')

    # Using `.copy_to` on forms works as well.
    return ADD_ROLE_FORM.copy_with(
        title = f'Add role {role.name} to {user.full_name}',
        custom_id = f'add_role.{user.id}.{role.id}',
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def addrole(self, ctx, user: discord.Member=None, *, role=None):\r\n if user is None or role is None:\r\n return await ctx.send(\"Incorrect usage! *;addrole @user role*\")\r\n r = discord.utils.get(ctx.guild.roles, name=str(role))\r\n if r is None:\r\n return await ctx.send(f'{role} was not found')\r\n try:\r\n await user.add_roles(r)\r\n return await ctx.send(f\"**{str(user)}** has been given the role of **{role}** {self.bot.get_emoji(470063310386233344)}\")\r\n except discord.Forbidden:\r\n return await ctx.send(\"Bot does not have enough permissions to give roles.\")", "def add_role_to_user(self, user, role):\n user, role = self._prepare_role_modify_args(user, role)\n if role not in user.roles:\n user.roles.append(role)\n # noinspection PyUnresolvedReferences\n self.save(user)\n return True\n\n return False", "def add(self, user, role=None, roles=None):\n # TODO(adriant): resolve the roles and users into id's\n # user_id = base.getid(user)\n user_id = user\n # role_id = role\n if role:\n params = {\n 'roles': [role]\n }\n elif roles:\n params = {\n 'roles': roles\n }\n\n route = '/openstack/users/%s/roles'\n url = route % (user_id)\n try:\n self._put(url, json=params, response_key=None)\n except exc.HTTPBadRequest as e:\n print(e.message)\n return False\n\n return True", "def addUserRole(self, name, role):\n self._client.addUserRole(name, role)", "async def addrole(self, ctx, rolename, user: discord.Member=None):\n author = ctx.message.author\n channel = ctx.message.channel\n server = ctx.message.server\n\n if user is None:\n user = author\n\n role = self._role_from_string(server, rolename)\n\n if role is None:\n await self.bot.say('That role cannot be found.')\n return\n\n if not channel.permissions_for(server.me).manage_roles:\n await self.bot.say('I don\\'t have manage_roles.')\n return\n\n if author.id == settings.owner:\n pass\n elif not channel.permissions_for(author).manage_roles:\n raise commands.CheckFailure\n\n await self.bot.add_roles(user, role)\n await self.bot.say('Added role {} to {}'.format(role.name, user.name))", "def addRole(self, role):\n self._client.addRole(role)", "def add_role():\n role = roles.find_or_create_role(request.values.get('role_name', ''))\n user = users.get_or_404(int(request.values.get('user_id', '')))\n if not users.add_role_to_user(user, role):\n return {}, 500\n return {}", "def add_role(role):\n roleOfUser=Role.objects.create(type=role)\n return roleOfUser", "def add_user_role(self, tenant_id, user_id, role_id):\n _url = \"http://\" + self.host_ip + \":35357/v2.0/tenants/\" + \\\n tenant_id + \"/users/\" + user_id + \"/roles/OS-KSADM/\" + role_id\n _headers = {'x-auth-token': self.cloud_admin_info['token_project']}\n _body = None\n response = self.request(\"PUT\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\"No response from Server while adding role\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Add user role Failed with status %s \" %\n response.status)\n return response.status\n\n LOG_OBJ.info(\"Role: %s is added to user:%s successfully.\"\n % (role_id, user_id))\n return True", "def set_role(self, user, role):\n obj = self._get_through_object(user)\n obj.role = role if isinstance(role, int) else obj.ROLE_MAP_REV[role]\n obj.save()", "def append_role(self, user):\n\n tx = self.iroha.transaction(\n [\n self.iroha.command(\n \"AppendRole\",\n account_id=f\"{user.gov_id}@afyamkononi\",\n role_name=user.type,\n )\n ],\n 
creator_account=f\"{self.creator_account_details.gov_id}@afyamkononi\",\n )\n\n IrohaCrypto.sign_transaction(tx, self.creator_account_details.private_key)\n return self.send_transaction_and_return_status(tx)", "def add_role(email, role):\n from enferno.user.models import Role\n u = User.query.filter(User.email == email).first()\n\n if u is None:\n print('Sorry, this user does not exist!')\n else:\n r = Role.query.filter(Role.name == role).first()\n if r is None:\n print('Sorry, this role does not exist!')\n u = click.prompt('Would you like to create one? Y/N', default='N')\n if u.lower() == 'y':\n r = Role(name=role)\n try:\n db.session.add(r)\n db.session.commit()\n print('Role created successfully, you may add it now to the user')\n except Exception as e:\n db.session.rollback()\n # add role to user\n u.roles.append(r)", "def add_role(self, role_id: str, current_user_id=None):\n if RoleModel.is_valid_role(role_id) and not self.has_role(role_id):\n user_role = UserRoleModel(user_id=self.id, role_id=role_id, lastchange_by=current_user_id)\n self.roles.append(user_role)", "def add_role(self, role):\n if role.name not in [r.name for r in self.roles]:\n return db[self.colNam].find_and_modify(query=dict(_id=self.id), update={'$push': {'roles': role.to_python()}})", "def add_role(self, principal, role):\n return permissions.utils.add_local_role(self, principal, role)", "def add_role(self, role, parents=[]):\r\n self._roles.setdefault(role, set())\r\n self._roles[role].update(parents)", "def add_user_to_role(request, username_or_email, role, group_title, event_name):\r\n username_or_email = strip_if_string(username_or_email)\r\n try:\r\n user = _user_from_name_or_email(username_or_email)\r\n except User.DoesNotExist:\r\n return u'<font color=\"red\">Error: unknown username or email \"{0}\"</font>'.format(username_or_email)\r\n\r\n role.add_users(user)\r\n\r\n # Deal with historical event names\r\n if event_name in ('staff', 'beta-tester'):\r\n track.views.server_track(\r\n request,\r\n \"add-or-remove-user-group\",\r\n {\r\n \"event_name\": event_name,\r\n \"user\": unicode(user),\r\n \"event\": \"add\"\r\n },\r\n page=\"idashboard\"\r\n )\r\n else:\r\n track.views.server_track(request, \"add-instructor\", {\"instructor\": unicode(user)}, page=\"idashboard\")\r\n\r\n return '<font color=\"green\">Added {0} to {1}</font>'.format(user, group_title)", "def add_user_role(user_name, tenant_name, role_name, auth_admin_url, admin_token):\n keystone = get_client(auth_admin_url, admin_token)\n role = keystone.roles.create(role_name)\n tenants = keystone.tenants.list()\n my_tenant = [x for x in tenants if x.name==tenant_name][0]\n users = keystone.users.list()\n my_user = [x for x in users if x.name==user_name][0]\n keystone.roles.add_user_role(my_user, role, my_tenant)", "def define_role(self, role):\n\n self._db_manager.create_role(role)", "def patch(self, username, role):\n try:\n UserService.add_role_to_user(token_auth.current_user(), username, role)\n return {\"Success\": \"Role Added\"}, 200\n except UserServiceError as e:\n return {\"Error\": str(e).split(\"-\")[1], \"SubCode\": str(e).split(\"-\")[0]}, 403", "def add_keystone_v3_role_to_user_or_group(self, user_id, role_id,\n pro_dom_id, id_flag):\n LOG_OBJ.debug(\"Adding the role.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/\" + id_flag + \"s/\" + \\\n str(pro_dom_id) + \"/users/\" + str(user_id) + \"/roles/\" + \\\n str(role_id)\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 
'application/json'}\n        _body = None\n        response = self.request(\"PUT\", _url, _headers, _body)\n\n        if response is None:\n            LOG_OBJ.error(\"No response from Server while adding role\")\n            print (\"No response from Server while adding role\")\n            return response\n\n        if response.status not in [200, 201, 202, 203, 204]:\n            LOG_OBJ.error(\"Adding role Failed with status %s \"\n                          \"and error : %s\" % (response.status, response.data))\n            print (\"Adding role Failed with status %s and error : %s\" %\n                   (response.status, response.data))\n            return response.status\n        return True", "def set_role(userid, role, group, request=None):", "def add_role(self, name):\n        role = Role.by_name(name)\n        if not role:\n            role = Role(name)\n            db.add(role)\n        if not role in self.roles:\n            self.roles.append(role)", "async def command_assign_role(self, context, role: str):\n        try:\n            await context.author.add_roles(discord.utils.get(\n                context.guild.roles, name=role))\n            await context.message.add_reaction('👍')\n        except Exception as e:\n            await context.message.add_reaction('👎')\n            await context.send('Role could not be assigned')\n            print(f'Errored in command_assign_role.', e)", "def add_role(self, role):\n    try:\n      self.db_proxy.nameCheck(role.theName, 'role')\n    except ARM.ARMException as ex:\n      self.close()\n      raise ARMHTTPError(ex)\n\n    role_params = RoleParameters(\n      name=role.theName,\n      rType=role.theType,\n      sCode=role.theShortCode,\n      desc=role.theDescription,\n      cProperties=[]\n    )\n\n    role_id = self.db_proxy.addRole(role_params)\n\n    return role_id", "def assign_user_role(self, project_id, user_id, role_id):\n        resp, body = self.put('projects/%s/users/%s/roles/%s' %\n                              (project_id, user_id, role_id), None)\n        self.expected_success(204, resp.status)\n        return service_client.ResponseBody(resp, body)", "def manage_addRole(self, role_id, title, description, RESPONSE=None,\n                       REQUEST=None):\n        if not role_id:\n            message = 'Please+provide+a+Role+ID'\n        else:\n            self.addRole(role_id, title, description)\n            message = 'Role+added'\n\n        if RESPONSE is not None:\n            RESPONSE.redirect('%s/manage_roles?manage_tabs_message=%s' %\n                              (self.absolute_url(), message))", "def addRole(self, role=None, roleName=None, kvDict=None):\n        return _modelActionBase(self, instance=role, instanceName=roleName, kvDict=kvDict,\n                                model=get_model('role'), db=db, action='add', modelType='role')", "def add_user(self, REQUEST):\n\n        role_id = REQUEST.form['role_id']\n        country_code = role_id.rsplit('-', 1)[-1]\n        user_id = REQUEST.form['user_id']\n        agent = self._get_ldap_agent()\n\n        if not self._allowed(agent, REQUEST, country_code):\n            return None\n        if not nfp_can_change_user(self, user_id, no_org=False):\n            # This means somebody is manipulating the DOM in order to\n            # add a user that belongs to an organisation from another\n            # country (the button doesn't normally appear)\n            return None\n\n        with agent.new_action():\n            role_id_list = agent.add_to_role(role_id, 'user', user_id)\n\n        role_msg = get_role_name(agent, role_id)\n        msg = \"User %r added to role %s. \\n\" % (user_id, role_msg)\n\n        # for Eionet Groups roles only, test if the added user is member of a\n        # national organisation\n\n        if self.is_eionet_group(role_id):\n            if not get_national_org(agent, user_id, role_id):\n                msg += (\n                    \"The user you want to add to an Eionet Group does not\"\n                    \" have a mandatory reference to an organisation for \"\n                    \"your country. 
Please correct!\")\n\n        IStatusMessage(REQUEST).add(msg, type='info')\n\n        log.info(\"%s ADDED USER %r TO ROLE %r\",\n                 logged_in_user(REQUEST), user_id, role_id_list)\n\n        if '-awp-' in role_id:\n            return REQUEST.RESPONSE.redirect(self.absolute_url() +\n                                             '/awps?nfp=%s#role_%s' %\n                                             (country_code, role_id))\n\n        return REQUEST.RESPONSE.redirect(self.absolute_url() +\n                                         '/nrcs?nfp=%s#role_%s' %\n                                         (country_code, role_id))", "def set_role(username, role_name=\"\"):\n\tsession = get_session()\n\tdata = {\"username\": username, \"role\": role_name}\n\tsession.post(\"{url}/api/users/set_role\".format(url=get_registry_url()), json=data)", "def add_user_roles(userid:str, *roles):", "def add_user(self, user, role=OrganizationUserRole.MEMBER):\n        users_count = self.users.all().count()\n        if users_count == 0:\n            role = OrganizationUserRole.OWNER\n        org_user = self._org_user_model.objects.create(\n            user=user, organization=self, role=role\n        )\n        if users_count == 0:\n            self._org_owner_model.objects.create(\n                organization=self, organization_user=org_user\n            )\n\n        # User added signal\n        user_added.send(sender=self, user=user)\n        return org_user", "def role_assign(user_id, role_id):\n    user = _get_user_or_404(user_id)\n    role = _get_role_or_404(role_id)\n    initiator_id = g.user.id\n\n    authorization_service.assign_role_to_user(\n        role.id, user.id, initiator_id=initiator_id\n    )\n\n    flash_success(\n        gettext(\n            '%(role_title)s has been assigned to \"%(screen_name)s\".',\n            screen_name=user.screen_name,\n            role_title=role.title,\n        )\n    )", "def addRolePermission(self, role, _type):\n        self._client.addRolePermission(role, _type)", "def create_role(self, **kwargs):\n\n        role = self.role_model(**kwargs)\n        return self.put(role)", "def add_employeeRole(self, id, role):\n        cursor = self.dbconnect.get_cursor()\n        try:\n            cursor.execute('INSERT INTO employeeRoles values(%s,%s)',\n                           (id, role))\n            # get id and return updated object\n            self.dbconnect.commit()\n        except(Exception, self.dbconnect.get_error()) as error:\n            self.dbconnect.rollback()\n            raise Exception('\\nUnable to save EmployeeRole!\\n(%s)' % (error))", "async def apply_role(self, *, reason: str = None):\n        if self.role not in self.member.roles:\n            try:\n                await self.member.add_roles(self.role, reason=reason)\n            except discord.HTTPException:\n                pass", "def role(self, role):\n\n        self._role = int(role)", "def assign_member(self, project_id, user_id, role_id):\n        resp = {}\n        path = '/projects/%s/users/%s/roles/%s' % (project_id, user_id, role_id)\n        res = self.client.call(path, 'PUT', data='', \n                               token=self.manager.identity.token) \n        \n        self.logger.debug('Grant role %s to user %s on project %s' % \n                          (project_id, user_id, role_id))\n        return True", "async def userrole(self, ctx, *, role=None):\n        server = ctx.message.guild\n\n        if not role:\n            result = await self.bot.db.config.find_one({'_id': str(server.id)})\n            if result and result.get('user_role'):\n                await ctx.send(f'The user role restricts which users are able to create and manage their own polls. \\n'\n                               f'The current user role is `{result.get(\"user_role\")}`. '\n                               f'To change it type `{result.get(\"prefix\")}userrole <role name>`')\n            else:\n                await ctx.send(f'The user role restricts which users are able to create and manage their own polls. \\n'\n                               f'No user role set. 
'\n f'To set one type `{result.get(\"prefix\")}userrole <role name>`')\n elif role in [r.name for r in server.roles]:\n await self.bot.db.config.update_one({'_id': str(server.id)}, {'$set': {'user_role': str(role)}}, upsert=True)\n await ctx.send(f'Server role `{role}` can now create and manage their own polls.')\n else:\n await ctx.send(f'Server role `{role}` not found.')", "async def addrole(self, ctx, member: discord.Member, role: discord.Role):\n role = discord.utils.get(ctx.guild.roles, id=role.id)\n\n muted_role = discord.utils.get(ctx.guild.roles, name=\"Muted\")\n punished_role = discord.utils.get(ctx.guild.roles, name=\"Punished\")\n\n if role > ctx.author.top_role:\n return await ctx.send(\n embed=discord.Embed(\n title=\"You don't have permission to add this role\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.darker_grey(),\n )\n )\n\n if role == muted_role or role == punished_role:\n return await ctx.send(\n embed=discord.Embed(\n title=f\"Can not assign *{role}* role using this command.\",\n description=\"For more information run ```.help addrole```\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.red(),\n )\n )\n\n if role in member.roles:\n return await ctx.channel.send(\n embed=discord.Embed(\n title=f\"*{member}* already has *{role}* Role!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.greyple(),\n )\n )\n\n await member.add_roles(role)\n await ctx.send(\n embed=discord.Embed(\n title=f\"*{role}* has been added to *{member}*\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.green(),\n )\n )", "def change_user_role(username, new_role):\n user_connector.change_user_role(username, new_role)", "def create_role(self, **kwargs):\n role = self.role_model(**kwargs)\n # noinspection PyUnresolvedReferences\n return self.save(role)", "def add_role():\r\n check_admin()\r\n\r\n add_role = True\r\n\r\n form = RoleForm()\r\n if form.validate_on_submit():\r\n role = Role(name=form.name.data,\r\n description=form.description.data)\r\n\r\n try:\r\n # add role to the database\r\n db.session.add(role)\r\n db.session.commit()\r\n flash('You have successfully added a new role.')\r\n except:\r\n # in case role name already exists\r\n flash('Error: role name already exists.',category='error')\r\n\r\n # redirect to the roles page\r\n return redirect(url_for('admin.list_roles'))\r\n\r\n # load role template\r\n return render_template('admin/roles/role.html', add_role=add_role,\r\n form=form, title='Add Role')", "def create_role(self, role_id, role):\n raise exception.NotImplemented() # pragma: no cover", "def role(self, role):\n\n self._role = role", "def role(self, role):\n\n self._role = role", "def set_rights(self, user, role):\n for_user = User.get_user_by_username(user)\n role = UserRole(role)\n self.access_handler.check_set_rights(for_user, role)\n for_user.set_user_type(role)", "def assign_user_role_on_project(self, project_id, user_id, role_id):\n resp, body = self.put('projects/%s/users/%s/roles/%s' %\n (project_id, user_id, role_id), None)\n self.expected_success(204, resp.status)\n return service_client.ResponseBody(resp, body)", "def add_admin(self, uid, name, role=None):\n uid = self._check_uid(uid)\n self._router_request(\n self._make_request_data(\n 'addAdminRole',\n data=dict(\n params=dict(\n uid=uid,\n name=name,\n role=role,\n )\n )\n )\n )\n\n return self.get_admin_by_name(uid, name)", "def add_role():\n check_admin()\n add_role = True\n\n form = RoleForm()\n if form.validate_on_submit():\n role = 
Role(title=form.title.data)\n\n try:\n db.session.add(role)\n db.session.commit()\n flash('New role successfully created')\n except:\n flash('Error: Role title already exist')\n\n return redirect(url_for('admin.get_roles'))\n\n return render_template('admin/roles/role.html', form=form, add_role=add_role, title='Add Role')", "async def addrole(self, ctx: context.CustomContext):\n\n await ctx.send(\n f\"{config.USER_INTERACTION_REQUIRED} Reply with the name of the role you want to create.\"\n )\n\n role_name = await ctx.converted_input(converter=converter.CaseInsensitiveRole)\n\n if isinstance(role_name, str):\n await ctx.send(\n f\"{config.YES} I will **create a new role** on this server named `{role_name}` for this.\"\n )\n try:\n discord_role = await ctx.guild.create_role(name=role_name)\n except discord.Forbidden:\n raise exceptions.ForbiddenError(\n exceptions.ForbiddenTask.CREATE_ROLE, role_name\n )\n\n else:\n discord_role = role_name\n\n await ctx.send(\n f\"{config.YES} I'll use the **pre-existing role** named `{discord_role.name}` for this.\"\n )\n\n role_join_message = await ctx.input(\n f\"{config.USER_INTERACTION_REQUIRED} Reply with a short message the user should see when they get the role.\"\n )\n\n try:\n await self.bot.db.execute(\n \"INSERT INTO selfrole (guild_id, role_id, join_message) VALUES ($1, $2, $3) \"\n \"ON CONFLICT (guild_id, role_id) DO UPDATE SET join_message = $3\",\n ctx.guild.id,\n discord_role.id,\n role_join_message,\n )\n except asyncpg.UniqueViolationError:\n return await ctx.send(\n f\"{config.NO} `{discord_role.name}` is already a selfrole on this server.\"\n )\n\n await ctx.send(f\"{config.YES} `{discord_role.name}` was added as a selfrole.\")", "def _add_users_to_role(self, users, rolename):\n role = Role.objects.get(name=rolename, course_id=self.course.id)\n for user in users:\n role.users.add(user)", "async def role(ctx, role: discord.Role = None):\n if role is None:\n await ctx.send(\"List of assignable roles: \" + str(allowed_roles))\n if role.name in allowed_roles:\n if not role in ctx.message.author.roles:\n await ctx.message.author.add_roles(role)\n await ctx.send(\"Role added.\")\n else:\n await ctx.message.author.remove_roles(role)\n await ctx.send(\"Role removed.\") \n else:\n await ctx.send(\"That role doesn't exist, or you don't have permission to modify it.\")", "def createTestUserRole(UserRoles, user):\n user_roles = UserRoles(user_id=user.id, role_id=3)\n db.session.add(user_roles)\n db.session.commit()\n logger.info(\"Added admin access for test user\")\n return", "def addRoleAccess(self, role, read, write, catalog='*', repository='*'):\n return self._client.addRoleAccess(role, read, write, catalog, repository)", "def add_user(self, user: User):\n raise NotImplementedError", "def add_role(profile, instance_profile, role):\n client = boto3client.get(\"iam\", profile)\n params = {}\n params[\"InstanceProfileName\"] = instance_profile\n params[\"RoleName\"] = role\n return client.add_role_to_instance_profile(**params)", "def _set_role(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'4 .. 
32']}), is_leaf=True, yang_name=\"role\", rest_name=\"role\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Role of the user'}}, namespace='urn:brocade.com:mgmt:brocade-aaa', defining_module='brocade-aaa', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"role must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'4 .. 32']}), is_leaf=True, yang_name=\"role\", rest_name=\"role\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Role of the user'}}, namespace='urn:brocade.com:mgmt:brocade-aaa', defining_module='brocade-aaa', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__role = t\n if hasattr(self, '_set'):\n self._set()", "def addRole(self, role_id, title='', description=''):\n if self._roles.get(role_id) is not None:\n raise KeyError('Duplicate role: %s' % role_id)\n\n self._roles[role_id] = {'id': role_id, 'title': title,\n 'description': description}", "async def rolemenu_add_role(self,\n interaction: discord.Interaction,\n name: str,\n role: discord.Role,\n emoji: str = None,\n description: str = None):\n doc = await self.db.find_one({\n \"guild_id\": interaction.guild.id,\n \"name\": name\n })\n if not doc:\n return await interaction.response.send_message(\n \"No role menu with that name exists.\", ephemeral=True)\n for role_doc in doc[\"roles\"]:\n if role_doc[\"id\"] == role.id:\n return await interaction.followup.send(\n \"Role is already in the menu.\", ephemeral=True)\n if len(doc[\"roles\"]) >= 25:\n return await interaction.response.send_message(\n \"This role menu is full.\", ephemeral=True)\n await interaction.response.defer(ephemeral=True)\n if role.guild != interaction.guild:\n return await interaction.response.send_message(\n \"This role is not in this server.\")\n if emoji:\n if emoji.startswith(\"<\") and emoji.endswith(\">\"):\n try:\n emoji = int(emoji[1:-1].split(\":\")[2])\n except ValueError:\n return await interaction.followup.send(\"Invalid emoji.\")\n else:\n try:\n message = await interaction.original_message()\n await message.add_reaction(emoji)\n except discord.HTTPException:\n return await interaction.followup.send(\"Invalid emoji.\")\n await self.db.update_one({\"_id\": doc[\"_id\"]}, {\n \"$push\": {\n \"roles\": {\n \"description\": description,\n \"id\": role.id,\n \"emoji\": emoji,\n \"date_added\": datetime.datetime.now(datetime.datetime.u)\n }\n }\n })\n doc = await self.db.find_one({\"_id\": doc[\"_id\"]})\n await interaction.followup.send(f\"Added {role.mention} to the menu.\")\n menu = Menu(self, interaction.guild, doc)\n await menu.update()", "def assign_user_role_on_domain(self, domain_id, user_id, role_id):\n resp, body = self.put('domains/%s/users/%s/roles/%s' %\n (domain_id, user_id, role_id), None)\n self.expected_success(204, resp.status)\n return service_client.ResponseBody(resp, body)", "async def addtagrole(self, ctx, _role):\r\n\t\tif _role == 0:\r\n\t\t\tself.settings.ServerConfig(ctx.guild.id, 'TagRole', 0)\r\n\t\t\tawait ctx.send('Tag role set to: {}'.format(0))\r\n\t\telse:\t\r\n\t\t\trole = self.settings.Get(ctx, 'role', _role)\r\n\t\t\tif not role: return await ctx.send('Can\\'t find role: 
{}'.format(_role))\r\n\r\n\t\t\tself.settings.ServerConfig(ctx.guild.id, 'TagRole', role.id)\r\n\t\t\tawait ctx.send('Tag role set to: {}'.format(role))", "async def add_role_admin(request, role_id):\n required_fields = [\"id\"]\n utils.validate_fields(required_fields, request.json)\n\n txn_key, txn_user_id = await utils.get_transactor_key(request)\n proposal_id = str(uuid4())\n batch_list = Role().admin.propose.batch_list(\n signer_keypair=txn_key,\n signer_user_id=txn_user_id,\n proposal_id=proposal_id,\n role_id=role_id,\n next_id=request.json.get(\"id\"),\n reason=request.json.get(\"reason\"),\n metadata=request.json.get(\"metadata\"),\n )\n await utils.send(\n request.app.config.VAL_CONN, batch_list, request.app.config.TIMEOUT\n )\n return json({\"proposal_id\": proposal_id})", "def add_user(self, firstname, lastname, email, username, password, role):\n\n new_user = {\n \"id\": len(self.db) + 1,\n \"firstname\": firstname,\n \"lastname\": lastname,\n \"email\": email,\n \"username\": username,\n \"password\": password,\n \"role\": role\n }\n\n ALL_USERS.append(new_user)", "def add_user(self, role, emp_name, username, status, password):\n Log.info(\"Start to add user.\")\n self.click(self.user_add_btn)\n self.wait_unit_el_present(self.add_user_form)\n self.set_combox_value(role, self.user_role_select)\n self.input_text(emp_name, self.emp_name_input)\n self.input_text(username, self.user_name_input)\n self.set_combox_value(status, self.user_status_select)\n self.input_text(password, self.user_password_input)\n self.input_text(password, self.user_confirm_password)\n self.click(self.user_save_btn)\n self.wait_unit_el_present(self.user_table)\n Log.info(\"New user is added.\")", "def create(self, role):\n model = models.load('Role', role)\n model.account_id = self.account_id\n\n return self.client.create_role(model)", "async def add_role_member(request, role_id):\n required_fields = [\"id\"]\n utils.validate_fields(required_fields, request.json)\n txn_key, txn_user_id = await utils.get_transactor_key(request)\n proposal_id = str(uuid4())\n batch_list = Role().member.propose.batch_list(\n signer_keypair=txn_key,\n signer_user_id=txn_user_id,\n proposal_id=proposal_id,\n role_id=role_id,\n pack_id=request.json.get(\"pack_id\"),\n next_id=request.json.get(\"id\"),\n reason=request.json.get(\"reason\"),\n metadata=request.json.get(\"metadata\"),\n )\n batch_status = await utils.send(\n request.app.config.VAL_CONN,\n batch_list,\n request.app.config.TIMEOUT,\n request.json.get(\"tracker\") and True,\n )\n if request.json.get(\"tracker\"):\n return utils.create_tracker_response(\"batch_status\", batch_status)\n return json({\"proposal_id\": proposal_id})", "def add_role():\n\tcheck_admin()\n\tadd_role = True\n\n\tform = RoleForm()\n\tif form.validate_on_submit():\n\t\trole= Role(name= form.name.data,description=form.description.data)\n\n\t\ttry:\n\t\t\t#add role to the database \n\t\t\tdb.session.add(role)\n\t\t\tdb.session.commit()\n\t\t\tflash('You have successfully added a new role ')\n\t\texcept:\n\t\t\t#incase the role already exists\n\t\t flash(\"Error:the role already exists\")\n\n\t\t#redirect to the roles page\n\t\treturn redirect(url_for('admin.list_roles'))\n\n\t\t#load the role template\n\treturn render_template('admin/roles/role.html', add_role=add_role, form = form,title='Add Role')", "def addRole(self, name, description=\"\"):\n params = {\n \"f\" : \"json\",\n \"rolename\" : name,\n \"description\" : description\n }\n aURL = self._url + \"/roles/add\"\n return 
self._con.post(path=aURL, postdata=params)", "def grant_role(self, role, principal_ids):", "def add_users_to_role(self, rolename, users):\n params = {\n \"f\" : \"json\",\n \"rolename\" : rolename,\n \"users\" : users\n }\n rURL = self._url + \"/roles/addUsersToRole\"\n return self._con.post(path=rURL, postdata=params)", "def set_user_role(request):\n id_user = request.POST.get('user_id')\n role = request.POST.get('role')\n id_projet = request.POST.get('project_id')\n # retrieves the user whose role needs to be changed\n user_to_modify = User.objects.get(pk=id_user)\n # check if user can attribute role for the project\n project = UtilsData.get_object_by_type_and_id('project', id_projet)\n if request.user.can_affect(project):\n # Verifies if the user whose role is to be changed is the administrator\n if user_to_modify.is_superuser:\n return HttpResponse(json.dumps(\"error you can't remove admin role\"),\n content_type=\"application/json\")\n else:\n # change role\n project.setRole(user_to_modify, role)\n return HttpResponse(json.dumps(\"ok\"),\n content_type=\"application/json\")", "async def addRole(self, ctx, *roles_to_add):\n already_present_roles = [] # roles that will be deleted from \"roles_to_add\"\n\n available_roles = open(\"assets/roles.txt\", \"r\").readlines()\n available_roles = [role.lower().strip() for role in available_roles]\n\n output_msg = \"\"\n\n for role_to_add in roles_to_add:\n for role in available_roles:\n if role_to_add.lower() == role:\n output_msg += f\"Failed to add {role_to_add}: role already exists.\\n\"\n already_present_roles.append(role_to_add)\n break\n\n for role in already_present_roles:\n roles_to_add.remove(role)\n\n if roles_to_add:\n with open(\"assets/roles.txt\", \"a\") as f:\n for role in roles_to_add:\n f.write(f\"{role}\\n\")\n output_msg += f\"{role} has been added successfully.\\n\"\n\n await ctx.send(output_msg)", "def __setRole(self, session):\r\n self.__role = session.role\r\n if self._config.has_key('purpose'):\r\n co_role = ccm.get_role_for_purpose(session, self._config['purpose'])\r\n _logger.info(\"Switching user to role: %s\" % co_role)\r\n session.role = co_role\r\n _logger.info(\"Switched user to role: %s\" % session.role)", "async def process_add_reaction_role(\n user_id, description, ctx=None, inter=None, allowed_mentions=None\n):\n user = await User.get(user_id)\n response_deferred = await defer_inter(inter, ephemeral=True)\n view = disnake.ui.View(timeout=None)\n view.add_item(RoleDropdown(description))\n await send_message(\n key=\"choose_roles\",\n user=user,\n view=view,\n ephemeral=True,\n response_deferred=response_deferred,\n inter=inter,\n )", "def define_user(self, username, password, role=\"Operator\"):\n\n self._db_manager.create_user(username, password, role)", "def test_add_role(self):\n pass", "async def addRoles(self, ctx: Context, person: Member, roles: Greedy[Role]):\n roles = remove_dupe_roles(roles)\n\n await person.add_roles(*roles)\n await ctx.send(f\"Adding {roles_str(person, roles)}\")", "def update_role(self, role_id, role):\n raise exception.NotImplemented() # pragma: no cover", "def grant_role(ctx, address):\n skale = ctx.obj['skale']\n address = to_checksum_address(address)\n skale.schains.grant_role(skale.schains.schain_creator_role(),\n address)\n print('Success')", "async def create(self, **kwargs) -> 'role.Role':\n if not self._guild.me.guild_permissions.manage_roles:\n raise PermissionsError(\"manage_roles\")\n\n role_obb = role.Role(client=self._guild._bot,\n **(await 
self._guild._bot.http.create_role(self._guild.id)))\n self._roles[role_obb.id] = role_obb\n role_obb.guild_id = self._guild.id\n return await role_obb.edit(**kwargs)", "def setup_test_role(self):\n self.test_role = rand_name('role')\n resp, self.role = self.client.create_role(self.test_role)\n self.roles.append(self.role)", "def add_user(\n self,\n username,\n first_name,\n last_name,\n email,\n role,\n password=\"\",\n hashed_password=\"\",\n ):\n try:\n user = self.user_model()\n user.first_name = first_name\n user.last_name = last_name\n user.username = username\n user.email = email\n user.active = True\n user.roles = role if isinstance(role, list) else [role]\n if hashed_password:\n user.password = hashed_password\n else:\n user.password = generate_password_hash(password)\n self.get_session.add(user)\n self.get_session.commit()\n log.info(const.LOGMSG_INF_SEC_ADD_USER.format(username))\n return user\n except Exception as e:\n log.error(const.LOGMSG_ERR_SEC_ADD_USER.format(e))\n self.get_session.rollback()\n return False", "def make_admin(self):\n user_datastore = SQLAlchemyUserDatastore(db, User, Role)\n user_datastore.add_role_to_user(self, 'admin')\n db.session.commit()", "def assign_default_role(course_id, user):\r\n role, __ = Role.objects.get_or_create(course_id=course_id, name=\"Student\")\r\n user.roles.add(role)", "async def add_roles(self, ctx: commands.Context, *roles: discord.Role):\n if not roles:\n return await ctx.send_help()\n errored = \"\"\n message = \"\"\n added = []\n already_added = []\n for role in roles:\n if role >= ctx.author.top_role:\n errored += (\n \"{role}: You can't set a role equal to or higher than your own.\\n\".format(\n role=role.name\n )\n )\n continue\n if role >= ctx.guild.me.top_role:\n errored += (\n \"{role}: You can't set a role that's equal to or higher than the \"\n \"bot.\\n\".format(role=role.name)\n )\n continue\n async with self.config.guild(ctx.guild).autoroles() as roles_list:\n if role.id not in roles_list:\n roles_list.append(role.id)\n added.append(role.name)\n else:\n already_added.append(role.name)\n message += errored\n if added:\n message += \"\\nAdded role(s): {roles}\".format(roles=humanize_list(added))\n if already_added:\n message += \"\\nRole(s) already added: {roles}\".format(\n roles=humanize_list(already_added)\n )\n if message:\n for line in pagify(message):\n await ctx.send(line)", "def add_user(self, user_email, first_name, last_name, password, role):\n\n course_m = CourseManager(current_user.session)\n group_m = GroupManager(current_user.session)\n\n user = self.create(\n name=user_email,\n domain=course_m.find(name='default').id,\n password=password,\n email=user_email,\n first_name=first_name,\n last_name=last_name,\n vlan='',\n network_id=''\n )\n\n # Add mikrotik VPN user\n mikrotik_m = MikrotikManager()\n mikrotik_m.create_vpn_user(user_email, password)\n\n # Add to database\n user_model = UserModel(user.id)\n DATABASE.session.add(user_model)\n DATABASE.session.commit()\n\n if user is None:\n raise Exception('Could not create user')\n\n if role == 'student':\n self.add_to_group(user, group_m.find(name='students'))\n elif role == 'teacher':\n self.add_to_group(user, group_m.find(name='teachers'))\n\n return user", "def set(isamAppliance, name, user_name, type='embedded_ldap', check_mode=False, force=False):\n new_user = True\n ret_obj = ibmsecurity.isam.base.management_authorization.role.get(isamAppliance, name)\n\n if (ret_obj['data']['users'] == None):\n ret_obj['data']['users'] = []\n else:\n for usr in 
ret_obj['data']['users']:\n            if usr['name'] == user_name:\n                if usr['type'] == type:\n                    if force is False:\n                        return isamAppliance.create_return_object()\n                    new_user = False\n                else:  # Replace user with new type\n                    ret_obj['data']['users'].remove(usr)\n                break\n\n    if new_user is True:\n        ret_obj['data']['users'].append({'name': user_name, 'type': type})\n\n    if check_mode is True:\n        return isamAppliance.create_return_object(changed=True)\n    else:\n        return isamAppliance.invoke_put(\n            \"Add user to management authorization role\",\n            \"/authorization/roles/{0}/v1\".format(name), ret_obj['data'])", "def syncRole(user, roleToAdd, listToAdd):\n    print(user, \":\", roleToAdd)\n    if roleToAdd == \"Doppelgänger\":\n        listToAdd.append(\n            Doppelganger(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Sbire\":\n        listToAdd.append(Minion(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Loup-Garou\":\n        listToAdd.append(Werewolf(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Loup Alpha\":\n        listToAdd.append(\n            AlphaWerewolf(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Loup Shamane\":\n        listToAdd.append(\n            ShamanWerewolf(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Franc-Maçon\":\n        listToAdd.append(Freemason(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Voyante\":\n        listToAdd.append(Seer(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Chasseur de Fantômes\":\n        listToAdd.append(GhostHunter(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Apprentie voyante\":\n        listToAdd.append(\n            BeginnerSeer(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Voleur\":\n        listToAdd.append(Thief(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Noiseuse\":\n        listToAdd.append(\n            Troublemaker(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Soûlard\":\n        listToAdd.append(Drunkard(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Insomniaque\":\n        listToAdd.append(Insomniac(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Divinateur\":\n        listToAdd.append(Diviner(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Tanneur\":\n        listToAdd.append(Tanner(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Chasseur\":\n        listToAdd.append(Hunter(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Garde du corps\":\n        listToAdd.append(BodyGuard(user=user, firstRole=roleToAdd, botRef=bot))\n\n    elif roleToAdd == \"Loup rêveur\":\n        listToAdd.append(\n            SleepingWerewolf(user=user, firstRole=roleToAdd, botRef=bot))\n    else:\n        print(\"GROS PROBLEME\", roleToAdd)\n        exit()", "def view_add_user(self, user, username, password):\r\n        user.realm._checker.addUser(username, password)", "def attach(profile, instance_profile, role):\n    # Make sure the instance profile exists.\n    if not exists(profile, instance_profile):\n        msg = \"No instance profile '\" + str(instance_profile) + \"'.\"\n        raise ResourceDoesNotExist(msg)\n\n    # Make sure the role exists.\n    if not role_jobs.exists(profile, role):\n        msg = \"No role '\" + str(role) + \"'.\"\n        raise ResourceDoesNotExist(msg)\n    \n    # Attach the role to the instance profile.\n    params = {}\n    params[\"profile\"] = profile\n    params[\"instance_profile\"] = instance_profile\n    params[\"role\"] = role\n    return utils.do_request(instanceprofile, \"add_role\", params)", "def changeRole(self, node, role):", "def 
assign_roles(self, username, roles):\n params = {\n \"f\" : \"json\",\n \"username\" : username,\n \"roles\" : roles\n }\n uURL = self._url + \"/users/assignRoles\"\n return self._con.post(path=uURL, postdata=params)", "async def createrole(self, ctx, role: str):\n if role.lower() == \"muted\" or role.lower() == \"punished\":\n return await ctx.send(\"Can not create this roles.\")\n \"\"\"Create a new role\"\"\"\n role = await ctx.guild.create_role(name=role)\n return await ctx.send(\n embed=discord.Embed(\n title=f\"Role *{role}* has been created!\",\n timestamp=datetime.datetime.utcnow(),\n colour=discord.Colour.green(),\n )\n )", "def role_add(role, nodes, node, node_vars, host_vars, extra):\n role_manager = get_role_manager()\n node += nodes\n nodes, node_vars, host_vars, extra_args = _split_vars(\n node, node_vars, host_vars, extra)\n if not nodes:\n raise ArgumentError('No nodes informed')\n\n added_nodes = role_manager.add_role(\n role, hosts_node_map=nodes, host_vars=host_vars,\n node_vars=node_vars, extra_args=extra_args)\n\n print(f\"{len(added_nodes)} nodes were added to role {role}: {', '.join(sorted(added_nodes))}\")\n return 0", "def can_set_role(userid, role, group):", "def test_user_id_role_put(self):\n pass", "def add_role(self, name: str) -> Role:\n role = self.find_role(name)\n if role is None:\n try:\n role = self.role_model()\n role.name = name\n self.get_session.add(role)\n self.get_session.commit()\n log.info(const.LOGMSG_INF_SEC_ADD_ROLE.format(name))\n return role\n except Exception as e:\n log.error(const.LOGMSG_ERR_SEC_ADD_ROLE.format(e))\n self.get_session.rollback()\n return role", "async def register_role(self, ctx, role_name: str, role_title: str, guild_id: int = None):\n role_title = role_title.lower()\n\n # Raises expected AttributeError\n if guild_id is None:\n guild = ctx.guild\n guild_id = guild.id\n else:\n guild = self.bot.get_guild(guild_id)\n guild_id = guild.id\n if guild is None:\n await ctx.send(\"Couldn't find the guild provided.\")\n return\n\n try:\n role_id = discord.utils.get(guild.roles, name=role_name).id\n except AttributeError:\n # role returned is None\n await ctx.send(\"Role with name {} in guild {} could not be found.\".format(role_name, guild.name))\n return\n\n db_key = \"guild:{}:roles:roles:{}\".format(guild_id, role_title)\n\n self.config.set(\"{}:role_id\".format(db_key), role_id)\n\n self.config.hset(\"guild:{}:roles:all:names\".format(guild.id), role_title, role_id)\n\n await ctx.send(\"New role '{}' registered with keyword '{}'\".format(role_name, role_title))" ]
[ "0.82571524", "0.8177443", "0.78452474", "0.7781555", "0.77604127", "0.7734932", "0.7703948", "0.76375777", "0.7601349", "0.74736786", "0.74103475", "0.7407653", "0.7396722", "0.73556995", "0.73501986", "0.72284776", "0.7225049", "0.71885777", "0.7181446", "0.71257436", "0.708737", "0.70495754", "0.6970424", "0.6947069", "0.69153595", "0.68798214", "0.6849817", "0.6841097", "0.68126255", "0.6809245", "0.67987734", "0.67985904", "0.67753804", "0.6757385", "0.6738188", "0.67178255", "0.671557", "0.67094165", "0.66978735", "0.66846156", "0.6684217", "0.66674644", "0.6665891", "0.66502815", "0.6647209", "0.6625708", "0.6625708", "0.6599098", "0.655953", "0.6549256", "0.6519941", "0.64830977", "0.64784247", "0.6475045", "0.6463292", "0.6448277", "0.6445306", "0.64364403", "0.6408173", "0.64007884", "0.6356402", "0.6344083", "0.63335556", "0.6313238", "0.63105965", "0.63048786", "0.62976605", "0.6294114", "0.6286196", "0.6283867", "0.62404186", "0.62344307", "0.6190209", "0.6186085", "0.6182878", "0.61680126", "0.6164833", "0.6157718", "0.6144379", "0.6130176", "0.60951746", "0.6080979", "0.60601926", "0.6054184", "0.6052965", "0.60518867", "0.604892", "0.60301816", "0.6021844", "0.60139376", "0.6013374", "0.5998676", "0.59931445", "0.5987335", "0.5980452", "0.59789544", "0.5975811", "0.59746385", "0.59729236", "0.59698117" ]
0.8291285
0
Add a new waifu to the database!
async def add_waifu(): return WAIFU_FORM
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_to_database():\n db_conn.execute(\"INSERT INTO Fietsenstalling (Naam, Achternaam, Adress, FietsNr, PIN) VALUES \"\n \"(?, ?, ?, ?, ?);\",(Naam, Achternaam, Adress, FietsNr, PIN))\n\n db_conn.commit()", "def addUsertoDatabase(self):\r\n self.c.execute(\"\"\"INSERT INTO student_information VALUES (?,?,?)\"\"\",(self.name,self.password,self.budget,))\r\n self.con.commit()\r\n print(\"Added to Database Student..\")", "def add_wing():\n try:\n society_id = request.form['society_id']\n wing_name = request.form['wing_name']\n flat_no = request.form['flat_no']\n\n df = pd.DataFrame({'society_id': str(society_id), 'wing': str(wing_name), 'flat_no': str(flat_no)}, index=[0])\n\n with dbm.dbManager() as manager:\n manager.commit(df, 'visitor_management_schema.flat_details')\n success = True\n return jsonify(success)\n #return jsonify(q)\n\n except psycopg2.DatabaseError as error:\n errors = {'get_wing_list': False, 'error': (error)}\n return str(errors)", "def new():\n session = current_app.config['db']\n if request.method == \"POST\":\n new_name = request.form['itemname']\n try:\n item = WineABV(name=new_name)\n session.add(item)\n session.commit()\n except exc.IntegrityError:\n session.rollback()\n flash(\"Duplicate values!\", 'danger')\n item = WineABV(name=new_name)\n return render_template(template_prefix+'/new_form.html', item=item)\n\n flash(\"Successfully Added '%s'\" % (new_name,), 'success')\n return redirect(url_for('.show'))\n else:\n item = WineABV(name=\"\")\n return render_template(template_prefix+'new_form.html', item=item)", "def add_flair(flair: Flair) -> bool:\n # pylint: disable=no-member\n db.session.add(flair)\n return BasicDao.safe_commit()", "async def admin_add(self, ctx: MyContext, wormhole: str, user: discord.User):\n if not self.check_wh_exists(wormhole):\n await ctx.send(\n await self.bot._(\n ctx.guild.id, \"wormhole.error.not-exists\", name=wormhole\n )\n )\n return\n if not self.check_is_admin(wormhole, ctx.author.id):\n await ctx.send(await self.bot._(ctx.guild.id, \"wormhole.error.not-admin\"))\n return\n query = \"SELECT 1 FROM wormhole_admin WHERE name = ? 
AND admin = ?\"\n isAlready = len(self.bot.db_query(query, (wormhole, user.id))) > 0\n if not isAlready:\n query = \"INSERT INTO wormhole_admin (name, admin) VALUES (?, ?)\"\n self.bot.db_query(query, (wormhole, user.id))\n await ctx.send(\n await self.bot._(ctx.guild.id, \"wormhole.success.admin-added\")\n )\n else:\n await ctx.send(\n await self.bot._(\n ctx.guild.id, \"wormhole.error.already-admin\", user=user.name\n )\n )", "def add_new_user_to_db():\n first_name = request.form['first_name']\n last_name = request.form['last_name']\n img_url = request.form['img_url']\n\n new_user = User(first_name=first_name,last_name=last_name, img_url=img_url)\n db.session.add(new_user)\n db.session.commit()\n\n return redirect('/users')", "def add_user_to_db(json_details):\n return True", "def AddFolow(database):\n name1=str(input(\"Who do you want to add folow : \"))\n usr1,find1=getByName(database,name1)\n if not find1:\n print(\"the User could not be found\")\n return\n name2=str(input(\"Who do you want to folow : \"))\n usr2,find2=getByName(database,name2)\n if not find2:\n return\n usr1.addFolow(usr2)\n usr2.addFolower(usr1)\n saveDatabase(database,usr1)\n saveDatabase(database,usr2)", "def save(self, db):\n db.query(\n \"INSERT INTO fellows (name, accomodation)\\\n VALUES(:name, :accomodation)\",\n name=self.name, accomodation=self.wants_accomodation\n )", "def save(self, db):\n db.query(\n \"INSERT INTO staff (name) VALUES(:name)\",\n name=self.name\n )", "def add_store_to_db(self, connexion):\r\n # initiate a cursor\r\n cursor = connexion.cursor()\r\n # check if the store already exists in database\r\n cursor.execute(\"\"\"SELECT name FROM Store\r\n WHERE name = %s\"\"\", (self.name, ))\r\n rows = cursor.fetchall()\r\n if not rows:\r\n # insert data\r\n cursor.execute(\"\"\"INSERT INTO Store (name)\r\n VALUES (%(name)s)\"\"\", self.__dict__)\r\n # commit the changes\r\n connexion.commit()", "def add_user(self):\n query = \"INSERT INTO users (first_name, last_name, email, password) VALUES (%s, %s, %s, %s)\"\n self.cursor.execute(query,(\n self.first_name, \n self.last_name, \n self.email, \n self.password))", "def __addNewAdminDB(self,admin_id,username,password,name,comment,creator_id):\n query = self.__addNewAdminQuery(admin_id,username,password,name,comment,creator_id)\n query += self.__addNewAdminIASQuery(username, creator_id)\n db_main.getHandle().transactionQuery(query)", "def add(self, user: U) -> None:\n ...", "def add_entry_to_db(entry):\n db.session.add(entry)\n db.session.commit()", "def post(self):\n FeatureBusiness.add(request.get_json(), user_id=request.user_id)\n\n return {\"status\": 201}, 201", "def save_in_db(self):\n self.sql_database.table_name = self.table_db\n self.sql_database.db_name = self.db\n if self.sql_database.insert_item(text_path=self.path, word_first=self.word_1.get(),\n word_second=self.word_2.get(),\n word_third=self.word_3.get(), word_fourth=self.word_4.get(),\n word_fifth=self.word_5.get()):\n msg.showinfo(message=\"Done\")", "def insertarhab(fila):\n try:\n conexion.cur.execute('insert into habitacion(numero,tipo,prezo,libre) values(?,?,?,?)', fila)\n conexion.conex.commit()\n except sqlite3.OperationalError as e:\n print(e)\n conexion.conex.rollback()", "def insertarhab(fila):\n try:\n conexion.cur.execute('insert into habitacion(numero,tipo,prezo,libre) values(?,?,?,?)', fila)\n conexion.conex.commit()\n except sqlite3.OperationalError as e:\n print(e)\n conexion.conex.rollback()", "def testAddAndDatabaseUpdates(self):\n 
self.users.TESTAPI_resetFixture()\n self.assertEqual(len(models.UsersModel.objects.all()), 0)\n self.users.add(\"count\", \"necula\")\n self.assertEqual(len(models.UsersModel.objects.all()), 1)\n self.users.add(\"george\", \"necula\")\n self.assertEqual(len(models.UsersModel.objects.all()), 2)", "def add_haiku(cls, db_session, status, text, haiku, log_haiku: bool = None):\n log_haiku = log_haiku if log_haiku is not None else True\n\n tweet_haiku = cls(\n status_id_str=status[\"id_str\"],\n user_screen_name=status[\"user\"][\"screen_name\"],\n user_id_str=status[\"user\"][\"id_str\"],\n user_verified=status[\"user\"][\"verified\"],\n created_at=date_string_to_datetime(status[\"created_at\"]),\n text_original=get_tweet_body(status),\n text_clean=text,\n haiku=haiku,\n date_posted=None,\n date_deleted=None,\n )\n\n if log_haiku:\n db_session.add(tweet_haiku)\n try:\n db_session.commit()\n except Exception as e:\n logger.warning(f\"Exception when adding haiku: {e}\")\n db_session.rollback()\n\n return tweet_haiku", "def add_to_db(ark_obj):\n session = Session()\n session.add(ark_obj)\n session.commit()\n session.close()", "def AddUser(database):\n name=input(\"Enter the name of the user : \").lower()\n lastname=input(\"Enter the lastname of the user : \").lower()\n\n if f\"{name}_{lastname}\" in database.keys():\n print(\"the user already exists\")\n return\n\n age=int(input(\"Enter the age of the user : \"))\n yearStudy=int(input(\"Enter the year of study of the user : \"))\n fieldStudy=input(\"Enter the field of study of the user : \")\n nbinterest=int(input(\"how many interests does he have? : \"))\n interest=[]\n for i in range(nbinterest):\n interest.append(input(\"Enter the interest of the user : \"))\n city=input(\"Enter the city of the user : \") \n database[f\"{name}_{lastname}\"]=User(name,lastname,age,yearStudy,fieldStudy,city,interest)\n saveDatabase(database,database[f\"{name}_{lastname}\"])", "def add(self):\n\n db.session.add(self)\n db.session.commit()", "def add(self):\n\n db.session.add(self)\n db.session.commit()", "def create_db(self):", "def add_to_db(name, country, catches):\n try:\n with sqlite3.connect(db_name) as db:\n cur = db.cursor()\n create_table()\n cur.execute('insert into chainsaw values (?,?,?)', (name, country, catches))\n except sqlite3.Error as er:\n print('Changes being rolled back because of error:', er)\n traceback.print_exc()\n db.rollback()", "def add_new_worker(conn, workerId):\n query = \"INSERT INTO workers VALUES('{}', 0)\".format(workerId)\n conn.execute(query)\n conn.commit()\n return True;", "def add_user(self, username, password, name, department):\n db = sqlite3.connect(self.name)\n cur = db.cursor()\n cur.execute('SELECT MAX(ID) FROM users')\n maxid = cur.fetchone()[0]\n usid = maxid + 1 if maxid is not None else 0\n date = time.strftime('%Y.%m.%d')\n cur.execute(\n 'INSERT INTO users VALUES (?, ?, ?, ?, ?, ?, ?)',\n (usid, username, password, \"user\", name, department, 28)\n )\n db.commit()\n db.close()", "def add_member(data):\n print(\"Adding: %s \" % data)\n conn = create_connection(db_location)\n sql = \"INSERT INTO members(member_uid, name, email, badge_id, new) VALUES({}, \\\"{}\\\", \\\"{}\\\", \\\"{}\\\", \\\"{}\\\");\".format(\n data['id'], data['forename'] + \" \" + data['surname'], data['email'], data['badge_id'], data['new'])\n execute_sql(conn, sql)\n return", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def save_to_db(self):\n 
db.session.add(self)\n db.session.commit()", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()", "def add_user(user: dict):\n new_user = [user]\n insert_into_table('users', new_user)", "def insert(self):\n db.session.add(self)\n db.session.commit()", "def insert(self):\n db.session.add(self)\n db.session.commit()", "def insert(self):\n db.session.add(self)\n db.session.commit()", "def handle_add_user():\n new_user = User(first_name=request.form['first_name'], last_name=request.form['last_name'], image_url=request.form['image_url'])\n db.session.add(new_user)\n db.session.commit()\n\n return redirect('/')", "def add_user():\n\n email = request.form.get(\"email\")\n password = request.form.get(\"password\")\n fname = request.form.get(\"fname\")\n lname = request.form.get(\"lname\")\n language = request.form.get(\"language\")\n\n new_user = User(email=email, password=password,fname=fname,\n lname=lname,language=language)\n\n db.session.add(new_user)\n db.session.commit()\n\n return redirect(\"/\")", "def grasspi_add_db(table_name,row):\n\n if table_name == \"weatherdata\":\n\tconn = sqlite3.connect(grasspi_config.cfg.db_file)\n \tc = conn.cursor()\n\tc.execute('INSERT INTO ' + table_name + ' values (?,?,?,?,?,?,?,?,?,?,?,?,?)',[row[\"date\"],row[\"time\"],\n \trow[\"current_temp\"],row[\"current_rain\"],row[\"total_rain\"],row[\"current_wind_speed\"],\n \trow[\"current_wind_direction\"],row[\"current_humidity\"],row[\"current_air_pressure\"],\n \trow[\"current_shortwave_rad\"],row[\"current_atm_rad\"],row[\"day_length\"],row[\"elevation\"]])\n elif table_name == \"wateringschedule\":\n\tconn = sqlite3.connect(grasspi_config.cfg.db_file, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)\n \tc = conn.cursor()\n\tc.execute('INSERT INTO ' + table_name + ' values (?,?,?)',[row[\"zonenumber\"],\n\trow[\"starttime\"],row[\"duration\"]])\n # Save (commit) the changes\n conn.commit()\n # We can also close the cursor if we are done with it\n c.close()", "def add():\n name = request.form['name']\n message = request.form['message']\n\n try:\n newcurs = g.conn.execute(\"\"\"INSERT INTO record\n VALUES (%s, %s );\"\"\", name, message)\n newcurs.close()\n except Exception:\n print \"can not write record to database\"\n return redirect('/error')\n\n return render_template(\"index.html\", **locals())", "def add_training():\n\n if not g.user:\n flash(\"Please login to access\", \"danger\")\n return redirect(\"/\")\n \n if g.user.is_admin == False:\n flash (\"Unauthorized\", \"danger\")\n return redirect(\"/login\")\n\n form = Training_Form()\n\n if form.validate_on_submit():\n training = Training(\n name = form.name.data,\n city = form.city.data,\n state = form.state.data,\n room = form.room.data,\n hours = form.hours.data,\n date = form.date.data,\n time = form.time.data\n )\n db.session.add(training)\n db.session.commit()\n \n flash(\"Training Added!\", \"success\")\n return redirect(\"/administrator\")\n\n else: \n\n return render_template(\"/admin/add_training.html\", form = form)", "def save_data(self):\n db.session.add(self)\n db.session.commit( )", "def run(self):\n self.db.table('points').insert({\n 'name': 'biblioteca',\n 'rfid': '123456'\n })", "def add_entry():\n username = util.remove_commas_from_string(request.form[\"name\"])\n link = util.remove_commas_from_string(request.form[\"ytLink\"])\n song = util.remove_commas_from_string(request.form[\"songName\"])\n\n festive = CHRISTMAS_MODE and \"christmasSong\" in request.form\n\n with database.connect_to_database() as 
db:\n user_id = database.get_userid(db, username)\n database.add_song(db, link, song, user_id, month=12 if festive else None)\n\n return redirect(url_for('main'))", "def add_water():\n\n user_id = session['user_id']\n drink = request.form.get('drink')\n postal = request.form.get('postal')\n time_updated = datetime.now()\n new_drink = Water(ounces=drink, user_id=user_id, time_updated=time_updated, postal=postal)\n\n db.session.add(new_drink)\n db.session.commit()\n \n time_zone = session[\"user_timezone\"]\n \n current_time = datetime.now().astimezone(pytz.timezone(time_zone))\n\n current_date = current_time.date()\n\n total_water_today = db.session.query(func.sum(Water.ounces)).filter(Water.user_id==user_id, Water.time_updated >= current_date).scalar()\n\n if int(total_water_today) != None or int(total_water_today) != 0:\n total_cups_today = round((total_water_today/8),2)\n else:\n total_water_today = 0\n total_cups_today = 0\n\n print('user id', user_id)\n print('current date', current_date)\n return f\"Today's Water: {total_water_today} Oz ({total_cups_today} Cups)\"", "def add_user(self, user):\n return self.ireporter_db.insert_data_users(\n user.get(\"firstname\"),\n user.get(\"lastname\"),\n user.get(\"othernames\"),\n user.get(\"username\"),\n user.get(\"email\"),\n user.get(\"phonenumber\"),\n user.get(\"is_admin\"),\n user.get(\"password\"),\n user.get(\"registered_on\")\n )", "def add_account(self, log, pword):\r\n #Placeholder : insert variables in sqlite3\r\n self.curs.execute(f\"\"\"INSERT INTO main_table VALUES (?, ?)\"\"\", (log, pword))\r\n self.conn.commit()", "def wishlist_add(request):\n\n result = {}\n\n u = request.user\n\n p = Product.objects.get_by_sku(request.POST['sku'])\n\n if p is None:\n result[\"result\"] = '0'\n else:\n w, created = Wishlist.objects.get_or_create(party=u, product=p)\n if created:\n w.comment=request.POST['comment']\n w.max_price=float(request.POST['max_price'])\n w.save() \n result[\"result\"] = str(w.id)\n else:\n result[\"result\"] = '-1'\n \n # add a feed\n f = Feed(actor=u, action=Feed.WISHLIST, product=p) \n f.save()\n \n return JSONHttpResponse(result)", "def save_user(self):\n db.session.add(self)\n db.session.commit()", "async def _ad_add(self, ctx, member: discord.Member):\n new_admin = sql.TalosAdmin((ctx.guild.id, member.id))\n if new_admin not in self.database.get_admins(ctx.guild.id):\n self.database.save_item(new_admin)\n await ctx.send(f\"Added admin {member.name}!\")\n else:\n await ctx.send(\"That user is already an admin!\")", "def add_weather(request):\r\n if request.method == \"POST\":\r\n form = AddWeatherForm(request.POST)\r\n if form.is_valid():\r\n form.save()\r\n # Display success message.\r\n messages.success(request, f'New weather record has been added successfully.', extra_tags='add-weather')\r\n return redirect('weather:weather-list')\r\n else:\r\n form = AddWeatherForm() \r\n context = {'form':form}\r\n return render(request, 'weather/add_weather.html', context)", "def save(self):\n self.db.commit()", "def add_user_to_db(new_profile):\n try:\n params = (new_profile.client_nickname,\n new_profile.client_username,\n new_profile.client_hostname,\n new_profile.client_port)\n client_db.execute(\"INSERT INTO clients VALUES (?, ?, ?, ?)\", params)\n client_detail_list.commit()\n client_detail_list.close()\n except:\n print('User already exists, try deleting the profile first.')", "def add(self, data):\n if self._filter(data):\n id = self.db._generate_id(data)\n \n if not id == None:\n if self.db._store:\n 
self.db.append(id, str(data))\n print id, \"stored to\", self.db._generate_path(id)\n else:\n print id\n print data.show2()", "def save(self, db):\n db.query(\n \"INSERT INTO rooms (name, type) VALUES(:name, :type)\",\n name=self.name, type='O'\n )", "def add_entry(self, scenario_info):\n scenario_id, status = scenario_info[\"id\"], \"created\"\n sql = self.insert()\n self.cur.execute(\n sql,\n (\n scenario_id,\n status,\n ),\n )", "def add_entry(self, scenario_info):\n scenario_id, status = scenario_info[\"id\"], \"created\"\n sql = self.insert()\n self.cur.execute(\n sql,\n (\n scenario_id,\n status,\n ),\n )", "def create(self):\n db.session.add(self)\n db.session.commit()", "def add_droid(data):\n print(\"Adding: %s \" % data) \n conn = create_connection(db_location)\n sql = \"INSERT INTO droids(droid_uid, name, member_uid, material, weight, transmitter_type, new) VALUES({}, \\\"{}\\\", \\\"{}\\\", \\\"{}\\\", \\\"{}\\\", \\\"{}\\\", \\\"{}\\\");\".format(\n data['id'], data['name'], data['member_uid'], data['material'], data['weight'], data['transmitter_type'], data['new'])\n execute_sql(conn, sql)\n return", "def favorite(user, wine):\n\n favorite = Favorite(user=user, wine=wine)\n\n db.session.add(favorite)\n db.session.commit()\n\n # return favorite", "def test_new_user_is_added(db_session):\n new_user = User(username=\"test\", password=\"test\")\n db_session.add(new_user)\n query = db_session.query(User).all()\n assert len(query) == 1", "def save(self, db):\n db.query(\n \"INSERT INTO rooms (name, type) VALUES(:name, :type)\",\n name=self.name, type='L'\n )", "def add_punteggio(self, id, punteggio):\n query = TABELLE['punteggio']['insert']\n return self.execute(query, (id, punteggio))", "def test_addUser(self):\n self.new_user.saveUser()\n self.assertEqual(len(User.users_list),1)", "def save(self)->None:\n database.cursor.execute(\n \"INSERT INTO users(firstname,lastname,othernames,email,phone,username,password,role) VALUES (%s,%s,%s,%s,%s,%s,%s,%s) RETURNING id\", (\n self.first_name,\n self.last_name,\n self.other_name,\n self.email,\n self.phone_number,\n self.user_name,\n self.password,\n self.is_admin\n ))\n super().save()", "def save(self, db):\n pass", "def add_data_db_command():\n click.echo(\"Adding data to the database.\")\n add_data_db()\n click.echo(\"Finished adding data to the database.\")", "def create_wine(title, winery, variety, country, description, designation, points, province, region_1, region_2):\n\n wine = Wine(title=title,\n winery=winery,\n variety=variety,\n country=country,\n description=description,\n designation=designation,\n points=points,\n province=province,\n region_1=region_1,\n region_2=region_2)\n \n db.session.add(wine)\n db.session.commit()\n\n return wine", "def save(self):\r\n db.session.add(self)\r\n db.session.commit()", "def save_to_db(self):\n db.session.add(self)\n db.session.commit()\n # try:\n # db.session.add(self)\n # db.session.commit()\n # except exc.IntegrityError:\n # db.session.rollback()", "def add_admin(self, username, password):\n password_hash = generate_password_hash(password) # Generates a SHA256 hash.\n try:\n self.cur.execute(\"INSERT INTO admins VALUES(\\\"{}\\\", \\\"{}\\\")\".format(username, password_hash))\n self.db.commit()\n except:\n self.db.rollback()", "def saveNewUser(self, userID):\n self.db.insert_new_user(userID)", "def addUserEntry(userName):\n connector = appEngine.connect()\n rows = connector.execute('SELECT count(*) FROM user').rowcount\n newUserId = 'u' + str(ceil(time.time()))\n 
connector.execute('INSERT INTO user(userID,userName) VALUES(?, ?)', (newUserId, userName))", "async def monsave(self, ctx, *, entry):\r\n\r\n self.connect()\r\n discord_id = str(ctx.message.author.id)\r\n\r\n self.database.entries.insert_one({\r\n \"discord_id\": discord_id,\r\n \"entry\": entry\r\n })\r\n\r\n await ctx.send('You have successfully saved this entry in the Viking database.')", "def add_user(self, username, password): #WORKS\n password_hash = generate_password_hash(password) # Generates a SHA256 hash.\n try:\n self.cur.execute(\"INSERT INTO users VALUES(\\\"{}\\\", \\\"{}\\\")\".format(username, password_hash))\n self.db.commit()\n except:\n self.db.rollback()", "def insert_db():\n populate_tables()", "def webAdd( self, web ):\n web.add( self )", "def _create_account(user_id: int):\r\n now = datetime.now()\r\n _created_at = now.strftime(\"%m/%d/%Y at %H:%M:%S\")\r\n Wealth.collection.insert_one({\r\n \"_id\": user_id,\r\n \"coins\": 100,\r\n \"cookie\": 0,\r\n \"choc\": 0,\r\n \"poop\": 0,\r\n \"beans\": 0,\r\n \"pizza\": 0,\r\n \"waffles\": 0,\r\n \"Fish\": 0,\r\n \"apple\": 0,\r\n \"afk\": \"No status set, run w/status to set a status\",\r\n \"Reputation\": 0,\r\n \"LastUsed\": \"Isnotset\",\r\n \"TargetMember\": 0,\r\n \"BadgeSlot1\": \"Doesn't Have Noob\",\r\n \"BadgeSlot2\": \"Doesn't Have Beginner\",\r\n \"BadgeSlot3\": \"Doesn't Have Leader\",\r\n \"AccountCreated\": _created_at,\r\n \"Premium\": \"No\",\r\n \"Developer\": \"No\",\r\n \"Bank\": 0,\r\n \"Tickets\": 0,\r\n \"LastWithdraw\": \"No date\",\r\n \"LastTransfer\": \"No date\",\r\n \"MarriedTo\": \"Nobody\",\r\n \"MarriedDate\": \"No date\",\r\n })", "def save_db(self) -> None:", "def add_to_wish_list(self, user_id, caption):\n with self.connection:\n return self.cursor.execute(\"INSERT INTO 'wish_list' (`user_id`, `wish_list`) VALUES(?,?)\", (user_id,caption))", "def create():\n\tcreate_db()", "def add_user(db, user_data):\n username, password, email, position, phone = user_data[:5]\n\n # Set the new user id\n #users = db['user'].find()\n #next_id = max(u['_id'] for u in users) + 1\n\n # Set Access Level. 
1 will be for a user that has some content to view.\n # Default level is 0\n access_level_map = {'D': 3, 'S': 2}\n access_level = access_level_map.get(position, 0)\n\n security_questions = []\n security_answers = []\n\n security_answers_hash = [generate_password_hash(ans)\n for ans in security_answers]\n\n password_hash = generate_password_hash(password)\n\n\n # Create the data JSON\n new_user = db['user'].insert_one({\n 'username': username,\n 'access_level': access_level,\n 'email': email,\n 'position': position,\n 'phone': phone,\n 'security_questions': security_questions,\n 'login_timestamp':str(datetime.datetime.utcnow()),\n 'deleted': False\n })\n\n db['security'].insert_one({\n 'user_id': str(new_user.inserted_id),\n 'password': password_hash,\n 'security_answers': security_answers_hash\n })\n\n # Insert user into DB\n return True", "def addRecord(self):\n\n ## Saving recorded entries to the CRM and Mailings Database\n print(\"Saving entries to the CRM and Mailings database...\")\n db_connection.executeQuery(\"INSERT INTO dbo.CRM (f_name, l_name, company, address, city, county, state, zip, primary_phone, secondary_phone, email_address) VALUES ('\" + self.first_name.replace(\"\\'\", \"\\'\\'\").title() + \"', '\" + self.last_name.replace(\"\\'\", \"\\'\\'\").title() + \"', '\" + self.crm_company_name.replace(\"\\'\", \"\\'\\'\").title() + \"', '\" + self.address.title() + \"', '\" + self.city.replace(\"\\'\", \"\\'\\'\").title() + \"', '\" + self.county.replace(\"\\'\", \"\\'\\'\").title() + \"', '\" + self.state_code.upper() + \"', '\" + str(self.zip_code) + \"', '\" + self.phone_number + \"', '\" + self.phone_number_2 + \"' , '\" + self.email_address + \"'); COMMIT\")\n db_connection.executeQuery(\"INSERT INTO dbo.Mailings (name, company, address) VALUES ('\" + self.first_name.replace(\"\\'\", \"\\'\\'\").title() + \" \" + self.last_name.replace(\"\\'\", \"\\'\\'\").title() + \"', '\" + self.company_name.replace(\"\\'\", \"\\'\\'\").title() + \"','\" + self.address + \" \" + self.city.title() + \" \" + self.county.title() + \" \" + self.state_code.upper() + \" \" + str(self.zip_code) + \"'); COMMIT\")", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def save(self):\n db.session.add(self)\n db.session.commit()", "def add_flower(flower_name):\n\n flower = Flower(name=flower_name)\n try:\n flower.save()\n return flower\n except Exception as e:\n print(e)\n return None", "def test_add_remove_from_wishlist(self):\n url = reverse('add-to-wishlist')\n data = {\n 'igdb': self.game.igdb,\n 'name': self.game.name,\n 'slug': self.game.slug,\n 'cover_id': self.game.cover_id,\n 'backdrop_id': self.game.backdrop_id\n }\n\n add = self.client.post(url, data, format='json')\n self.assertEqual(True, add.data['value'])\n\n remove = self.client.post(url, data, format='json')\n self.assertEqual(False, remove.data['value'])", "def add(self,who):\n my_info = self.get_info()\n try:\n nick_name = 
re.findall('nickname=\"(.*?)\" ',my_info)[0]\n except IndexError:\n nick_name = \" \"\n\n #code = self._add(who,nick_name,\"AddMobileBuddy\")\n code = self._add(who,nick_name)\n if code == 522:\n code = self._add(who,nick_name,\"AddMobileBuddy\")\n\n if code == 404 or code == 400 :\n log(\"Not Found\")\n return False\n if code == 521:\n log(\"Aleady added.\")\n return True\n if code == 200:\n return True\n\n return False" ]
[ "0.62806755", "0.6170017", "0.60218495", "0.59828955", "0.5796458", "0.57776827", "0.5697804", "0.5691923", "0.56790346", "0.5661239", "0.56476194", "0.5644497", "0.563909", "0.5632404", "0.5625129", "0.5596024", "0.5571909", "0.55557925", "0.55294317", "0.55294317", "0.552427", "0.5523987", "0.5523441", "0.5515276", "0.5512356", "0.5512356", "0.5503978", "0.54864985", "0.547812", "0.5475269", "0.5473161", "0.5468623", "0.5468623", "0.5468623", "0.5468623", "0.5422214", "0.5417066", "0.5417066", "0.5417066", "0.5410738", "0.5408679", "0.5392557", "0.53848606", "0.5379204", "0.53748894", "0.53660643", "0.53615415", "0.5361302", "0.53559387", "0.53513265", "0.534796", "0.533557", "0.533469", "0.53323096", "0.5328654", "0.53244907", "0.53049344", "0.5303197", "0.5301795", "0.5301795", "0.52935207", "0.52856135", "0.52846754", "0.5283521", "0.52787244", "0.5271043", "0.5265209", "0.5261558", "0.52611685", "0.5260227", "0.5249671", "0.5249078", "0.5244371", "0.5244265", "0.52424735", "0.5241394", "0.5238099", "0.523694", "0.5236132", "0.52325004", "0.52321297", "0.5232113", "0.5231866", "0.52316546", "0.5231085", "0.52192116", "0.521635", "0.521635", "0.521635", "0.521635", "0.521635", "0.521635", "0.521635", "0.521635", "0.521635", "0.521635", "0.521635", "0.5214666", "0.5212666", "0.5211486" ]
0.67905253
0
Returns an added waifu.
async def get_waifu( name: ('str', 'Their name?') ): try: waifu = WAIFUS[name.casefold()] except KeyError: abort(f'There is no waifu named like: {name}.') return waifu.embed
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def add_waifu():\n return WAIFU_FORM", "def wave_add_new():\n return _u2i(_pigpio_command(_control, _PI_CMD_WVNEW, 0, 0))", "def wishlist_add(request):\n\n result = {}\n\n u = request.user\n\n p = Product.objects.get_by_sku(request.POST['sku'])\n\n if p is None:\n result[\"result\"] = '0'\n else:\n w, created = Wishlist.objects.get_or_create(party=u, product=p)\n if created:\n w.comment=request.POST['comment']\n w.max_price=float(request.POST['max_price'])\n w.save() \n result[\"result\"] = str(w.id)\n else:\n result[\"result\"] = '-1'\n \n # add a feed\n f = Feed(actor=u, action=Feed.WISHLIST, product=p) \n f.save()\n \n return JSONHttpResponse(result)", "def wa(self, council: str = \"1\") -> WA:\n return WA(self, council)", "def added_item(self, uid: str) -> object:\n return self._input[uid]", "def add(self, user: U) -> None:\n ...", "def get_wotd():\n\treturn wotd", "def add_powerup(powerup: str):\r\n global POWERUPS\r\n POWERUPS.append(powerup)", "def wiersz(self, fak, w):\n wie= MagWiersz()\n \n wie.id_dok= fak\n wie.il_dysp= -w.ilosc\n wie.il_real= -w.ilosc\n wie.jm= w.jm\n wie.cena_real= w.netto\n wie.cena_ewid= w.brutto - w.netto\n wie.vat= w.stawka\n wie.wartosc= w.brutto\n wie.rodzaj= '01'\n wie.upust= w.upust\n wie.konto= '732170090123'\n \n wie.save(using= settings.DBS(self.firma))\n \n return wie.wartosc", "def to_add(self):\n pass", "def addNewUser(self) -> str:\n userId = str(uuid.uuid4())\n\n if len(self.usersQueue):\n # Start timer or logic to change user\n self.timer.start()\n\n self.usersQueue.append(userId)\n return userId", "def add(self,who):\n my_info = self.get_info()\n try:\n nick_name = re.findall('nickname=\"(.*?)\" ',my_info)[0]\n except IndexError:\n nick_name = \" \"\n\n #code = self._add(who,nick_name,\"AddMobileBuddy\")\n code = self._add(who,nick_name)\n if code == 522:\n code = self._add(who,nick_name,\"AddMobileBuddy\")\n\n if code == 404 or code == 400 :\n log(\"Not Found\")\n return False\n if code == 521:\n log(\"Aleady added.\")\n return True\n if code == 200:\n return True\n\n return False", "def webAdd( self, web ):\n web.add( self )", "def save_data(self, new):\n db = self.check_db()\n db.append(new)\n\n return db", "def addwealth(self, w):\n self.wealth += w", "def add_water():\n\n user_id = session['user_id']\n drink = request.form.get('drink')\n postal = request.form.get('postal')\n time_updated = datetime.now()\n new_drink = Water(ounces=drink, user_id=user_id, time_updated=time_updated, postal=postal)\n\n db.session.add(new_drink)\n db.session.commit()\n \n time_zone = session[\"user_timezone\"]\n \n current_time = datetime.now().astimezone(pytz.timezone(time_zone))\n\n current_date = current_time.date()\n\n total_water_today = db.session.query(func.sum(Water.ounces)).filter(Water.user_id==user_id, Water.time_updated >= current_date).scalar()\n\n if int(total_water_today) != None or int(total_water_today) != 0:\n total_cups_today = round((total_water_today/8),2)\n else:\n total_water_today = 0\n total_cups_today = 0\n\n print('user id', user_id)\n print('current date', current_date)\n return f\"Today's Water: {total_water_today} Oz ({total_cups_today} Cups)\"", "def add_flower(flower_name):\n\n flower = Flower(name=flower_name)\n try:\n flower.save()\n return flower\n except Exception as e:\n print(e)\n return None", "def get_week_date():\n return timezone.now()+timezone.timedelta(days=6)", "def new():\n session = current_app.config['db']\n if request.method == \"POST\":\n new_name = request.form['itemname']\n try:\n item = 
WineABV(name=new_name)\n session.add(item)\n session.commit()\n except exc.IntegrityError:\n session.rollback()\n flash(\"Duplicate values!\", 'danger')\n item = WineABV(name=new_name)\n return render_template(template_prefix+'/new_form.html', item=item)\n\n flash(\"Successfully Added '%s'\" % (new_name,), 'success')\n return redirect(url_for('.show'))\n else:\n item = WineABV(name=\"\")\n return render_template(template_prefix+'new_form.html', item=item)", "def addPHA(self):\r\n\r\n if not self.isClosed:\r\n if self.__pha == '':\r\n self.__pha = App.getUserPHA()\r\n else:\r\n raise HDDOPermissionException('Tried to add Personal Health Address twice to a closed HealthDominoDataObject.')\r\n else:\r\n raise HDDOPermissionException('Tried to add Personal Health Address to a closed HealthDominoDataObject.')", "def add_W(self, W):\n self.Ws.append(W)", "def get_add_on():\n #List of all the add ons made with a list comprehension\n add_on_list = [[a, b] for a in list(string.ascii_lowercase) for b in list(string.ascii_lowercase)]\n global a_base\n #reset the a_base if it gets to high\n if a_base + a_key > len(add_on_list) - 1:\n a_base = -1\n #sets value of add_on\n add_on = add_on_list[a_base + a_key]\n add_on = \"\".join(add_on)\n a_base += a_key\n return add_on", "async def admin_add(self, ctx: MyContext, wormhole: str, user: discord.User):\n if not self.check_wh_exists(wormhole):\n await ctx.send(\n await self.bot._(\n ctx.guild.id, \"wormhole.error.not-exists\", name=wormhole\n )\n )\n return\n if not self.check_is_admin(wormhole, ctx.author.id):\n await ctx.send(await self.bot._(ctx.guild.id, \"wormhole.error.not-admin\"))\n return\n query = \"SELECT 1 FROM wormhole_admin WHERE name = ? AND admin = ?\"\n isAlready = len(self.bot.db_query(query, (wormhole, user.id))) > 0\n if not isAlready:\n query = \"INSERT INTO wormhole_admin (name, admin) VALUES (?, ?)\"\n self.bot.db_query(query, (wormhole, user.id))\n await ctx.send(\n await self.bot._(ctx.guild.id, \"wormhole.success.admin-added\")\n )\n else:\n await ctx.send(\n await self.bot._(\n ctx.guild.id, \"wormhole.error.already-admin\", user=user.name\n )\n )", "def add_wad(self, wad):\n\n self.wads.append(wad)", "def add(self, product):\n product_id = str(product.id)\n self.wishlist[product_id] = {'price': str(product.price)}\n self.save()", "async def edit_waifu(\n event,\n name: ('str', 'Their name?'),\n field : (['age', 'bio', 'hair'], 'Which field to edit?'),\n):\n key = name.casefold()\n try:\n waifu = WAIFUS[key]\n except KeyError:\n abort(f'There is no waifu named like: {name}.')\n \n if waifu.user is not event.user:\n abort('You can only edit waifus added by yourself.')\n \n text_input = FIELD_TO_TEXT_INPUT[field]\n \n # We auto-fill the current value\n text_input = text_input.copy_with(value = FIELD_TO_ATTRIBUTE[field].__get__(waifu, Waifu))\n \n return Form(\n f'Editing {waifu.name}',\n [text_input],\n custom_id = f'{CUSTOM_ID_WAIFU_EDIT_BASE}{key}',\n )", "def get_wahljahre():\n db = db_context.get_db()\n return jsonify(db.get_wahl_jahre())", "def gebruik(self):\n return self._gebruik.get_waarde()", "def AddFolow(database):\n name1=str(input(\"Who do you want to add folow : \"))\n usr1,find1=getByName(database,name1)\n if not find1:\n print(\"the User could not be found\")\n return\n name2=str(input(\"Who do you want to folow : \"))\n usr2,find2=getByName(database,name2)\n if not find2:\n return\n usr1.addFolow(usr2)\n usr2.addFolower(usr1)\n saveDatabase(database,usr1)\n saveDatabase(database,usr2)", "def addU(self,name):\n 
return self._addVar(name,self._uNames)", "def addOne():\n print(inspect.stack()[1][3])\n # read data from the API call\n req_data = request.get_json()\n json_data = {}\n\n for req in req_data:\n if (req in Followup.c.keys()):\n json_data[req] = req_data[req]\n\n query = (\n insert(Followup).\n values(json_data)\n )\n ResultProxy = connection.execute(query)\n if(not ResultProxy):\n return {'error': 'Unable to Add the given client'}\n return {'status': \"Adding Succesful\"}", "def return_weekly_questions_save_weight(self, weekly_weight, id_user):\n # get data\n context = {}\n weighing_date = ResultsUser.objects.values_list(\"weighing_date\")\n last_weighing_date = weighing_date.filter(user=id_user).order_by(\"weighing_date\").last()[0]\n one_week_after_weighing = last_weighing_date + timedelta(days=7)\n present = datetime.now()\n present_date = present.date()\n\n # one week after\n # the weighing last\n if present_date >= one_week_after_weighing:\n\n # if the user gave\n # his weekly weight\n if weekly_weight is not False:\n\n # if the user has reached\n # his weight goal\n final_weight = ProfileUser.objects.values_list(\"final_weight\").get(user=id_user)[0]\n if float(weekly_weight) <= final_weight:\n context[\"robot_comment\"] = self.return_text_congratulations_restart_program\\\n (id_user)\n self.end = True\n\n # save weight\n else:\n context[\"robot_comment\"] = \"J'ai bien pris note de ton poids, \" \\\n \"tu trouveras un récapitulatif dans \" \\\n \"l'onglet résultats.\"\n user = self.user.objects.get(id=id_user)\n ResultsUser.objects.create(user=user, weight=weekly_weight)\n self.new_week = True\n\n # create robot question\n else:\n context[\"robot_comment\"] = \"Bonjour ! J'éspère que ta semaine \" \\\n \"s'est bien passée ? Que donne ta pesée \" \\\n \"ce matin ?\"\n context[\"robot_weekly_weight\"] = True\n\n # during the first week after\n # the weighing last : create robot text\n else:\n month = calendar.month_name[one_week_after_weighing.month]\n date = \"\" + calendar.day_name[one_week_after_weighing.weekday()] + \\\n \" \" + str(one_week_after_weighing.day) \\\n + \" \" + month + \"\"\n context[\"robot_comment\"] = \"Retrouvons nous ici {} pour faire le point \" \\\n \"sur tes prochains résultats et voir ton nouveau \" \\\n \"challenge !\".format(date)\n\n return context", "def add(self):\n pass", "def getOrAddID(self, id : int) -> bbUser.bbUser:\n return self.getUser(id) if self.userIDExists(id) else self.addUser(id)", "def add_to_wishlist(self, beer_id: str) -> Dict:\n method = \"user/wishlist/add\"\n auth = self._get_access_token()\n params = {\"bid\": beer_id}\n return self._do_get(method, auth, params)", "def add_unit(self):\n detach = self.army.detachments[self._get_user_detachment()]\n battlefield_role = self._get_user_battlefield_role()\n unit = self._create_user_unit(battlefield_role)\n self._add_unit(detach, unit)\n return", "def add_wing():\n try:\n society_id = request.form['society_id']\n wing_name = request.form['wing_name']\n flat_no = request.form['flat_no']\n\n df = pd.DataFrame({'society_id': str(society_id), 'wing': str(wing_name), 'flat_no': str(flat_no)}, index=[0])\n\n with dbm.dbManager() as manager:\n manager.commit(df, 'visitor_management_schema.flat_details')\n success = True\n return jsonify(success)\n #return jsonify(q)\n\n except psycopg2.DatabaseError as error:\n errors = {'get_wing_list': False, 'error': (error)}\n return str(errors)", "def webAdd( self, web ):\n web.addNamed( self )", "def get_week():\n now = dt.now().date()\n return 
Week.objects.filter(date__lte=now).order_by('-date').first()", "def wwid(self):\n return self._uuid", "async def _ad_add(self, ctx, member: discord.Member):\n new_admin = sql.TalosAdmin((ctx.guild.id, member.id))\n if new_admin not in self.database.get_admins(ctx.guild.id):\n self.database.save_item(new_admin)\n await ctx.send(f\"Added admin {member.name}!\")\n else:\n await ctx.send(\"That user is already an admin!\")", "def add(self, item):", "def fusion_api_add_power_device(self, body, api=None, headers=None):\n return self.pd.create(body=body, api=api, headers=headers)", "def store_action_in_wm(self, u):\n\n self.u_seq[-1].append(u)", "def add_on(self) -> pulumi.Output[Optional['outputs.InstanceAddOn']]:\n return pulumi.get(self, \"add_on\")", "def OnSelfAdded(event, wavelet):\n logging.info('OnSelfAdded')\n blip = event.blip\n wavelet.title = 'A wavelet title'\n blip.append(element.Image(url='http://www.google.com/logos/clickortreat1.gif',\n width=320, height=118))\n blip.append(element.Line(line_type='li', indent='2'))\n blip.append('bulleted!')\n blip.append(element.Installer('http://wave-skynet.appspot.com/public/extensions/areyouin/manifest.xml'))\n\n # add a reply to the blip authored by a proxy. Effectively\n # the address on this will be kitchensinky+proxy@appspot.com.\n # Note that as a side effect this will also add this\n # participant to the wave.\n wavelet.proxy_for('proxy').reply().append('hi from douwe')\n inlineBlip = blip.insert_inline_blip(5)\n inlineBlip.append('hello again!')\n\n # Create a new wave. The new wave will have its own operation queue.\n # new_wave also takes an optional 'message' parameter which can be\n # set to an arbitrary string. By setting it to the serialized version\n # of the current wave, we can reconstruct the current wave when the\n # other wave is constructed and update the current wave.\n new_wave = sinky.new_wave(wavelet.domain,\n wavelet.participants,\n message=wavelet.serialize())\n new_wave.participants.set_role(wavelet.creator,\n wavelet_mod.Participants.ROLE_READ_ONLY)\n new_wave.root_blip.append('A new day and a new wave')\n new_wave.root_blip.append_markup(\n '<p>Some stuff!</p><p>Not the <b>beautiful</b></p>')\n\n # since the new wave has its own operation queue, we need to submit\n # it explicitly through the active gateway, or, as in this case,\n # submit it together with wavelet, which will handle the submit\n # automatically.\n new_wave.submit_with(wavelet)", "async def waifu(self, ctx):\n e = discord.Embed(title=\"Here is a waifu image for you {}.\".format(ctx.author.name), color=discord.Color.magenta())\n e.set_image(url=nekos.img('waifu'))\n await ctx.send(embed=e)", "def favorite(user, wine):\n\n favorite = Favorite(user=user, wine=wine)\n\n db.session.add(favorite)\n db.session.commit()\n\n # return favorite", "def getById(self, id):\r\n try:\r\n c = self.conn.cursor()\r\n c.execute(\"SELECT * FROM Ksiazka WHERE id=?\", (id,))\r\n wpis_row = c.fetchone()\r\n ksiazka = Ksiazka(id=id)\r\n if wpis_row == None:\r\n ksiazka=None\r\n else:\r\n ksiazka.date = wpis_row[1]\r\n c.execute(\"SELECT * FROM Wpisy WHERE ksiazka_id=? 
order by name\", (id,))\r\n wpis_items_rows = c.fetchall()\r\n items_list = []\r\n for item_row in wpis_items_rows:\r\n item = WpisItem(name=item_row[0], nazwisko=item_row[1], numer=item_row[2], ulica=item_row[3], nrdomu=item_row[4], nrmieszkania=item_row[5], miasto=item_row[6])\r\n items_list.append(item)\r\n ksiazka.wpisy=items_list\r\n except Exception as e:\r\n #print \"ksiazka getById error:\", e\r\n raise RepositoryException('error getting by id ksiazka_id: %s' % str(id))\r\n return ksiazka", "def test_add_remove_from_wishlist(self):\n url = reverse('add-to-wishlist')\n data = {\n 'igdb': self.game.igdb,\n 'name': self.game.name,\n 'slug': self.game.slug,\n 'cover_id': self.game.cover_id,\n 'backdrop_id': self.game.backdrop_id\n }\n\n add = self.client.post(url, data, format='json')\n self.assertEqual(True, add.data['value'])\n\n remove = self.client.post(url, data, format='json')\n self.assertEqual(False, remove.data['value'])", "def post(self):\n FeatureBusiness.add(request.get_json(), user_id=request.user_id)\n\n return {\"status\": 201}, 201", "def add():\n pass", "def add_unit(self):\n self.q(css=self._bounded_selector(self.ADD_BUTTON_SELECTOR)).click()", "def get_random_addition(self):\r\n try:\r\n session = self.persistence.get_session()\r\n addition = session.query(TopicAddition).order_by(func.random()).limit(1).one()\r\n return addition\r\n except NoResultFound:\r\n raise NoAdditionAvailable", "def get_or_add_dukaan():\n if request.method == \"POST\":\n payload = request.json\n # payload = change_case(payload, \"lower\")\n business = db.dukaans.find_one({\"name\": payload[\"name\"]})\n if business is not None:\n return (\n jsonify(\n {\n \"success\": False,\n \"message\": \"Business name already exists, Please choose another name.\",\n }\n ),\n 400,\n )\n\n for required_key in business_schema:\n if required_key not in payload.keys():\n return jsonify({\"message\": f\"Missing {required_key} parameter\"}), 400\n\n db.dukaans.insert_one(payload)\n return jsonify({\"success\": True, \"dukaan\": clean_dict_helper(payload)}), 201\n\n dukaans = list(db.dukaans.find({}).limit(5))\n for dukaan in dukaans:\n if len(dukaan.get(\"categories\", [])) > 0:\n dukaan[\"categories\"] = [\n db.categories.find_one({\"_id\": ObjectId(_id)})[\"name\"]\n for _id in dukaan[\"categories\"]\n ]\n ratings = list(db.ratings.find({\"business\": str(dukaan[\"_id\"])}, {\"rating\": 1}))\n if len(ratings) > 0:\n ratings_sum = sum([r[\"rating\"] for r in ratings])\n dukaan[\"avg_rating\"] = float(ratings_sum) / float(len(ratings))\n else:\n dukaan[\"avg_rating\"] = 0.0\n\n return jsonify({\"success\": True, \"dukaans\": clean_dict_helper(dukaans)})", "def resource(self):\n return self.add_resource", "def add_punteggio(self, id, punteggio):\n query = TABELLE['punteggio']['insert']\n return self.execute(query, (id, punteggio))", "async def add(ctx, pkmn_id: int):\n res = database.add_to_party(ctx.message.author, pkmn_id)\n if not res:\n ctx.send(\"**Oak**: Make sure you actually have that pokemon or if your party is not full ya scrub.\")\n return await show_party(ctx.message.author)", "def _add_utxo(self, item: UtxoIndexItem) -> None:\n raise NotImplementedError", "def add_to_wish_list(self, user_id, caption):\n with self.connection:\n return self.cursor.execute(\"INSERT INTO 'wish_list' (`user_id`, `wish_list`) VALUES(?,?)\", (user_id,caption))", "def addWorker(self, user):\n if (user != self.owner) and not self.workers.filter(pk=user.id).exists():\n self.workers.add(user)\n if 
self.prospects.filter(pk=user.id).exists():\n self.prospects.remove(user)\n if self.blacklist.filter(pk=user.id).exists():\n self.blacklist.remove(user)\n return self\n return None", "def add(self, data):\n if self._filter(data):\n id = self.db._generate_id(data)\n \n if not id == None:\n if self.db._store:\n self.db.append(id, str(data))\n print id, \"stored to\", self.db._generate_path(id)\n else:\n print id\n print data.show2()", "def cassh_add(current_user=None):\n return render_template('add.html', username=current_user['name'], \\\n logged_in=current_user['is_authenticated'])", "def get_one_meal():", "def OnAdd(self, controller):\n pass", "def get(uai):\n return BceInstitution.query.filter_by(uai=uai).first()", "def add_player(self, player):\r\n self.players[player.id] = copy.copy(player)\r\n return self.players[player.id]", "def save_admin():\n user = users.get_current_user()\n if user:\n if users.is_current_user_admin() or is_local_admin():\n added_admin_mail = request.forms.get('mail')\n added_admin = users.User(added_admin_mail)\n if added_admin:\n #return added_admin.user_id()\n #output = added_admin.nickname()+ ' ' + added_admin.user_id()\n #return output\n new_admin = Admins(parent=admin_base)\n new_admin.ref_nick = user.nickname()\n new_admin.admin_nick = added_admin.nickname()\n new_admin.admin_id = 'no id'#added_admin.user_id()\n new_admin.put()\n redirect('/admin')\n \n else:\n #return \"Не получилось\"\n output = template('admin', name=g_name, log_in_out = users.create_logout_url('/'), opt = 'Выход', user = user.nickname(), admins = admins, error=\"Неверный email\")\n return output\n else:\n redirect('/')\n else:\n redirect('/')", "def create_work_item(self):", "def add_instance(self,name):\n new = self.create_instance(name)\n self.model.append(new)\n return new", "def hw_add(bot, update, args, session=session):\n print(\"Calling hw_add\")\n if not len(args):\n bot.send_message(chat_id=update.message.chat_id,\n text=\"added nothing: you're missing an argument\")\n return False\n\n subject = args[0]\n homework = \" \".join(args[1:])\n db_actions.add_hw(subject, homework, session)\n\n bot.send_message(chat_id=update.message.chat_id, text=\"added {} : {} \".format(\n subject, homework\n ))", "def add_fund(self):\n pass", "def add_member(self, user, is_admin=False):\n # Only add members if they are not already one\n membership_count = InitiativeMembership.objects.filter(user=user, initiative=self).count()\n\n if membership_count > 0:\n raise SuspiciousOperation(\"Is already a member\")\n\n return InitiativeMembership.objects.create(user=user, initiative=self, is_admin=is_admin)", "def test_return_weekly_questions_save_weight(self):\n # create user\n user_created = self.create_user_start_program_number_days_ago(7)\n\n # TEST NEW WEIGHT DON'T EXISTS\n # data\n weekly_weight = False\n\n # call method\n context = self.new_controller.return_weekly_questions_save_weight(weekly_weight,\n user_created.id)\n\n self.assertEqual(context[\"robot_comment\"], \"Bonjour ! J'éspère que ta semaine \"\n \"s'est bien passée ? 
\"\n \"Que donne ta pesée ce matin ?\")\n self.assertTrue(context[\"robot_weekly_weight\"])\n\n # TEST ADD THE NEW WEIGHT\n # data\n weekly_weight = 58\n\n # call method\n context = self.new_controller.return_weekly_questions_save_weight(weekly_weight,\n user_created.id)\n\n last_weight = ResultsUser.objects.values_list(\"weight\").filter(user=user_created)\\\n .order_by(\"weighing_date\").last()[0]\n self.assertEqual(context[\"robot_comment\"], \"J'ai bien pris note de ton poids, \"\n \"tu trouveras un récapitulatif dans \"\n \"l'onglet résultats.\")\n self.assertEqual(last_weight, weekly_weight)\n\n # TEST AFTER ADD THE NEW WEIGHT\n # data\n weekly_weight = False\n\n # call method\n context = self.new_controller.return_weekly_questions_save_weight(weekly_weight,\n user_created.id)\n\n last_weighing_date = ResultsUser.objects.values_list(\"weighing_date\")\\\n .filter(user=user_created).order_by(\"weighing_date\").last()[0]\n one_week_after_weighing = last_weighing_date + timedelta(days=7)\n month = calendar.month_name[one_week_after_weighing.month]\n date_next_challenge = \"\" + calendar.day_name[one_week_after_weighing.weekday()] + \" \" \\\n + str(one_week_after_weighing.day) + \" \" + month + \"\"\n self.assertEqual(context[\"robot_comment\"], \"Retrouvons nous ici {} pour faire le point sur \"\n \"tes prochains résultats et voir ton nouveau \"\n \"challenge !\".format(date_next_challenge))", "def create(feature, bo=None):\n if feature is None:\n features = BOFeatures(bo)\n return(features)\n \n else:\n \n if feature.is_collection:\n return(feature)\n else:\n features = BOFeatures(bo)\n features.add(feature)\n return(features)", "def add_feature(request):\n\n r = {}\n if request.POST.get('code','000') == 'ch00seW199Er':\n # pick a random location\n featured_already = Featured.objects.all().values('location')\n locations = Location.objects.exclude(id=1).exclude(id__in=featured_already).exclude(name__iregex=r'[\w# ]+(wash|washer|dryer|dyer)[\w# ]*').filter(type=Location.EATERY)\n features = sample(locations, 10)\n i = randint(0,9)\n selected = features[i]\n tomorrow = date.today()+timedelta(1)\n \n f = Featured(location=selected, \n day=tomorrow,\n description=\"50 cents off if you transact here today\",\n amount=0.5,\n expires=datetime(tomorrow.year, tomorrow.month, tomorrow.day, 13,59))\n f.save() \n r['result'] = {'location': selected.name, 'loc_id': selected.id}\n else:\n r['result'] = '-1'\n return JSONHttpResponse(r)", "def add(self, mu, **which):\n # Handle case where mu already seen\n k, _ = self.find(mu)\n if k is not None:\n self.which[k].update(which)\n return k\n # Handle case where mu not seen\n self.mu_db.append(mu.copy())\n self.which.append(which)\n return self.offset+len(self.mu_db)-1", "def add(self, document):\n return self.db.update({document['id']: document})", "def add_item(self):\n item = LibGen.create_item()\n if not self.item_exists(item.call_number):\n self.item_list[item.call_number] = item\n print(f\"Item({item.call_number}) bas been added.\")\n else:\n print(\"This item already exists.\")", "def addUser(self, user):\r\n self.users.append(user)\r\n return len(self.users)-1", "def add_new_player(self) -> None:\n\n # 1\n for elem in self.data:\n key = ''\n value = ''\n for k, v in elem.items():\n if k == 'name':\n key = v\n else:\n value = v.get()\n self.attributs.update({key: value})\n\n # 2\n order = ct.Controls.verify_players_creation(self.attributs)\n self.master.master.list_instances_menus_tournament = Menu.update_menus_tournament(order, 
self.master)\n self.master.master.left_window.update_and_display(self.master.master.list_instances_menus_tournament)\n # 3\n if order['order'] == 'repeat_step':\n self.display()\n else:\n self.destroy_window()\n self.master.master.launch()", "def getEnergyAdded(self):\n return self.json_state.get(\"charging\").get(\"wh_energy\")", "def AddWavelet(self, wavelet_data):\n wavelet = OpBasedWavelet(wavelet_data, self)\n self._wavelets[wavelet.GetId()] = wavelet\n return wavelet", "def add_on(self) -> Optional[pulumi.Input['InstanceAddOnArgs']]:\n return pulumi.get(self, \"add_on\")", "def add_on(self) -> Optional[pulumi.Input['InstanceAddOnArgs']]:\n return pulumi.get(self, \"add_on\")", "def add_weapon(self, weapon):\n self.eq.append(weapon)\n weapon.get_sound.play()", "def addme(update: 'Update', context: 'CallbackContext'):\n user_id = update.effective_user.id\n chat_id = update.effective_chat.id\n chats = get_chat_ids(DB)\n\n if chat_id not in chats:\n update.message.reply_text('Did not work. Run this command inside the Ko-Lab group.')\n else:\n if add_member_id(DB, user_id): \n update.message.reply_text('I have added you to the whitelist. You can now send commands from outside the Ko-Lab chat.')\n else:\n update.message.reply_text('You are already on the whitelist.')", "def getAddObs(self):\n #print 'getAddOBS:',self.listener.addObs\n return self.listener.addObs", "def add_to_low_use(self, instance_id, creator):\n item = {\n \"InstanceID\": instance_id,\n \"Creator\": creator,\n \"Scheduled For Deletion\": False,\n \"EmailSent\": False\n }\n\n return self.low_use.put_item(Item=item)", "def addEnergy(self, wattHours, efficiency=0.9):\n\t\teffectiveEnergy = wattHours*efficiency\n\t\t#If this won't put the battery over its max charge\n\t\tif ((self.storedEnergy+effectiveEnergy)/self.energyCapacity)<self.maxState:\n\t\t\tself.storedEnergy += effectiveEnergy\n\t\t\treturn 0\n\t\telse:\n\t\t\taddedCapacity = self.maxState-(self.storedEnergy/self.energyCapacity)\n\t\t\tself.storedEnergy = self.maxState*self.energyCapacity\n\t\t\treturn (effectiveEnergy - addedCapacity*self.energyCapacity)", "def add_user(self, user):\n return self.ireporter_db.insert_data_users(\n user.get(\"firstname\"),\n user.get(\"lastname\"),\n user.get(\"othernames\"),\n user.get(\"username\"),\n user.get(\"email\"),\n user.get(\"phonenumber\"),\n user.get(\"is_admin\"),\n user.get(\"password\"),\n user.get(\"registered_on\")\n )", "def add(trie, word, weight):\n\ttrie.insert(word,weight)#we can do this since in my insert function it helps me update weight\n\treturn trie", "def test_add_beehive(self):\n # set_up_apiary()\n\n # Given:\n beehive_before = BeeHive.objects.count()\n\n # When:\n new_beehive = create_fake_beehive()\n\n # Then:\n assert BeeHive.objects.count() == beehive_before + 1", "def add_friend():\n\n\n user_id = session['user_id']\n add_friend = request.form.get(\"add-friend\")\n friend_id = request.form.get(\"friend_id\")\n friendship = Friendship.add_friend(user_id, friend_id)\n\n print \"This is the friend id\", friend_id\n\n return 'friend added'", "def add(self, test_unique=True, setiotid=True):\n\n # get existing sets self.iotid\n if not test_unique or not self.get_existing(self.api_tag):\n self.logger.info('payload {}'.format(self.payload()))\n resp = requests.post('{}/{}'.format(self.base_url, self.api_tag),\n auth=('write', self._password),\n json=self.payload())\n self.logger.info('response {}'.format(resp.text))\n self.logger.info('headers {}'.format(resp.headers))\n if setiotid:\n m = 
IDREGEX.search(resp.headers.get('location', ''))\n\n if m:\n iotid = m.group('id')[1:-1]\n else:\n iotid = resp.json()['@iot.id']\n\n self.setiotid(iotid)", "def add_battery():\n data = request.get_json()\n battery = battery_rent_service.add(**data)\n battery = model_to_dict(battery)\n return jsonify({'response': battery}), 200", "def add_item(self,itm):\n itms = self.get_items_list()\n if len(itms) != self.items: self.items = len(itms)\n if self.items >= self.rooms * MAX_ITEMS_PER_ROOM:\n return None\n k = itm\n x = 0\n while k in itms:\n x += 1\n k = '%s_%d'%(itm,x)\n itm_rec = SuiGallery.make_item_record(itm)\n itm_rec['z'] = self.items;\n itms[k] = itm_rec\n self.put_items_list(itms)\n self.items += 1\n return {'items':self.items,'k':k,'id':itm,'x':itm_rec['x'],'y':itm_rec['y'],'z':itm_rec['z']}", "def addapp():\n user = users.get_current_user()\n if user:\n return template('add', name=g_name, log_in_out = users.create_logout_url('/'), opt = 'Выход', user = user.nickname())\n else:\n redirect('/')", "def staff_add(request):\n username = request.params['add']\n try:\n accounts.make_staff(username)\n except accounts.NoSuchUserError:\n request.session.flash(\n _(\"User {username} doesn't exist.\".format(username=username)),\n \"error\")\n return staff_index(request)", "def add(self, mu, which):\n # Handle case where mu already seen\n k = self.find(mu)\n if k is not None:\n self.which[k].add(which)\n return k\n # Handle case where mu not seen\n self.mu_db.append(mu.copy())\n self.which.append(set([which]))\n return self.offset+len(self.mu_db)-1" ]
[ "0.6947331", "0.5965967", "0.5626294", "0.5455911", "0.53932244", "0.5364499", "0.53162444", "0.5274032", "0.5195086", "0.5140979", "0.5055691", "0.5050816", "0.504182", "0.5026168", "0.50212723", "0.498674", "0.4972312", "0.49669898", "0.4939952", "0.49216947", "0.49164072", "0.49153167", "0.49088585", "0.49037606", "0.48987365", "0.48905817", "0.48842958", "0.48767012", "0.48565602", "0.4840214", "0.48339918", "0.48309422", "0.48272014", "0.48231947", "0.48210362", "0.48200554", "0.48139274", "0.48043695", "0.4800508", "0.47918296", "0.47812256", "0.4777839", "0.47711033", "0.4765734", "0.47636375", "0.4760824", "0.47544748", "0.47526437", "0.47524813", "0.47427574", "0.47352713", "0.4716622", "0.4709478", "0.47076434", "0.46865156", "0.46669412", "0.46508738", "0.46467227", "0.46321642", "0.46289635", "0.46165746", "0.46118668", "0.4610046", "0.45979932", "0.4596542", "0.4594927", "0.4591472", "0.4590101", "0.45834944", "0.45803937", "0.45704344", "0.45684072", "0.4567588", "0.45562658", "0.4556007", "0.4554764", "0.4549526", "0.4549153", "0.45453668", "0.45451087", "0.45416436", "0.45359293", "0.45338607", "0.45315838", "0.45315838", "0.4530795", "0.45305026", "0.45286393", "0.45263845", "0.45251825", "0.45193163", "0.45113814", "0.45091313", "0.4508871", "0.45085356", "0.45066187", "0.45046774", "0.45037854", "0.45035255", "0.4502233" ]
0.61611295
1
Edits a waifu. | You must own her.
async def edit_waifu( event, name: ('str', 'Their name?'), field : (['age', 'bio', 'hair'], 'Which field to edit?'), ): key = name.casefold() try: waifu = WAIFUS[key] except KeyError: abort(f'There is no waifu named like: {name}.') if waifu.user is not event.user: abort('You can only edit waifus added by yourself.') text_input = FIELD_TO_TEXT_INPUT[field] # We auto-fill the current value text_input = text_input.copy_with(value = FIELD_TO_ATTRIBUTE[field].__get__(waifu, Waifu)) return Form( f'Editing {waifu.name}', [text_input], custom_id = f'{CUSTOM_ID_WAIFU_EDIT_BASE}{key}', )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def edit():", "def home_edituser():\n\tpass", "def update_user():", "def update_user():\n #TODO user update \n pass", "async def add_waifu():\n return WAIFU_FORM", "def update(self, user: U) -> None:\n ...", "def edit():\n database.ask(mode='single')\n F = database.check(single=True)\n if F and hasattr(F,'edit'):\n name = database[0]\n F.edit(name)", "def edit(self):\n\n pass", "def _edit_user(self):\n users = fileIO.load_json(\"users.json\")\n print(\"The list of users is as follows: \")\n for i in users:\n print(users[i][\"name\"])\n #List specific user's settings and get user id\n userID = self._list_user_settings(users)\n #Loop until valid option given\n option = False\n while not option:\n option = input(\"Please enter the setting you would like to change: \")\n if option not in users[userID]:\n option = False\n print(\"That setting is not valid.\")\n #Get input for new setting\n args = input(\"Please enter what you would like to change that setting to: \")\n #Output\n command = \"edit_user {0} {1} {2}\\r\\n\".format(userID, option, args)\n return(command)", "def DoEdit(self,event):\r\n raise UncodedError", "def update_user(id):\n pass", "def edit_user(self):\n from editWindow import EditPlayer\n self.edit = EditPlayer(self.lang, self.result_table.currentItem().text())\n self.edit.show()", "def update(\n self,\n email,\n company_name,\n location,\n job_profile,\n salary,\n username,\n password,\n security_question,\n security_answer,\n notes,\n date_applied,\n status,\n):", "def edit(self, **kwargs):\n ...", "async def chutiya (fuks):\n if not fuks.text[0].isalpha() and fuks.text[0] not in (\"/\", \"#\", \"@\", \"!\"):\n index = random.randint(0, len(memes.CHU_STRINGS) - 1)\n reply_text = memes.FUK_STRINGS[index]\n await fuks.edit(reply_text)", "def edit(item_id):\n session = current_app.config['db']\n item = session.query(WineABV).filter_by(id=item_id).one()\n if request.method == \"POST\":\n new_name = request.form['itemname']\n item.name = new_name\n try:\n session.commit()\n except exc.IntegrityError:\n session.rollback()\n flash(\"Duplicate values!\", 'danger')\n return render_template('edit_form.html', item=item)\n\n flash(\"Successfully Edited '%s'\" % (new_name,), 'success')\n return redirect(url_for('.show'))\n else:\n return render_template(template_prefix+'edit_form.html', item=item)", "def dummy():\n\t\t\tself.edit = True", "def edit_document():", "def on_edit(self, dataobj):", "async def chutiya(fuks):\n if not fuks.text[0].isalpha() and fuks.text[0] not in (\"/\", \"#\", \"@\", \"!\"):\n index = random.randint(0, len(FUK_STRINGS) - 1)\n reply_text = FUK_STRINGS[index]\n await fuks.edit(reply_text)", "def updateUser(database):\n name=str(input(\"Which user do you want to update : \"))\n usr,find=getByName(database,name)\n if not find:\n print(\"the User could not be found\")\n return\n if find:\n print(usr)\n print(\"What do you want to change :\\n1.name\\n2.field\\n3.year of study\\n4.areas of interest\\n5.Age\\n6.City\\n7.Quit\")\n choice=int(input(\"Your choice :\"))\n if choice==1:\n usr.name=input(\"Enter the new name of the user : \").lower()\n usr.lastname=input(\"Enter the new lastname of the user : \").lower()\n elif choice==2:\n usr.fieldStudy=input(\"Enter the new field of study of the user : \")\n elif choice==3: \n usr.yearStudy=int(input(\"Enter the new year of study of the user : \"))\n elif choice==4:\n nbinterest=int(input(\"how many new interests does he have? 
: \"))\n for i in range(nbinterest):\n usr.interest.append(input(\"Enter the interest of the user : \"))\n elif choice==5:\n usr.age=int(input(\"Enter the age of the user : \"))\n elif choice==6:\n usr.city=input(\"Enter the city of the user : \") \n elif choice==7:\n return\n saveDatabase(database,usr)", "def test_edit_boat(self):\n pass", "def edit_name(entry):\n entry.name = get_name()\n entry.save()\n input(\"Edit successful. \")\n return entry", "def updateBuddy(self,username,online,evilness,signontime,idletime,userclass,away):\n print \"status changed for\",username", "def update(\n self,\n email,\n company_name,\n location,\n job_profile,\n salary,\n username,\n password,\n security_question,\n security_answer,\n notes,\n date_applied,\n status,\n):", "def updaterecord(phones,username,phonenum):\r\n if username in phones:\r\n phones[username] = phonenum\r\n else:\r\n raise ValueError(\"This username are not exist\")", "async def waifu(self, ctx):\n e = discord.Embed(title=\"Here is a waifu image for you {}.\".format(ctx.author.name), color=discord.Color.magenta())\n e.set_image(url=nekos.img('waifu'))\n await ctx.send(embed=e)", "def save_admin():\n user = users.get_current_user()\n if user:\n if users.is_current_user_admin() or is_local_admin():\n added_admin_mail = request.forms.get('mail')\n added_admin = users.User(added_admin_mail)\n if added_admin:\n #return added_admin.user_id()\n #output = added_admin.nickname()+ ' ' + added_admin.user_id()\n #return output\n new_admin = Admins(parent=admin_base)\n new_admin.ref_nick = user.nickname()\n new_admin.admin_nick = added_admin.nickname()\n new_admin.admin_id = 'no id'#added_admin.user_id()\n new_admin.put()\n redirect('/admin')\n \n else:\n #return \"Не получилось\"\n output = template('admin', name=g_name, log_in_out = users.create_logout_url('/'), opt = 'Выход', user = user.nickname(), admins = admins, error=\"Неверный email\")\n return output\n else:\n redirect('/')\n else:\n redirect('/')", "def modify_user(user_data):\r\n raise NotImplementedError()", "def update():\n return 'update api in put'", "def update_meal():", "def post(self):\n user_id = request.args.get('user_id')\n lastname = request.args.get('lastname')\n return update_sukunimi(user_id, updatd_lastname=lastname)", "def test_update_work_type_name_user(self):\n # login as manager\n self.authenticate(self.user)\n\n # alter the work type\n response = self.client.patch(self.url_wt1, {\"name\": \"NewName\"})\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def whoami(bot, update):\n\n id = update.effective_message.chat.id\n bot.send_message(id, \"Tämän chätin ID on {}\".format(id))", "def edit_anything(name, what, to):\n # Just like adding something, we use the cursor, but instead of INSERT INTO, we write UPDATE.\n # WHERE determines which activity the user wants to change\n c.execute(\"UPDATE activities SET {} = (?) WHERE name=(?)\".format(what), [to, name])\n # Now we must commit the changes that happend in the database\n conn.commit()", "def hola(update, context):\n user = update.message.from_user\n update.message.reply_text('Hola! ' + user.first_name + ' :). Tu apellido es: ' + user.last_name + ', ¿cuál es tu descendencia? 
')", "def exercise_user_edits(mon_lib_srv, ener_lib):\n params_edits_str = \"\"\"\n excessive_bond_distance_limit = 10\n bond {\n action = *add delete change\n atom_selection_1 = name CB\n atom_selection_2 = name CE\n symmetry_operation = None\n distance_ideal = 4\n sigma = 0.2\n slack = None\n }\n angle {\n action = *add delete change\n atom_selection_1 = name CB\n atom_selection_2 = name CD\n atom_selection_3 = name NZ\n angle_ideal = 120\n sigma = 3\n }\n dihedral {\n action = *add delete change\n atom_selection_1 = name C\n atom_selection_2 = name CA\n atom_selection_3 = name CB\n atom_selection_4 = name CG\n angle_ideal = 90\n sigma = 10\n periodicity = 1\n }\n planarity {\n action = *add delete change\n atom_selection = name CB or name CD or name CE\n sigma = 0.2\n }\n parallelity {\n action = *add delete change\n atom_selection_1 = name N or name CA or name O\n atom_selection_2 = name CB or name CD or name NZ\n sigma = 0.027\n target_angle_deg = 0\n } \"\"\"\n master_phil = iotbx.phil.parse(\n mmtbx.monomer_library.pdb_interpretation.geometry_restraints_edits_str)\n user_phil = iotbx.phil.parse(params_edits_str)\n working_phil = master_phil.fetch(sources=[user_phil])\n # working_phil.show()\n\n wp_extract = working_phil.extract()\n # print wp_extract.bond[0].atom_selection_1\n # STOP()\n # edits = None\n geometry, xrs = make_initial_grm(mon_lib_srv, ener_lib, raw_records1, wp_extract)\n # Check the .geo file\n geo_fname = \"pdb_interpretation_tst_edits_exercise_user_edits.geo\"\n geometry.write_geo_file(file_name=geo_fname, site_labels=xrs.scatterers().extract_labels())\n with open(geo_fname, 'r') as f:\n user_suppl_count = 0\n for l in f.readlines():\n if l.startswith(\"User supplied\"):\n user_suppl_count += 1\n # Right now user-supplied planarity is missing from the .geo file.\n assert user_suppl_count == 5, \"Expected 5 user-supplied restraints, got %i in the .geo\" % user_suppl_count\n\n # initial\n assert geometry.pair_proxies().bond_proxies.simple.size() == 9\n assert geometry.pair_proxies().bond_proxies.asu.size() == 0\n assert geometry.angle_proxies.size() == 9\n assert geometry.dihedral_proxies.size() == 7\n assert geometry.planarity_proxies.size() == 1\n assert geometry.parallelity_proxies.size() == 1\n\n ubonds_simpe, ubonds_asu, uangles, udihedrals, uplanarity, uparallelity = \\\n geometry.get_user_supplied_restraints()\n assert ubonds_simpe.size() == 1\n assert ubonds_asu.size() == 0\n assert uangles.size() == 1\n assert udihedrals.size() == 1\n assert uplanarity.size() == 1\n assert uparallelity.size() == 1\n # make sure geometry stays the same\n assert geometry.pair_proxies().bond_proxies.simple.size() == 9\n assert geometry.pair_proxies().bond_proxies.asu.size() == 0\n assert geometry.angle_proxies.size() == 9\n assert geometry.dihedral_proxies.size() == 7\n assert geometry.planarity_proxies.size() == 1\n assert geometry.parallelity_proxies.size() == 1\n # test functions one by one\n simple, asu = geometry.get_bond_proxies_without_user_supplied()\n assert simple.size() == 8\n assert asu.size() == 0\n angle = geometry.get_angle_proxies_without_user_supplied()\n assert angle.size() == 8\n dihed = geometry.get_dihedral_proxies_without_user_supplied()\n assert dihed.size() == 6\n plan = geometry.get_planarity_proxies_without_user_supplied()\n assert plan.size() == 0\n par = geometry.get_parallelity_proxies_without_user_supplied()\n assert par.size() == 0\n # make sure geometry stays the same\n assert geometry.pair_proxies().bond_proxies.simple.size() == 9\n assert 
geometry.pair_proxies().bond_proxies.asu.size() == 0\n assert geometry.angle_proxies.size() == 9\n assert geometry.planarity_proxies.size() == 1\n assert geometry.parallelity_proxies.size() == 1", "def edit_UI_transaction(account):\n\t_day = read_day()\n\t_amount = read_amount()\n\t_type = read_type()\n\ttransaction_at = transaction_exists(_day, _amount, _type, account)\n\tif (transaction_at != -1):\n\t\tprint('Actualizare tranzactie...')\n\t\t_day = read_day()\n\t\t_amount = read_amount()\n\t\t_type = read_type()\n\t\tedit_transaction(transaction_at, _day, _amount, _type, account)\n\t\tprint('Tranzactie actualizata.')\n\telse:\n\t\tprint('Tranzactie inexistenta.')", "def do_edit(self, args):\n member = None\n rowid = args.split(' ')[0]\n \n # loop till we get a rowid which matches a member in the database\n while True:\n rowid = self.validateRowid(rowid)\n if rowid is None:\n rowid = input('Enter member id: ')\n continue\n \n member = self.roster.get(rowid)\n if member is None:\n print(\"No member with id of %d\" % rowid)\n # rowid will get validated again, but it's the same value\n # which already passed validation\n continue\n \n break\n \n print('Editing %s %s' % (member.first, member.last))\n print('Type new value, hit enter to keep current value, or enter spaces to clear a value')\n member.first = self.getNewValue('First name', member.first)\n member.last = self.getNewValue('Last name', member.last)\n member.introducedDate = self.getNewValue('introduced date', member.introducedDate) \n \n self.roster.update(member)", "def edit_notes(entry):\n entry.notes = get_notes()\n entry.save()\n input(\"Edit successful. \")\n return entry", "async def _e_edit(self, ctx, name, *, text):\n event = self.database.get_guild_event(ctx.guild.id, name)\n if not event:\n await ctx.send(\"That event doesn't exist. 
Maybe you meant to `add` it instead?\")\n return\n event.name = name\n event.text = text\n self.database.save_item(event)\n await ctx.send(f\"Event {name} successfully edited\")", "def user_edit(request):\n DEBUG = False\n\n if not has_permission('editUser', request.context, request):\n #print \"NOT has_permission !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n request.message = \"You do not have permissions to edit this user!\"\n raise HTTPForbidden\n\n # if no user_id in URL and not logged in, tell user to login\n\n try:\n user_id = request.matchdict['user_id']\n except KeyError, ke:\n #print ke\n return HTTPFound(location=request.route_url('not_found'))\n\n user = User.get_by_user_id(user_id)\n\n if user is None:\n msg = \"User was not founf in database.\"\n return HTTPFound(location=request.route_url('not_found'))\n\n form = Form(request, schema=UserSettingsSchema, obj=user)\n\n if 'form.submitted' in request.POST and not form.validate():\n # form didn't validate\n request.session.flash('Please check the form below for errors!')\n if DEBUG: # pragma: no cover\n print \"submitted but not validated!\"\n\n if 'form.submitted' in request.POST and form.validate():\n # ready for changing database entries!\n request.session.flash('form validated!')\n if DEBUG: # pragma: no cover\n print \"the form was submitted and validated.\"\n\n if form.data['surname'] != user.surname:\n if DEBUG: # pragma: no cover\n request.session.flash('surname was not same --> changing')\n print \"changing surname\"\n user.surname = form.data['surname']\n if form.data['lastname'] != user.lastname:\n if DEBUG: # pragma: no cover\n request.session.flash('lastname was not same --> changing')\n print \"changing lastname\"\n user.lastname = form.data['lastname']\n if form.data['email'] != user.email:\n request.session.flash('email was not same --> changing')\n user.email = form.data['email']\n if form.data['phone'] != user.phone:\n request.session.flash('phone was not same --> changing')\n user.phone = form.data['phone']\n if form.data['fax'] != user.fax:\n request.session.flash('fax was not same --> changing')\n user.fax = form.data['fax']\n if form.data['street'] != user.street:\n request.session.flash('street was not same --> changing')\n user.street = form.data['street']\n if form.data['number'] != user.number:\n request.session.flash('number was not same --> changing')\n user.number = form.data['number']\n if form.data['city'] != user.city:\n request.session.flash('city was not same --> changing')\n user.city = form.data['city']\n if form.data['postcode'] != user.postcode:\n request.session.flash('postcode was not same --> changing')\n user.postcode = form.data['postcode']\n if form.data['country'] != user.country:\n request.session.flash('country was not same --> changing')\n user.country = form.data['country']\n\n if DEBUG: # pragma: no cover\n print \"returning the form\"\n return {\n 'the_user_id': user_id,\n 'the_username': user.username,\n 'form': FormRenderer(form),\n }", "def __ui_update_person(self):\n to_update_person_id = int(input(\"Introduce the ID of the person you want to update: \"))\n updated_person_name = input(\"Updated name: \").strip()\n updated_phone_number = input(\"Updated phone number: \").strip()\n self.__person_service.service_update_person(to_update_person_id, updated_person_name, updated_phone_number)\n print(\"Person successfully updated!\\n\")", "def test_none_admin_edit(self):\n\n with self.client:\n token = self.customer()\n id = 1\n response = 
self.client.put('api/v1/meals/{}'.format(id),\n data=json.dumps(dict(\n meal_name=\"chips\",\n price=15000\n )),\n content_type='application/json',\n headers=({\"token\": token}))\n data = json.loads(response.data.decode())\n self.assertEqual(data.get('message'),\n \"Customer is not authorized to access this page\")\n self.assertEqual(response.status_code, 401)", "def edit(tesserae, tessera_id):\n try:\n return tesserae.edit(tessera_id)\n except TesseraError, e:\n sys.stderr.write(\"Error: %s\\n\", str(e))\n return False", "def test_update_user(self):\n pass", "def check_action_edit_form(self, staff_ob, number, new_info):\n \n print(\"\\nS Save \\nB Back\\n\")\n action_str = self.choose_action([\"s\",\"b\"])\n while action_str == False:\n action_str = self.choose_action([\"s\", \"b\"])\n\n if action_str == \"s\":\n if number == 1:\n if staff_ob.role == self.PILOT.capitalize():\n updated_staff_ob = PilotsModel(staff_ob.ssn, staff_ob.name, staff_ob.role, staff_ob.rank, staff_ob.license_type, new_info, staff_ob.mobile_number, staff_ob.email)\n else:\n updated_staff_ob = CabinCrewModel(staff_ob.ssn, staff_ob.name, staff_ob.role, staff_ob.rank, new_info, staff_ob.mobile_number, staff_ob.email)\n elif number == 2:\n if staff_ob.role == self.PILOT.capitalize():\n updated_staff_ob = PilotsModel(staff_ob.ssn, staff_ob.name, staff_ob.role, staff_ob.rank, staff_ob.license_type, staff_ob.address, new_info, staff_ob.email)\n else:\n updated_staff_ob = CabinCrewModel(staff_ob.ssn, staff_ob.name, staff_ob.role, staff_ob.rank, staff_ob.address, new_info, staff_ob.email)\n elif number == 3:\n if staff_ob.role == self.PILOT.capitalize():\n updated_staff_ob = PilotsModel(staff_ob.ssn, staff_ob.name, staff_ob.role, staff_ob.rank, staff_ob.license_type, staff_ob.address, staff_ob.mobile_number, new_info)\n else:\n updated_staff_ob = CabinCrewModel(staff_ob.ssn, staff_ob.name, staff_ob.role, staff_ob.rank, staff_ob.address, staff_ob.mobile_number, new_info)\n \n if updated_staff_ob.role == self.PILOT.capitalize():\n self.llapi.update_new_pilot_information(updated_staff_ob)\n else:\n self.llapi.update_new_crew_member_information(updated_staff_ob)\n\n elif action_str == \"b\":\n return", "async def _edit_game_given(self, game_id, given):\n\n await self.bot.db.execute(\n \"\"\"\n UPDATE giveaways_game\n SET given = :given\n WHERE game_id = :game_id\n \"\"\",\n {\n 'game_id': game_id,\n 'given': int(given),\n }\n )\n\n await self.bot.db.commit()", "def edit_person():\n # get person name from user\n responses = accept_inputs([\"Person's name\"])\n person_name = responses[\"Person's name\"]\n # check for existence\n results = query_with_results(\"select * from person where name = ?\", [person_name])\n if len(results) == 0:\n print(\"No person found with name '%s'.\" % person_name)\n return\n else:\n # get id of person\n id = query_with_results(\"select id from person where name = ?\", [person_name])[0][0]\n # the task exists, so ask the user for the new description\n responses = accept_inputs([\"New name\"])\n # update db\n query_no_results(\"update person set name = ? 
where id = ?\", [responses[\"New name\"], id])\n print(\"Person with old name '%s' changed to '%s'.\" % (person_name, responses[\"New name\"]))", "def test_update(self):\n\n\n username,userpass = self.testdata.find_account_for('toolsubmitter')\n\n self.utils.account.login_as(username,userpass)\n\n self.contribtool.update(TOOLNAME,username,userpass)", "def fusion_api_edit_user(self, body, uri, api=None, headers=None):\n return self.user.update(body, uri, api, headers)", "async def chutiya(chus):\n if not chus.text[0].isalpha() and chus.text[0] not in (\"/\", \"#\", \"@\", \"!\"):\n index = random.randint(0, len(CHU_STRINGS) - 1)\n reply_text = CHU_STRINGS[index]\n await chus.edit(reply_text)", "def update():", "def update():", "def set_admin():\n print(\"Insert admin email:\")\n return input()", "def save_edit(self):\r\n self.driver.find_element_by_xpath('//*[@id=\"vnos\"]/span[1]/div/a').click()", "def change():", "def edit(self,item=None):\r\n raise AbstractError\r\n return False", "def edit_profile(self, name, username, email):\n return self.app.post('/_editProfile', data = dict(\n name = name,\n username = username,\n email = email\n ), follow_redirects = True)", "def save():\n user = users.get_current_user()\n if user:\n new_app = Applic(parent=base_key)\n new_app.user = user.user_id()\n new_app.username = user.nickname()\n new_app.content = request.forms.get('content')\n new_app.title = request.forms.get('title') \n new_app.put()\n redirect('/')\n else:\n redirect('/')", "def test_040_update_user(self):\n\n testflow.step(\"Updating user %s\", TEST_USER2)\n assert USER_CLI.run(\n 'edit',\n TEST_USER2,\n attribute='firstName=userX2',\n )[0]", "def update( ):\r\n pass", "async def name(self, ctx:utils.Context, *, username:str):\n\n if len(username) > 32:\n await ctx.send('That username is too long.')\n return\n await self.bot.user.edit(username=username)\n await ctx.send('Done.')", "def test_change_user(self):\n self.go200('minus_upload')\n self.formfile('minus_upload', 'file', AUDIO_FILE)\n \n self.config(\"readonly_controls_writeable\", 1)\n self.fv('minus_upload', 'user', '2')\n self.submit200()\n self.config(\"readonly_controls_writeable\", 0)\n minus = MinusRecord.objects.all()[0]\n self.url('minus_detail', [minus.author, minus.id])\n self.assert_equal(minus.user, self.superuser)", "def update_after(channel, action, where, ebrios, user):\n if where:\n trampa = ['Office1', 'Office2']\n trampa.remove(where)\n response = ''\n if not ebrios[where]:\n if user in ebrios[trampa[0]]:\n response = \"Pero si estás en el otro...\"\n else:\n response = 'No hay after armado, podrías armar uno <@' + user + '>'\n return postea(channel, response)\n elif action in yeah_action:\n if user in ebrios[trampa[0]]:\n response = \"Ya estás en el otro after, vas a meter viajecito?\"\n elif user not in ebrios[where]:\n ebrios[where].append(user)\n if(not bardear(channel, user)):\n response = '<@' + user + '> se suma! :beer:'\n else:\n response = 'Ya estabas en la lista...'\n elif action in boo_action:\n if user in ebrios['Office1']:\n where = 'Office1'\n ebrios[where].remove(user)\n response = '<@' + user + '> se baja... :snowflake:'\n elif user in ebrios['Office2']:\n where = 'Office2'\n ebrios[where].remove(user)\n response = '<@' + user + '> se baja... :snowflake:'\n else:\n response = 'Ni estás en la lista, <@' + user + '>'\n if not ebrios[where]:\n response += '\nBueh, re ortibas, no queda nadie! 
Se cancela!'\n else:\n return postea(channel, 'No entendรญ')\n postea(channel, response)\n return ebrios[where]", "def edit(self, *args, **kw):\n id_tipo_item = UrlParser.parse_id(request.url, \"tipositems\")\n url_action = \"../\"\n \n pp = PoseePermiso('redefinir tipo item',\n id_tipo_item=id_tipo_item)\n if not pp.is_met(request.environ):\n flash(pp.message % pp.nombre_permiso, 'warning')\n redirect(url_action)\n tmpl_context.widget = self.edit_form\n value = self.edit_filler.get_value( \\\n values={'id_atributos_por_tipo_item': int(args[0])})\n value['_method'] = 'PUT'\n page = \"Atributo {nombre}\".format(nombre=value[\"nombre\"])\n return dict(value=value, \n page=page, \n atras=url_action)", "def edit(user_id):\n if user_id != current_user.id:\n return abort(403)\n\n user = get_user(user_id)\n form = EditForm(obj=user)\n form.email.data = user.email\n\n if form.validate_on_submit():\n password = form.password.data\n username = form.username.data\n\n save_result = edit_user(user_id, password, username, user.active)\n user = save_result['entry']\n form = EditForm(request.form, obj=save_result['entry'])\n form.email.data = user.email\n return redirect(url_for('.index'))\n \n return render_template('users/edit.html'\n ,form=form\n ,user=user\n ,t=t\n ,m=m)", "def test_5_editautor(self):\n for i in self.app._listarAutores():\n self.app.editAutor(id_autor=i[\"id_autor\"],\n nome=\"%s-edited\" % i[\"nome\"],\n email=\"%s-edited\" % i[\"email\"],\n grupo=\"%s-edited\" % i[\"grupo\"])", "def allow_to_edit(user):\n return allow_to_edit_well(user)", "def update_adminhod_view(request):\r\n # get current adminhod.\r\n adminhod = get_object_or_404(AdminHOD, user__id=request.user.id) \r\n # display adminhod's initial data.\r\n user_form = UpdateUserForm(\r\n request.POST or None,\r\n staff_student=adminhod, \r\n instance=adminhod,\r\n initial={'full_name': adminhod.user.full_name,\r\n 'email': adminhod.user.email, \r\n })\r\n if request.method == 'POST':\r\n if user_form.is_valid():\r\n # update adminhod.\r\n adminhod.user.full_name = user_form.cleaned_data.get(\"full_name\")\r\n adminhod.user.email = user_form.cleaned_data.get(\"email\")\r\n adminhod.user.save()\r\n # Display success message.\r\n messages.success(request, f'Your profile has been updated successfully.', extra_tags='update-adminhod-profile')\r\n return redirect('adminhod:update-adminhod-profile') \r\n context = {'user_form':user_form}\r\n return render(request, 'adminhod/update_adminhod_profile.html', context)", "def mod_user(self, username, data):\n headers = {\"user-agent\": self.u_agent}\n req_url = self.normalize_admin_url(u\"users/{}\".format(username))\n res = requests.put(\n req_url,\n headers=headers,\n auth=self.auth,\n data=json.dumps(data),\n verify=False,\n )\n if res.status_code == 200:\n return Response(0, u\"User {} has been modified\".format(username))\n else:\n return Response(res.status_code, res)", "def update_item(self, id: str, user: User, **kwargs) -> None:", "def modificacion(self, socio):\n\n aux = self.buscar(socio.id)\n print('El socio a modificar en capa de datos:', aux.id, aux.nombre)\n\n if aux == None:\n return False\n else:\n #persona = session.query(Socio).filter(Socio.dni == aux.id)\n aux.nombre = socio.nombre\n aux.apellido = socio.apellido\n aux.dni = socio.dni\n\n session.commit()\n\n return aux", "def update(request):\n return 0", "def update(challenge, request):\n challenge.name = request.form['name']\n challenge.description = request.form['description']\n challenge.value = 
int(request.form.get('value', 0)) if request.form.get('value', 0) else 0\n challenge.max_attempts = int(request.form.get('max_attempts', 0)) if request.form.get('max_attempts', 0) else 0\n challenge.unlock_at = int(request.form.get('unlock_at', 0)) if request.form.get('unlock_at', 0) else 0\n challenge.category = request.form['category']\n challenge.hidden = 'hidden' in request.form\n db.session.commit()\n db.session.close()", "def update(id):\n autor = get_autor(id)\n error = None\n if request.method == 'POST':\n nome = request.form['nome']\n\n if not nome:\n error = 'Nome รฉ obrigatรณrio.'\n print(autor)\n if nome == autor['nome']:\n return redirect(url_for('autor.index'))\n\n else:\n try:\n if verifica_autor_bd(nome):\n error = 'Este autor jรก estรก registrado!'\n else:\n db.insert_bd('UPDATE autor set nome = \"%s\" where id = %d' % (nome, id))\n return redirect(url_for('autor.index'))\n except:\n return render_template('404.html')\n\n return render_template('autor/update.html', autor=autor, error=error)", "def modificationUsers(database):\n print(\"What do you want to do?\\n1. Insert a user\\n2.delete an user\\n3.Update an users\\n4. Quit\")\n choice=int(input(\"Your choice :\"))\n if choice==1:\n AddUser(database)\n elif choice==2:\n DelteUser(database)\n elif choice==3: \n updateUser(database)\n elif choice==4:\n return", "def update_challenger(request, game, name):\r\n ttt = TicTacToeGame.objects.get(id=int(game))\r\n ttt.player = name\r\n ttt.save()\r\n return HttpResponse(200)", "def update_entry(self, user, entry, change):\r\n self.curs.execute(f\"\"\"UPDATE {user} SET password = ? WHERE application = ? \"\"\", (change, entry))\r\n self.conn.commit()", "def moglyv_stud(update, context):\n #update.callback_query.message.reply_text('ะฃ ะฝะฐั ั” ะฑะฐะณะฐั‚ะพ ั†ั–ะบะฐะฒะธั… ะผะพะถะปะธะฒะพัั‚ะตะน ะดะปั ัั‚ัƒะดะตะฝั‚ั–ะฒ. ะ— ั‡ะพะณะพ ะฟะพั‡ะฝะตะผะพ? ')\n \n kb_moglyv_stud = [[InlineKeyboardButton(\"ะŸั€ะพั”ะบั‚ะฝะต ะฝะฐะฒั‡ะฐะฝะฝั\",callback_data = \"proekt_nav\")],\n [InlineKeyboardButton(\"ะ”ัƒะฐะปัŒะฝะฐ ะพัะฒั–ั‚ะฐ\",callback_data = \"du_osvita\")],\n [InlineKeyboardButton(\"ะŸั€ะฐั†ะตะฒะปะฐัˆั‚ัƒะฒะฐะฝะฝั\",callback_data = \"pracevl\")],\n [InlineKeyboardButton(\"ะŸั€ะฐะบั‚ะธะบะฐ\",callback_data = \"prakt\")]]\n \n\n \n reply = InlineKeyboardMarkup(kb_moglyv_stud)\n \n update.callback_query.message.reply_text('ะฃ ะฝะฐั ั” ะฑะฐะณะฐั‚ะพ ั†ั–ะบะฐะฒะธั… ะผะพะถะปะธะฒะพัั‚ะตะน ะดะปั ัั‚ัƒะดะตะฝั‚ั–ะฒ. 
ะ— ั‡ะพะณะพ ะฟะพั‡ะฝะตะผะพ?', reply_markup = reply)", "def editarCadastro(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "async def uwu(self, ctx, *, message):\n uwus = ['UwU', 'xwx', 'DwD', 'รšwรš', 'uwu', 'โ˜†wโ˜†', 'โœงwโœง',\n 'โ™ฅwโ™ฅ', '๏ธ uw ๏ธ u', '(uwu)', 'OwO', 'owo', 'Owo', 'owO', '( อกยฐ อœส– อกยฐ)']\n res = message.replace(\"r\", \"w\").replace(\n \"l\", \"w\").replace(\"L\", \"W\").replace(\"R\", \"W\")\n res = res.replace(\"the \", \"da \").replace(\n \"The \", \"Da \").replace(\"THE \", \"DA \")\n res = res.replace(\"th\", \"d\").replace(\"TH\", \"D\")\n res = res.replace(\"\\n\", \" \" + random.choice(uwus) + \"\\n\")\n # and send one \"as\" da usew who invoked da command รšwรš\n await ctx.send(f\"{res + ' ' + random.choice(uwus)}\")", "def edit(self, hardware_id, userdata=None, hostname=None, domain=None,\r\n notes=None):\r\n\r\n obj = {}\r\n if userdata:\r\n self.hardware.setUserMetadata([userdata], id=hardware_id)\r\n\r\n if hostname:\r\n obj['hostname'] = hostname\r\n\r\n if domain:\r\n obj['domain'] = domain\r\n\r\n if notes:\r\n obj['notes'] = notes\r\n\r\n if not obj:\r\n return True\r\n\r\n return self.hardware.editObject(obj, id=hardware_id)", "def _edit_setting(self):\n settings = fileIO.load_json(\"settings.json\")\n self._list_settings(settings=settings)\n option = False\n while not option: #While loop until valid setting given\n option = input(\"Please type the setting you would like to change: \")\n if option not in settings:\n option = False\n newSetting = input(\"Please enter what you would like to change that setting to: \")\n command = \"edit_setting {0} {1}\".format(option, newSetting)\n return(command)", "async def update_hacks_content(self, attacker_id: int) -> None:\n\n mycursor, db = await the_database()\n await mycursor.execute(\"UPDATE SlothSkills SET content = 'virus' WHERE user_id = %s\", (attacker_id,))\n await db.commit()\n await mycursor.close()", "def opinion_edit(\n opinion,\n decision,\n type_,\n lsd,\n notes,\n force,\n user,\n):\n if user:\n try:\n wikiuser = db.mediawiki.MediaWikiUser.get(\n user_name=user if user else get_user()\n )\n except pw.DoesNotExist:\n raise click.BadParameter(\n \"Unknown user '%s'. 
Supply a valid MediaWiki username with -u <user>.\"\n % user,\n param_hint=\"user\",\n )\n opinion.user = wikiuser\n\n if decision:\n opinion.decision = decision\n\n if type_:\n opinion.type = type_\n\n if lsd:\n opinion.lsd = lsd\n\n if notes:\n opinion.notes = notes\n\n opinion.last_edit = arrow.utcnow().int_timestamp\n\n if force:\n opinion.save()\n else:\n click.echo(\"Edited opinion:\\n\")\n click.echo(format_opinion(opinion))\n if click.confirm(\"Commit changed opinion?\"):\n opinion.save()\n click.echo(\"Success.\")\n else:\n click.echo(\"Aborted.\")", "def change_datta(self):\n column = \"\"\n while column not in ['lastname', 'firstname', 'pseudo', 'email', 'age', 'password']:\n column = input(\"entry champ to change \\n[lastname][firstname], [pseudo], [email], [age], [password]\")\n datta = input(\"enter new datta:\")\n self.user_choice.initialize_connection()\n self.user_choice.cursor.execute(\"UPDATE users set \" + column + \" = %s WHERE pseudo = %s;\", (datta, self.pseudo,))\n self.user_choice.connection.commit()\n self.user_choice.close_connection()", "def editRecord(self):\n selectedData = self.controller.chooseRecord(\"Enter the record number: \") - 1\n print(self.dto.getRecord()[selectedData].__dict__)\n if self.controller.confirmMsg(\"Do you want to edit this data? (y/n): \") == \"y\":\n self.controller.modifyData(self.dto.getRecord()[selectedData])\n print(\"Record edited.\")", "def mailissue(request):\n if not request.issue.edit_allowed:\n if not IS_DEV:\n return HttpTextResponse('Login required', status=401)\n issue = request.issue\n msg = _make_message(request, issue, '', '', True)\n issue.put()\n msg.put()\n\n return HttpTextResponse('OK')", "def umrechnen(self):\n\n self.setHTML(\n self.strat.change(self.ui.betragInput.value(), self.ui.waehrungInput.text(), self.ui.zielInput.text()))\n\n self.ui.statusLabel.setText(\"OK\")", "def update_user(self, userId, newName, newPhone):\n\n try:\n query = \"update user set userName = '{}', phone='{}' where userId ={}\".format(newName, newPhone, userId)\n print(query)\n cur = self.con.cursor()\n cur.execute(query)\n self.con.commit()\n\n logger.info(\"updated\")\n except Exception as e:\n logger.error(\"Error occured at data Update \", e)", "async def univsaye(cowmsg):\n if not cowmsg.text[0].isalpha() and cowmsg.text[0] not in (\"/\", \"#\", \"@\", \"!\"):\n arg = cowmsg.pattern_match.group(1).lower()\n text = cowmsg.pattern_match.group(2)\n\n if arg == \"cow\":\n arg = \"default\"\n if arg not in cow.COWACTERS:\n return\n cheese = cow.get_cow(arg)\n cheese = cheese()\n\n await cowmsg.edit(f\"`{cheese.milk(text).replace('`', 'ยด')}`\")", "async def univsaye(cowmsg):\n if not cowmsg.text[0].isalpha() and cowmsg.text[0] not in (\"/\", \"#\", \"@\", \"!\"):\n arg = cowmsg.pattern_match.group(1).lower()\n text = cowmsg.pattern_match.group(2)\n\n if arg == \"cow\":\n arg = \"default\"\n if arg not in cow.COWACTERS:\n return\n cheese = cow.get_cow(arg)\n cheese = cheese()\n\n await cowmsg.edit(f\"`{cheese.milk(text).replace('`', 'ยด')}`\")", "async def univsaye(cowmsg):\n if not cowmsg.text[0].isalpha() and cowmsg.text[0] not in (\"/\", \"#\", \"@\", \"!\"):\n arg = cowmsg.pattern_match.group(1).lower()\n text = cowmsg.pattern_match.group(2)\n\n if arg == \"cow\":\n arg = \"default\"\n if arg not in cow.COWACTERS:\n return\n cheese = cow.get_cow(arg)\n cheese = cheese()\n\n await cowmsg.edit(f\"`{cheese.milk(text).replace('`', 'ยด')}`\")", "def test_update_self_fail(self):\n new_user = self.create_user('1')\n url = '/0/chefs/' + 
str(new_user.pk)\n\n headers = self.login()\n resp = self.client.put(url, **headers)\n self.assertInvalidCredentials(resp)", "def edit_customer(customer_id, password, name, email, phone):\n with MY_CONNECTION as connection:\n connection.execute(\n \"\"\"\n UPDATE Customers\n SET password=?, customer_name=?, phone=?, email=?\n WHERE id_customer=?\n \"\"\",\n (password, name, phone, email, customer_id))", "def _do_setze_aktiven_spruch(self, chat_id, user_id, args, update):\n \n keyboard = self.build_inline_keyboard_active(user_id)\n if keyboard == None:\n self.tclient.send_message('Ich habe noch keinen Nasenspruch von dir gespeichert.', user_id)\n return\n self.tclient.send_message('Lass dir alle deine gespeicherten Sprรผche mit /alle_meine_sprueche anzeigen.\\nDein aktiver Spruch ist der, der mittels /mein_Spruch ausgegeben wird. Welchen Spruch mรถchtest du als aktiven Spruch auswรคhlen?', user_id, keyboard)", "def test_updating_dietitian_account(self):\n \n form_data = {\"fname\": \"Jill\", \"lname\": \"Jones\", \n \"email\": \"jill23@gmail.com\", \"street-address\": \"33 Blue St\", \n \"city\": \"San Francisco\", \"state\": \"CA\", \"zipcode\": \"43223\"}\n\n update_dietitian_account(1, form_data)\n\n dietitian = Dietitian.query.get(1)\n self.assertEqual(\"Jill\", dietitian.fname)", "def update_data(wname,uname,pword):\n try:\n query = '''UPDATE Password SET username = ?, pass = ? WHERE website = ?'''\n cur.execute(query,(uname,pword,wname))\n conn.commit()\n except Exception as e:\n print(e)", "async def _c_edit(self, ctx, name, *, text):\n if not self.database.get_guild_command(ctx.guild.id, name):\n await ctx.send(\"That command doesn't exist. Maybe you meant to `add` it instead?\")\n return\n self.database.save_item(sql.GuildCommand((ctx.guild.id, name, text)))\n await ctx.send(f\"Command {name} successfully edited\")" ]
[ "0.703681", "0.67200714", "0.6531209", "0.6521494", "0.64730537", "0.6406972", "0.6234079", "0.61912", "0.61450255", "0.60919994", "0.60782635", "0.59584224", "0.59353", "0.5903634", "0.5852486", "0.5777315", "0.57746786", "0.5765526", "0.5761504", "0.57450265", "0.57444936", "0.573084", "0.5703039", "0.5681816", "0.56401265", "0.5632136", "0.56269354", "0.5626748", "0.5609824", "0.5601811", "0.55998945", "0.5571735", "0.5567511", "0.55527776", "0.5527012", "0.55242264", "0.55207235", "0.55201286", "0.5513267", "0.551229", "0.5511593", "0.5493551", "0.5481621", "0.5471315", "0.54628724", "0.5459702", "0.5449241", "0.54454225", "0.5429525", "0.5424469", "0.54224235", "0.54210037", "0.54191494", "0.54191494", "0.54163957", "0.5415862", "0.5404128", "0.53962123", "0.5395389", "0.5375998", "0.5375051", "0.5367072", "0.53664404", "0.53634965", "0.5355284", "0.53476954", "0.53422755", "0.53287727", "0.53255993", "0.5321737", "0.5314794", "0.53072554", "0.53019476", "0.53015995", "0.5301164", "0.5299228", "0.52991116", "0.52959234", "0.52944297", "0.52896124", "0.5284057", "0.5281159", "0.5280051", "0.526892", "0.5266804", "0.5266677", "0.5264757", "0.52599293", "0.52480793", "0.52410674", "0.52408844", "0.5240557", "0.5240557", "0.5240557", "0.5236961", "0.52368325", "0.52364045", "0.5232074", "0.52283424", "0.5222012" ]
0.7640311
0
Shows the name and link of the given custom emoji.
async def show_emoji(
    emoji_name: str
):
    emoji = parse_emoji(emoji_name)
    if emoji is None:
        abort('Please give an emoji')

    if emoji.is_unicode_emoji():
        abort('Cannot link unicode emojis.')

    return f'**Name:** {emoji} **Link:** {emoji.url}'
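For context on what the snippet above treats as a "custom" emoji: in Discord message content a custom emoji arrives as a mention of the form <:name:id> (or <a:name:id> when animated), while a unicode emoji is just the raw character. A minimal, framework-free sketch of that distinction follows; the helper is my own illustration, using only the standard library rather than the parse_emoji/abort helpers assumed above:

import re

# Matches a Discord custom-emoji mention such as <:thumbsup:1234> or <a:party:5678>.
CUSTOM_EMOJI_RP = re.compile(r'<(a)?:(\w+):(\d+)>')

def custom_emoji_url(emoji_mention):
    # Returns the CDN link for a custom-emoji mention; None means the input
    # was not a custom-emoji mention (e.g. a plain unicode emoji).
    match = CUSTOM_EMOJI_RP.fullmatch(emoji_mention)
    if match is None:
        return None
    animated, name, emoji_id = match.groups()
    extension = 'gif' if animated else 'png'
    return f'https://cdn.discordapp.com/emojis/{emoji_id}.{extension}'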
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def emoji(self, ctx):\n emb = discord.Embed(colour=self.color)\n emb.add_field(name='Usage', value=f'```{self.bot.command_prefix}emoji <emojiname>```')\n await ctx.message.edit(embed=emb)", "def is_custom_emoji(self):\n ...", "async def getemoji(self, ctx):\n pass", "def emoji(self):\n return self._manager.get_emoji(self.name)", "async def emojireact(self, ctx):\n if ctx.invoked_subcommand is None:\n guild = ctx.message.guild\n guild_emoji = await self.config.guild(guild).guild()\n unicode_emoji = await self.config.guild(guild).unicode()\n if ctx.channel.permissions_for(ctx.me).embed_links:\n em = discord.Embed(colour=discord.Colour.blue())\n em.title = _(\"Emojireact settings for \") + guild.name\n if guild_emoji:\n em.add_field(name=_(\"Server Emojis \"), value=str(guild_emoji))\n if unicode_emoji:\n em.add_field(name=_(\"Unicode Emojis \"), value=str(unicode_emoji))\n if len(em.fields) > 0:\n await ctx.send(embed=em)\n else:\n msg = _(\"Emojireact settings for \") + guild.name + \"\\n\"\n if guild_emoji:\n msg += _(\"Server Emojis \") + str(guild_emoji) + \"\\n\"\n if unicode_emoji:\n msg += _(\"Unicode Emojis \") + str(unicode_emoji) + \"\\n\"\n await ctx.send(msg)", "def is_unicode_emoji(self):\n ...", "def test__Emoji__is_custom_emoji():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_false(emoji.is_custom_emoji())\n \n emoji = Emoji()\n vampytest.assert_true(emoji.is_custom_emoji())", "async def openmoji(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"openmoji\", emoji)", "def emoji(self, new_emoji):\n self._manager._items[self.name][\"emoji\"] = new_emoji", "def what_means_emoji(emoji: str) -> str:\n try:\n return unicodedata.name(emoji)\n except TypeError:\n return \"Not found\"", "async def emoji(self, ctx, *, text):\n await ctx.message.delete()\n new_msg = \"\"\n for char in text:\n if char.isalpha():\n new_msg += char_to_emoji(char) + ' '\n elif char == ' ':\n new_msg += ' '\n elif char.isspace():\n new_msg += char\n\n if len(new_msg):\n await ctx.send(new_msg)", "def what_means_emoji(emoji):\r\n try:\r\n return unicodedata.name(emoji)\r\n except TypeError:\r\n return \"Not found\"", "def get_emoji(self, icon):\n emojis = self.config['emojis']\n emoji = emojis.get(icon, '')\n return emoji", "async def facebook(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"facebook\", emoji)", "def show(self) -> str:\n return f'[{self.font}]{self.text}[{self.font}]' if self.font else self.text", "async def unicodename(self, ctx, glyph):\n if len(glyph) > 5:\n await ctx.send(\"The input must be a single unicode (non-custom) emoji.\")\n return\n await ctx.send(inline(''.join(f'\\\\N{{{unicodedata.name(c)}}}' for c in glyph)))", "async def apple(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"apple\", emoji)", "def __repr__(self) -> str:\n return \"<dumbEmoji-\" + (\"id\" if self.isID else \"unicode\") + \":\" + (str(self.id) if self.isID else self.unicode) + \">\"", "async def getemojiid(ctx, emoji: discord.Emoji):\n return await ctx.send(f\"{emoji} - `{emoji}`\")", "async def mozilla(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"mozille\", emoji)", "async def emojis(self, ctx):\n\n\t\tawait self.message_leaderboard(ctx, \"emojis\")", "def showEmoticonList(self):\n print \"Guess what? No emoticons. 
But I'll put in a random one for you\"\n self.appendImageAtCursor(\"throbber.gif\")", "def find_emoji(term: str) -> None:\n term = term.lower()\n emoji_mapping = _make_emoji_mapping()\n\n for emoji, name in emoji_mapping.items():\n if term in name:\n print(f\"{name.strip().title():>42} | {emoji}\")\n else:\n print(\"no matches\")", "async def emojidex(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"emojidex\", emoji)", "async def htc(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"htc\", emoji)", "def select_emoji():\n Selects\n try:\n emoji_icon = driver.find_element_by_xpath('//*[@id=\"content\"]/div/div/div/div[2]/div/div/div[2]/div[3]/div/div/div/div/div[5]/div[3]')\n emoji_icon.click()\n emojis = driver.find_elements_by_class_name('emoji-mart-emoji')\n emojis[0].click()\n click_on('type here')\n wait()\n except Exception as e:\n return \"Error: \" + str(e)\n return \"Success\"", "async def message(description = None, **kwargs):\n if not kwargs.get(\"color\"):\n kwargs[\"color\"] = discord.Color(0x82b1ff)\n\n return discord.Embed(type = \"rich\",\n description = description,\n **kwargs)", "async def charinfo(self, ctx: Context, *, characters: str) -> None:\n match = re.match(r\"<(a?):(\\w+):(\\d+)>\", characters)\n if match:\n embed = Embed(\n title=\"Non-Character Detected\",\n description=\"Only unicode characters can be processed, but a custom Discord emoji \" \"was found. Please remove it and try again.\",\n )\n embed.colour = Color.red()\n await ctx.send(embed=embed)\n return\n\n if len(characters) > 25:\n embed = Embed(title=f\"Too many characters ({len(characters)}/25)\")\n embed.colour = Color.red()\n await ctx.send(embed=embed)\n return\n\n def get_info(char: str) -> Tuple[str, str]:\n digit = f\"{ord(char):x}\"\n if len(digit) <= 4:\n u_code = f\"\\\\u{digit:>04}\"\n else:\n u_code = f\"\\\\U{digit:>08}\"\n url = f\"https://www.compart.com/en/unicode/U+{digit:>04}\"\n name = f\"[{unicodedata.name(char, '')}]({url})\"\n info = f\"`{u_code.ljust(10)}`: {name} - {char}\"\n return info, u_code\n\n charlist, rawlist = zip(*(get_info(c) for c in characters))\n\n embed = Embed(description=\"\\n\".join(charlist))\n embed.set_author(name=\"Character Info\")\n\n if len(characters) > 1:\n embed.add_field(name=\"Raw\", value=f\"`{''.join(rawlist)}`\", inline=False)\n\n await ctx.send(embed=embed)", "def display_fruit(self):\n self.window.addch(self.fruit_position[1], self.fruit_position[0], self.FRUIT_CHAR)", "def disp_msg(msg):\n from x84.bbs import getterminal, echo\n term = getterminal()\n echo(u''.join((u'\\r\\n\\r\\n',\n term.bold_yellow('%s ' % (msg,),),\n term.yellow_reverse_bold(u'...'),)))", "def test__parse_emoji__coloned_builtin_name():\n emoji = BUILTIN_EMOJIS['heart']\n text = f':{emoji.name}:'\n\n parsed_emoji = parse_emoji(text)\n vampytest.assert_is(emoji, parsed_emoji)", "def show_on_screen(self, string, location, font='Arial', font_size=20, colour=WHITE):\n msg = pygame.font.SysFont(font, font_size).render(str(string), True, colour)\n self.screen.blit(msg, location)", "async def microsoft(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"microsoft\", emoji)", "async def google(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"google\", emoji)", "async def say(self, ctx, *, message):\n message = self.emojify(message)\n await ctx.send(message)", "def label_to_emoji(label):\n return emoji.emojize(emoji_dictionary[str(label)], use_aliases=True)", "def label_to_emoji(label):\n return emoji.emojize(emoji_dictionary[str(label)], 
use_aliases=True)", "def label_to_emoji(label):\n return emoji.emojize(emoji_dictionary[str(label)], use_aliases=True)", "def show_text(text, colour):\n message = font_style.render(text, True, colour)\n dis.blit(message, [game_size_x/2, game_size_y/2])", "def label_to_emoji(label):\n return emoji.emojize(emoji_dictionary[str(label)], use_aliases=True)", "def emu_print(text):\n print \"%s %s\" % (EMU_PRINT_PREFIX, text)", "def replace_emojis(text, replace_with=\"_EMOJI_\"):\n return RE_EMOJI.sub(replace_with, text)", "def info(msg):\n click.secho(msg, fg='blue')", "def isUnicodeEmoji(c : str) -> bool:\n return c in UNICODE_EMOJI", "def test__Emoji__as_emoji():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_instance(emoji.as_emoji, str)\n \n emoji = Emoji()\n vampytest.assert_instance(emoji.as_emoji, str)", "def de_emojify(self, tweet):\n regrex_pattern = re.compile(\n pattern=\"[\"\n \"\\U0001F600-\\U0001F64F\" # emoticons\n \"\\U0001F300-\\U0001F5FF\" # symbols & pictographs\n \"\\U0001F680-\\U0001F6FF\" # transport & map symbols\n \"\\U0001F1E0-\\U0001F1FF\" # flags (iOS)\n \"\\U00002500-\\U00002BEF\" # chinese char\n \"\\U00002702-\\U000027B0\"\n \"\\U00002702-\\U000027B0\"\n \"\\U000024C2-\\U0001F251\"\n \"\\U0001f926-\\U0001f937\"\n \"\\U00010000-\\U0010ffff\"\n \"\\u2640-\\u2642\"\n \"\\u2600-\\u2B55\"\n \"\\u200d\"\n \"\\u23cf\"\n \"\\u23e9\"\n \"\\u231a\"\n \"\\ufe0f\" # dingbats\n \"\\u3030\"\n \"]+\",\n flags=re.UNICODE,\n )\n return regrex_pattern.sub(r\"\", tweet)", "def label(mi_, ma_):\n\treturn \"caractรจres Unicode des points de code {} ร  {}\".format(mi_, ma_)", "def print_msg(*msg):\n colour_format = '0;36'\n print('\\x1b[%sm%s\\x1b[0m' % (colour_format, \" \".join([m if isinstance(m, str) else str(m) for m in msg])))", "def fix_emoji(val):\n def _emoji_debugger(val):\n s = val.replace('<span class=\"emoji emoji1f450\"></span',\n '<span class=\"emoji emoji1f450\"></span>')\n\n def __fix_miss_match(m):\n return '<span class=\"emoji emoji%s\"></span>' % ({\n '1f63c': '1f601', '1f639': '1f602', '1f63a': '1f603',\n '1f4ab': '1f616', '1f64d': '1f614', '1f63b': '1f60d',\n '1f63d': '1f618', '1f64e': '1f621', '1f63f': '1f622',\n }.get(m.group(1), m.group(1)))\n return WeChatMeta.RE['emoji'].sub(__fix_miss_match, s)\n\n def _emoji_formatter(m):\n s = m.group(1)\n if len(s) == 6:\n return ('\\\\U%s\\\\U%s'%(s[:2].rjust(8, '0'), s[2:].rjust(8, '0')))\\\n .encode('utf8').decode('unicode-escape', 'replace')\n elif len(s) == 10:\n return ('\\\\U%s\\\\U%s'%(s[:5].rjust(8, '0'), s[5:].rjust(8, '0')))\\\n .encode('utf8').decode('unicode-escape', 'replace')\n else:\n return ('\\\\U%s'%m.group(1).rjust(8, '0'))\\\n .encode('utf8').decode('unicode-escape', 'replace')\n val = _emoji_debugger(val)\n val = WeChatMeta.RE['emoji'].sub(_emoji_formatter, val)\n return val", "async def add_emoji(\n client,\n event,\n emoji: ('str', 'The emoji to add.'),\n name: ('str', 'Custom name to add the emoji with.') = None\n):\n if not client.is_owner(event.user):\n abort('Owner only!')\n \n emoji = parse_emoji(emoji)\n if emoji is None:\n abort('That\\'s not an emoji.')\n \n if emoji.is_unicode_emoji():\n abort('Cannot add unicode emojis')\n \n if name is None:\n name = emoji.name\n else:\n if len(name) > 32:\n abort('Name length can be max 32.')\n \n embed = Embed('Are you sure to add this emoji?').add_field('Name:', name).add_image(emoji.url)\n \n message = yield InteractionResponse(embed = embed, components = ADD_EMOJI_COMPONENTS)\n \n try:\n component_interaction = await 
wait_for_component_interaction(\n message,\n timeout = 300.0,\n check = functools.partial(check_is_user_same, event.user)\n )\n \n except TimeoutError:\n component_interaction = None\n cancelled = True\n else:\n if component_interaction.interaction == ADD_EMOJI_BUTTON_CANCEL:\n cancelled = True\n else:\n cancelled = False\n \n if cancelled:\n embed.title = 'Adding emoji has been cancelled.'\n else:\n embed.title = 'Emoji has been added!'\n \n async with client.http.get(emoji.url) as response:\n emoji_data = await response.read()\n \n await client.emoji_create(event.guild, name, emoji_data)\n \n yield InteractionResponse(embed = embed, components = None, message = message, event = component_interaction)", "async def twitter(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"twitter\", emoji)", "def isCustomEmoji(s : str) -> bool:\n if s.startswith(\"<\") and s.endswith(\">\"):\n try:\n first = s.index(\":\")\n second = first + s[first+1:].index(\":\") + 1\n except ValueError:\n return False\n return stringTyping.isInt(s[second+1:-1])\n return False", "def isemoji(c):\n if type(c) == str:\n c = c.encode('utf-8')\n c = bytes(c)\n return c.decode() in UNICODE_EMOJI", "async def addreact(self, ctx, word, emoji):\n guild = ctx.message.guild\n message = ctx.message\n emoji = https://i.imgur.com/CWeQ620.jpg", "async def _serveremoji(self, ctx):\n non_animated_list= [f'<:{i.name}:{i.id}>' for i in ctx.guild.emojis if not i.animated]\n animated_list= [f'<a:{i.name}:{i.id}>' for i in ctx.guild.emojis if i.animated]\n\n if len(non_animated_list)==0 and len(animated_list)==0:\n await ctx.send(f\"\"\":exclamation: {ctx.author.mention}\n```{random.choice(self.bot.SERVER_CONFIG['text_colors'])}\nNo custom emojis has been added in this Server.\n```\"\"\")\n else:\n #NON ANIMATED EMOJIS\n if len(non_animated_list)>0:\n await ctx.send(f'**{len(non_animated_list)} Server Emojis**')\n k=0\n non_animated=[]\n temp=''\n for i in range(ceil(len(non_animated_list)/5)):\n temp += ' '.join(non_animated_list[k:k+5])+'\\n'\n k+=5\n if k%25==0:\n non_animated.append(temp)\n temp=''\n non_animated.append(temp) if temp !='' else ''\n \n for i in non_animated:\n await ctx.send(i)\n\n\n #ANIMATED EMOJIS\n if len(animated_list)>0:\n await ctx.send(f'**{len(animated_list)} Server Animated Emojis**')\n k=0\n animated=[]\n temp=''\n for i in range(ceil(len(animated_list)/5)):\n temp += ' '.join(animated_list[k:k+5])+'\\n'\n k+=5\n if k%25==0:\n animated.append(temp)\n temp=''\n animated.append(temp) if temp !='' else ''\n \n for i in animated:\n await ctx.send(i)", "def display(self, char, mask=None, isCursorCell=False):\n\n if char == '&':\n char = '&amp;'\n elif char == '<':\n char = '&lt;'\n elif char == '\\t':\n char = '$t'\n\n markup = self.MARKUP_NORMAL\n if isCursorCell:\n markup = markup % self.MARKUP_CURSOR_CELL\n label, = self._displayedChar.get_children()\n label.set_markup(markup % char)\n\n if mask in [DOT_7, DOTS_78]:\n self.dot7.raiseDot()\n if mask in [DOT_8, DOTS_78]:\n self.dot8.raiseDot()", "async def samsung(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"samsung\", emoji)", "def showText(self, context, text, size=1, color=colors.WHITE, conversion=True):\n context.print(text, self.components, size, color=color, conversion=conversion)", "def emoji_oneword(self, text):\n return text.replace('_', '')", "def print_text(TINY_FONT, x, y, text, color = white):\n text_image = TINY_FONT.render(text, True, color)\n gameDisplay.blit(text_image, (x,y))", "def __str__(self):\n if self._active_player:\n 
def piece_to_index(piece):\n return (piece & 0xF)\n else:\n def piece_to_index(piece):\n return (piece & 0xE) | (0 if piece & 1 else 1)\n\n return '\\n'.join(map(\n lambda posY, row: ''.join(map(\n lambda posX, piece: self.EMOJI[\n piece_to_index(piece)\n if piece else\n 14 + ((posY + posX) % 2)],\n count(), row)),\n count(),\n self.board if self._active_player else reversed(\n [reversed(row) for row in self.board])))", "async def charinfo(self, ctx: Context, *, characters: str) -> None:\n match = re.match(r\"<(a?):(\\w+):(\\d+)>\", characters)\n if match:\n await messages.send_denial(\n ctx,\n \"**Non-Character Detected**\\n\"\n \"Only unicode characters can be processed, but a custom Discord emoji \"\n \"was found. Please remove it and try again.\"\n )\n return\n\n if len(characters) > 50:\n await messages.send_denial(ctx, f\"Too many characters ({len(characters)}/50)\")\n return\n\n def get_info(char: str) -> tuple[str, str]:\n digit = f\"{ord(char):x}\"\n if len(digit) <= 4:\n u_code = f\"\\\\u{digit:>04}\"\n else:\n u_code = f\"\\\\U{digit:>08}\"\n url = f\"https://www.compart.com/en/unicode/U+{digit:>04}\"\n name = f\"[{unicodedata.name(char, '')}]({url})\"\n info = f\"`{u_code.ljust(10)}`: {name} - {utils.escape_markdown(char)}\"\n return info, u_code\n\n char_list, raw_list = zip(*(get_info(c) for c in characters), strict=True)\n embed = Embed().set_author(name=\"Character Info\")\n\n if len(characters) > 1:\n # Maximum length possible is 502 out of 1024, so there's no need to truncate.\n embed.add_field(name=\"Full Raw Text\", value=f\"`{''.join(raw_list)}`\", inline=False)\n\n await LinePaginator.paginate(char_list, ctx, embed, max_lines=10, max_size=2000, empty=False)", "def showText(pos):\n\treturn OnscreenText( \\\n\t\ttext=\" \", \\\n\t\tstyle=1, fg=(0,0,0,1), pos=(-1.3, pos), \\\n\t\talign=TextNode.ALeft, scale = .06, mayChange = True)", "async def stealemoji(self, ctx, *, emojis):\n try:\n m = await commands.MessageConverter().convert(ctx, emojis)\n emojis = m.content\n except commands.MessageNotFound:\n pass\n\n emojis = [await commands.PartialEmojiConverter().convert(ctx, e) for e in\n re.findall(r'<a?:\\w+:\\d+>', emojis)]\n\n if not emojis:\n await ctx.send_help()\n return\n\n ae = list(ctx.guild.emojis) + emojis\n if len([e for e in ae if not e.animated]) > ctx.guild.emoji_limit:\n await ctx.send(\"Not enough emoji slots\")\n if len([e for e in ae if e.animated]) > ctx.guild.emoji_limit:\n await ctx.send(\"Not enough animated emoji slots\")\n\n async with ctx.typing():\n for emoji in emojis:\n if emoji.name in [e.name for e in ctx.guild.emojis]:\n continue\n await ctx.guild.create_custom_emoji(name=emoji.name, image=await emoji.url.read())\n await ctx.tick()", "def find_emoji(term):\r\n emoji_dict = _make_emoji_mapping()\r\n emoji_name_lengths = []\r\n emojis_found = {}\r\n for emoji, emoji_name in emoji_dict.items():\r\n if term.lower() in emoji_name.lower():\r\n emoji_name_lengths.append(len(emoji_name))\r\n emojis_found[emoji_name.strip()] = emoji.strip()\r\n if len(emojis_found) == 0:\r\n print(\"no matches\")\r\n else:\r\n for name, emoji in emojis_found.items():\r\n print(f\"{name.title(): <{max(emoji_name_lengths)}} | {emoji}\")", "def test__Emoji__is_unicode_emoji():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_true(emoji.is_unicode_emoji())\n \n emoji = Emoji()\n vampytest.assert_false(emoji.is_unicode_emoji())", "def weather_emoji(description: str):\n\n emoji_map = {\n \"cloud\": \"โ˜๏ธ\",\n \"rain\": \"๐ŸŒง\",\n \"sun\": \"โ˜€๏ธ\",\n \"snow\": 
\"โ„๏ธ\",\n }\n\n emojis = \"\"\n for key in emoji_map:\n if key in description:\n emojis += emoji_map[key]\n return emojis", "async def textemote(self, ctx, *, msg):\n try:\n await ctx.message.delete()\n except discord.Forbidden:\n pass\n\n if msg != None:\n out = msg.lower()\n text = out.replace(' ', ' ').replace('10', '\\u200B:keycap_ten:')\\\n .replace('ab', '\\u200B๐Ÿ†Ž').replace('cl', '\\u200B๐Ÿ†‘')\\\n .replace('0', '\\u200B:zero:').replace('1', '\\u200B:one:')\\\n .replace('2', '\\u200B:two:').replace('3', '\\u200B:three:')\\\n .replace('4', '\\u200B:four:').replace('5', '\\u200B:five:')\\\n .replace('6', '\\u200B:six:').replace('7', '\\u200B:seven:')\\\n .replace('8', '\\u200B:eight:').replace('9', '\\u200B:nine:')\\\n .replace('!', '\\u200Bโ—').replace('?', '\\u200Bโ“')\\\n .replace('vs', '\\u200B๐Ÿ†š').replace('.', '\\u200B๐Ÿ”ธ')\\\n .replace(',', '๐Ÿ”ป').replace('a', '\\u200B๐Ÿ…ฐ')\\\n .replace('b', '\\u200B๐Ÿ…ฑ').replace('c', '\\u200B๐Ÿ‡จ')\\\n .replace('d', '\\u200B๐Ÿ‡ฉ').replace('e', '\\u200B๐Ÿ‡ช')\\\n .replace('f', '\\u200B๐Ÿ‡ซ').replace('g', '\\u200B๐Ÿ‡ฌ')\\\n .replace('h', '\\u200B๐Ÿ‡ญ').replace('i', '\\u200B๐Ÿ‡ฎ')\\\n .replace('j', '\\u200B๐Ÿ‡ฏ').replace('k', '\\u200B๐Ÿ‡ฐ')\\\n .replace('l', '\\u200B๐Ÿ‡ฑ').replace('m', '\\u200B๐Ÿ‡ฒ')\\\n .replace('n', '\\u200B๐Ÿ‡ณ').replace('รฑ', '\\u200B๐Ÿ‡ณ')\\\n .replace('o', '\\u200B๐Ÿ…พ').replace('p', '\\u200B๐Ÿ…ฟ')\\\n .replace('q', '\\u200B๐Ÿ‡ถ').replace('r', '\\u200B๐Ÿ‡ท')\\\n .replace('s', '\\u200B๐Ÿ‡ธ').replace('t', '\\u200B๐Ÿ‡น')\\\n .replace('u', '\\u200B๐Ÿ‡บ').replace('v', '\\u200B๐Ÿ‡ป')\\\n .replace('w', '\\u200B๐Ÿ‡ผ').replace('x', '\\u200B๐Ÿ‡ฝ')\\\n .replace('y', '\\u200B๐Ÿ‡พ').replace('z', '\\u200B๐Ÿ‡ฟ')\n try:\n await ctx.send(text)\n except Exception as e:\n await ctx.send(f'```{e}```')\n else:\n await ctx.send('Args req!', delete_after=3.0)", "async def textemote(self, ctx, *, msg):\n await ctx.message.delete()\n if msg != None:\n out = msg.lower()\n text = out.replace(' ', ' ').replace('10', '\\u200B:keycap_ten:')\\\n .replace('ab', '\\u200B๐Ÿ†Ž').replace('cl', '\\u200B๐Ÿ†‘')\\\n .replace('0', '\\u200B:zero:').replace('1', '\\u200B:one:')\\\n .replace('2', '\\u200B:two:').replace('3', '\\u200B:three:')\\\n .replace('4', '\\u200B:four:').replace('5', '\\u200B:five:')\\\n .replace('6', '\\u200B:six:').replace('7', '\\u200B:seven:')\\\n .replace('8', '\\u200B:eight:').replace('9', '\\u200B:nine:')\\\n .replace('!', '\\u200Bโ—').replace('?', '\\u200Bโ“')\\\n .replace('vs', '\\u200B๐Ÿ†š').replace('.', '\\u200B๐Ÿ”ธ')\\\n .replace(',', '๐Ÿ”ป').replace('a', '\\u200B๐Ÿ…ฐ')\\\n .replace('b', '\\u200B๐Ÿ…ฑ').replace('c', '\\u200B๐Ÿ‡จ')\\\n .replace('d', '\\u200B๐Ÿ‡ฉ').replace('e', '\\u200B๐Ÿ‡ช')\\\n .replace('f', '\\u200B๐Ÿ‡ซ').replace('g', '\\u200B๐Ÿ‡ฌ')\\\n .replace('h', '\\u200B๐Ÿ‡ญ').replace('i', '\\u200B๐Ÿ‡ฎ')\\\n .replace('j', '\\u200B๐Ÿ‡ฏ').replace('k', '\\u200B๐Ÿ‡ฐ')\\\n .replace('l', '\\u200B๐Ÿ‡ฑ').replace('m', '\\u200B๐Ÿ‡ฒ')\\\n .replace('n', '\\u200B๐Ÿ‡ณ').replace('รฑ', '\\u200B๐Ÿ‡ณ')\\\n .replace('o', '\\u200B๐Ÿ…พ').replace('p', '\\u200B๐Ÿ…ฟ')\\\n .replace('q', '\\u200B๐Ÿ‡ถ').replace('r', '\\u200B๐Ÿ‡ท')\\\n .replace('s', '\\u200B๐Ÿ‡ธ').replace('t', '\\u200B๐Ÿ‡น')\\\n .replace('u', '\\u200B๐Ÿ‡บ').replace('v', '\\u200B๐Ÿ‡ป')\\\n .replace('w', '\\u200B๐Ÿ‡ผ').replace('x', '\\u200B๐Ÿ‡ฝ')\\\n .replace('y', '\\u200B๐Ÿ‡พ').replace('z', '\\u200B๐Ÿ‡ฟ')\n try:\n await ctx.send(text)\n except Exception as e:\n await ctx.send(f'```{e}```')\n else:\n await ctx.send('Args req!', delete_after=3.0)", "async def messenger(self, 
ctx, *, emoji: str):\n await self.get_emoji(ctx, \"messenger\", emoji)", "def _showMessage(self, msg: str) -> None:\n\n raise NotImplementedError()", "def test__Emoji__as_reaction():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_instance(emoji.as_reaction, str)\n \n emoji = Emoji()\n vampytest.assert_instance(emoji.as_reaction, str)", "def error_embed(self, message: str):\n embed = discord.Embed(color=discord.Color.red())\n embed.title = \"\"\n embed.description = message\n return embed", "def message(msg, color):\n mesg = font_style.render(msg, True, color)\n dis.blit(mesg, [dis_width / 3, dis_height / 2])", "def icon(self):\n return \"mdi:speaker\"", "def showme(message):\n print(message)", "def get_random_emoji():\n return (random.choice(get_emoji_list())).encode('utf-8').decode('utf-8')", "async def starboard_emoji(self, ctx, emoji):\n if emoji[0] == \"<\":\n # is custom emoji\n emoji_obj = await util.get_emoji(ctx, emoji)\n if emoji_obj is None:\n raise exceptions.Warning(\"I don't know this emoji!\")\n\n await self.bot.db.execute(\n \"\"\"\n INSERT INTO starboard_settings (guild_id, emoji_name, emoji_id, emoji_type)\n VALUES (%s, %s, %s, %s)\n ON DUPLICATE KEY UPDATE\n emoji_name = VALUES(emoji_name),\n emoji_id = VALUES(emoji_id),\n emoji_type = VALUES(emoji_type)\n \"\"\",\n ctx.guild.id,\n None,\n emoji_obj.id,\n \"custom\",\n )\n await util.send_success(\n ctx, f\"Starboard emoji is now {emoji} (emoji id `{emoji_obj.id}`)\"\n )\n else:\n # unicode emoji\n emoji_name = emoji_literals.UNICODE_TO_NAME.get(emoji)\n if emoji_name is None:\n raise exceptions.Warning(\"I don't know this emoji!\")\n\n await self.bot.db.execute(\n \"\"\"\n INSERT INTO starboard_settings (guild_id, emoji_name, emoji_id, emoji_type)\n VALUES (%s, %s, %s, %s)\n ON DUPLICATE KEY UPDATE\n emoji_name = VALUES(emoji_name),\n emoji_id = VALUES(emoji_id),\n emoji_type = VALUES(emoji_type)\n \"\"\",\n ctx.guild.id,\n emoji_name,\n None,\n \"unicode\",\n )\n await util.send_success(ctx, f\"Starboard emoji is now {emoji}\")\n await self.bot.cache.cache_starboard_settings()", "def init_emoji(self, client):\n for emoji in client.get_all_emojis():\n if emoji.name == self.emoji:\n self.emoji = str(emoji)\n return\n\n self.emoji = \":\" + self.emoji + \":\"", "def display():\n screen.addch(head[0],head[1],'x')", "def get_informative_message(self, msg_content):\n return \"{0}{1}{2}\".format(\"\\x1b[6;30;44m\", msg_content, \"\\x1b[0m\")", "def get_emojis(self):\n return self.tweets.str.findall(r':{1}[\\d\\w\\-]+:{1}')", "def _show_piece(self, x_pos, y_pos):\n piece = self.board[x_pos, y_pos]\n if isinstance(piece, Pawn): return '{}P'.format(piece.color[0])\n elif isinstance(piece, Knight): return '{}N'.format(piece.color[0])\n elif isinstance(piece, Bishop): return '{}B'.format(piece.color[0])\n elif isinstance(piece, Rook): return '{}R'.format(piece.color[0])\n elif isinstance(piece, Queen): return '{}Q'.format(piece.color[0])\n elif isinstance(piece, King): return '{}K'.format(piece.color[0])\n else: return ' '", "def print_message(self, message, color):\n\n xpos = 20\n ypos = self.height\n\n size = common.FONT_SIZE\n\n arcade.draw_text(\n text=message,\n start_x=xpos,\n start_y=ypos,\n anchor_x=\"left\",\n anchor_y=\"top\",\n width=size*len(message),\n color=color,\n font_size=size,\n bold=True)", "async def emoji_edit(client, emoji, old_attributes):\n channel = get_log_emoji_channel(emoji.guild_id)\n if (channel is None):\n return\n \n await client.message_create(\n channel,\n embed = build_emoji_edit_embed(emoji, 
old_attributes),\n allowed_mentions = None,\n )", "def use_external_emojis(_) -> int:\n return 1 << 18", "def use_external_emojis(_) -> int:\n return 1 << 18", "def code_display(self):\r\n return u'%s-%s-%s' % (self.code[:3], self.code[3:6], self.code[6:])", "def test__emoji__url():\n emoji = BUILTIN_EMOJIS['x']\n vampytest.assert_is(emoji.url, None)\n\n emoji = Emoji()\n vampytest.assert_instance(emoji.url, str)", "def rock():\n typer.echo(\"๐Ÿค–๐Ÿค˜\")", "async def _unicode(self, ctx):\n if await self.config.guild(ctx.guild).unicode():\n await self.config.guild(ctx.guild).unicode.set(False)\n msg = _(\"Okay, I will not react to messages \" \"containing unicode emojis!\")\n await ctx.send(msg)\n else:\n await self.config.guild(ctx.guild).unicode.set(True)\n msg = _(\"Okay, I will react to messages \" \"containing unicode emojis!\")\n await ctx.send(msg)", "def rendermsg(self,msg):\n return ' '.join(['%02x'%ord(x) for x in msg])", "async def whatsapp(self, ctx, *, emoji: str):\n await self.get_emoji(ctx, \"whatsapp\", emoji)", "def __str__(self):\n return f'({self.character})'", "def show_message(message, col=c.r, update=False):\n g.content = generate_songlist_display()\n g.message = col + message + c.w\n\n if update:\n screen_update()", "async def emojiboard(self, ctx, arg1: U = None, arg2: U = None, arg3: U = None):\n (channel, member, emoji) = self.resolve_arguments(arg1, arg2, arg3, types=get_args(U))\n\n await ctx.trigger_typing()\n\n member_id = member.id if member else None\n channel_id = channel.id if channel else None\n bot_ids = [bot.id for bot in filter(lambda user: user.bot, ctx.guild.members)]\n emoji_id = emoji.id if emoji else None\n\n data = await self.bot.db.emojiboard.select(ctx.guild.id, bot_ids, channel_id, member_id, emoji_id)\n\n embed =await self.display_emojiboard(ctx, data)\n await ctx.send(embed=embed)", "async def emojis(self, ctx):\n server = ctx.message.server\n await self.bot.say('This may take some time, generating list...')\n data = discord.Embed(description=\"Emojilist\")\n for ej in server.emojis:\n data.add_field(\n name=ej.name, value=str(ej) + \" \" + ej.id, inline=False)\n await self.bot.say(embed=data)", "def errors_icons(self):\n msg_errors_lifes = ''\n for i in range(0,5):\n if self.letters_wrong <= i:\n msg_errors_lifes += ' โ™ฅ '\n else:\n msg_errors_lifes += ' โ˜  ' \n return msg_errors_lifes", "def add_msg(pos, msg):\n return OnscreenText(text=msg, style=1, fg=(1, 1, 1, 1), shadow=(0, 0, 0, 1),\n parent=base.a2dTopLeft, align=TextNode.ALeft,\n pos=(0.08, -pos - 0.04), scale=.05)", "def show_message(self, message):\n self.sense.show_message(\n message,\n scroll_speed=self.SCROLL_SPEED,\n text_colour=self.TEXT_COLOUR\n )" ]
[ "0.74917614", "0.6761148", "0.6492297", "0.619809", "0.61710256", "0.6136859", "0.61155015", "0.6039681", "0.59957427", "0.5945978", "0.5936778", "0.5936708", "0.58585626", "0.5836026", "0.5809411", "0.5724715", "0.5692132", "0.5662418", "0.5606713", "0.55910206", "0.5587064", "0.5584996", "0.55786157", "0.55631757", "0.55511403", "0.55405706", "0.5527404", "0.55245394", "0.5502", "0.54958296", "0.5483756", "0.5469565", "0.54606926", "0.54362166", "0.5427706", "0.54270726", "0.54270726", "0.54270726", "0.5426591", "0.54156107", "0.5410777", "0.5408107", "0.5398413", "0.53934914", "0.5384987", "0.5378394", "0.5377406", "0.5351773", "0.534162", "0.53247285", "0.5288669", "0.5281321", "0.5274848", "0.5274045", "0.5268684", "0.5263233", "0.52589864", "0.52489984", "0.5247376", "0.52454114", "0.5244768", "0.52285445", "0.52270764", "0.52236205", "0.52225", "0.5213446", "0.52091867", "0.5200856", "0.5190369", "0.51854306", "0.518396", "0.51672465", "0.516552", "0.51634645", "0.51599234", "0.5148935", "0.5140566", "0.513797", "0.5125666", "0.51204884", "0.51131624", "0.5111515", "0.5109947", "0.5105479", "0.5099677", "0.509862", "0.509862", "0.50954884", "0.50936484", "0.5088892", "0.5087312", "0.5087182", "0.50723857", "0.5068492", "0.5049161", "0.5048305", "0.50380385", "0.502583", "0.5024475", "0.50208753" ]
0.75498223
0
Returns the length of the selected text channel's name.
async def text_channel_name_length(
    channel: Channel
):
    return len(channel.name)
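A small usage note (my own illustration, not part of the dataset): len(channel.name) counts Unicode code points, not bytes, so accented channel names are measured by characters:

print(len('general'))    # 7
print(len('café-chat'))  # 9 code points, even though UTF-8 needs 10 bytes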
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def voice_channel_name_length(\n channel: P('channel', 'Select a voice channel', channel_types = [ChannelType.guild_voice])\n):\n return len(channel.name)", "async def thread_channel_name_length(\n channel: ('channel_group_thread', 'Select a thread channel.')\n):\n return len(channel.name)", "def namelength(self):\n return self[\"namelength\"]", "def length_of_name(self, name):\n length = len(name)\n if length > 10:\n self.show_message_when_name_very_long()\n return length", "def get_length(self):\n return self.run_command('get_length')[0]", "def channel_size(self):\n if self.channels is None:\n return 0\n return self.channels.size", "def LEN(text):\n return len(text)", "def get_length(self):\n return self._select_interface(self._rc_get_length,\n self._http_get_length)", "def __len__(self) -> int:\n\n return len(self._space.CHANNELS) + 1", "def test_support_CHANNELLEN(self):\n default = irc.ServerSupportedFeatures()._features[\"CHANNELLEN\"]\n self._testIntOrDefaultFeature(\"CHANNELLEN\", default)", "def getconenamelen(self,i_): # 3\n res,resargs = self.__obj.getconenamelen(i_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _len_return_value = resargs\n return _len_return_value", "def _get_length(self):\n return self._length", "def length_name(self):\n return self._src_decoder.length_tensor_name", "def get_string_length(self):\n return int(self.read('H')[0])", "def length(self):\n return self._info.length # pylint: disable=E1101", "def GetLen(*args, **kwargs):\n return _gdi_.PseudoDC_GetLen(*args, **kwargs)", "def __len__(self):\n # TODO: Is this method used?\n return self._info['length']", "def getLength(self):\n return self.count", "def getLength(self):\n return self.length", "def get_length(self):\n return self._length", "def get_length(self):\n return self._length", "def get_length(self):\n\n return self.length", "def getLength(self):\n return self.n", "def size(self):\n return len(self.chars)", "def width(self, text):\n return len(text) * (self.font_width + 1)", "def length(self):\n return len(self.text)", "def getLength(msg):\n return len(msg)", "def Lof(channel):\n return FileLen(VBFiles.getFile(channel).name)", "def getLength(self, text):\n\n return len(text[self.table_header[0]])", "def get_remaining_character_count(self):\n return self.driver.find(CHARACTER_COUNT).text", "def getLen(self):\n return self.len", "def n_channels(self):\n return len(self.channels)", "def length(self):\n\t\treturn self.n", "def get_num_channels():\r\n check_mixer()\r\n return sdl.Mix_GroupCount(-1)", "def length(self):\n ...", "def llen(self, name):\n self.connect()\n self._write('LLEN %s\\r\\n' % name)\n return self._get_numeric_response()", "def getTextLength(self):\r\n return 0", "def num_channels(self):\n with audioread.audio_open(self.path) as f:\n return f.channels", "def title_len(self) -> int:\n return self.__title_len", "def char_size(self):\n return len(self.id2char)", "def get_length(self):\n\n return self._length", "def get_len(song, album):\r\n length = 0\r\n words = dbase()[album][0][song]\r\n words = words[2]\r\n words = words.split()\r\n for word in words:\r\n length += 1\r\n return str(length)", "def get_length(self):\r\n return len(self.deck)", "def length(self):\n return self.length", "def tester(name):\n return len(name)", "def length(self) -> 'int':\n return self._info.len", "def Length(self) -> int:", "def Length(self) -> int:", "def get_length(self):\n return len(self.cards)", "def num_chars(word):\n return len(word)", "def 
getLength(self):\n return None", "def getLength(self):\n return None", "def size(self):\n return len(self.sentence)", "def printed_length(string):\n # It returns the length of the printed string\n return len(remove_colors(string))", "def getconenamelen(self,i_):\n len_ = ctypes.c_int32()\n res = __library__.MSK_XX_getconenamelen(self.__nativep,i_,ctypes.byref(len_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n len_ = len_.value\n _len_return_value = len_\n return (_len_return_value)", "def getLength(message):\r\n length = 0\r\n for char in message:\r\n if char in alphaUpper or alphaLower:\r\n length += 1\r\n return length", "def n_channels(self):\n return self._n_channels", "def length(self):\n return self.counter", "def get_counts(self):\n value = self.text_ctrl.GetValue()\n chars = len(value)\n words = len(re.findall('\\w+', value))\n pub.sendMessage('update_counts', chars=chars, words=words)", "def get_length(self):\r\n return len(self.hand)", "def length(self):\n pass", "def message_length(self):\n return self._message_length", "def length(self):\n return self._length", "def length(self):\n return self._length", "async def select_channel(self, channels: [abc.GuildChannel], workspace: WorkspaceEntity) -> str:\n text_channels = []\n max_messages = -1\n max_messages_channel = ''\n for channel in channels:\n if isinstance(channel, TextChannel):\n # skip generated channel from being selected\n if channel.name == workspace.generated_channel_name:\n continue\n text_channels.append(channel.id)\n\n for channel_id in text_channels:\n channel = await self.get_channel(channel_id)\n one_month_ago = datetime.utcnow() - timedelta(days=30)\n messages = await channel.history(limit=None, after=one_month_ago).flatten()\n valid_message = [m for m in messages if not m.author.bot]\n if len(valid_message) > max_messages:\n max_messages = len(valid_message)\n max_messages_channel = channel_id\n\n return max_messages_channel", "def get_num_channels(self):\r\n check_mixer()\r\n return sdl.Mix_GroupCount(self._chunk_tag)", "def length(self):\n return self.__length", "def length(self):\n return self.__length", "def max_length(self):\n\t\treturn self._max_length", "def length(self) -> int:\r\n\r\n return self.__length", "def GetCharWidth(*args, **kwargs):\n return _gdi_.DC_GetCharWidth(*args, **kwargs)", "def characters_left(self):\r\n return self.max_chars - len(self.variable.get())", "def namelengthsrc(self):\n return self[\"namelengthsrc\"]", "def getLength( self, sbjct_token ):\n if not self.mIsLoaded: self.__loadIndex()\n return self.mIndex[sbjct_token][2]", "def size(name):", "def length(self):\n return len(self._commands)", "def __len__(self) -> int:\n return len(self.length)", "def get_num_channels(self):\n return _uhd_swig.tx_streamer_get_num_channels(self)", "def total_length():\n return", "def target_length_name(self):\n name = dsutils._connect_name(\n self._data_spec.name_prefix[1],\n self._tgt_decoder.length_tensor_name)\n return name", "def getLength(self):\n return self.sideLength", "def sent_len(self) -> int:\n raise NotImplementedError(\"must be implemented by subclasses\")", "def __len__(self):\n return len(self.label)", "def length(self):\n return self.count", "def _text_length(self, text):\n\n if isinstance(text, dict): # {key: value} case\n return len(next(iter(text.values())))\n elif not hasattr(text, '__len__'): # Object has no len() method\n return 1\n elif len(text) == 0 or isinstance(text[0], int): # Empty string or list of ints\n return len(text)\n 
else:\n return sum([len(t) for t in text]) # Sum of length of individual strings", "def __len__(self) -> int:\n return self.length", "def visual_len(text) -> int:\n return len(text) if NO_COLOR else len(_remove_regex(\"\\033\\\\[[0-9]*m\", text))", "def get_length(self):\n return self.resource.get_size()", "def source_length_name(self):\n name = dsutils._connect_name(\n self._data_spec.name_prefix[0],\n self._src_decoder.length_tensor_name)\n return name", "def actual_len(self, index):\n word = self.get_prev_word(index, orignal=True)\n return len(word)", "def __len__(self) -> int:\n return self._length", "def __len__(self):\n return self._length # pylint: disable = E1101", "def length(self) -> int:\n return self.size", "def length(self) -> int:\n return self.size", "def size(self):\n return _(len(self._))", "def getvarnamelen(self,i_): # 3\n res,resargs = self.__obj.getvarnamelen(i_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _len_return_value = resargs\n return _len_return_value", "def get_number_of_channels(tgt_l, model_graph):\n return int(model_graph.get_tensor_by_name(tgt_l + ':0').get_shape()[-1])", "def length(self):\n return self.size", "def length(self) -> int:\n pass", "def get_length(self):\n return len(self.target) + len(self.action)" ]
[ "0.7989445", "0.77088827", "0.67712826", "0.6691694", "0.6674016", "0.6535482", "0.64485866", "0.6269007", "0.6205133", "0.61872053", "0.6186812", "0.6179118", "0.6176119", "0.6155029", "0.61237496", "0.6103924", "0.60789996", "0.60677344", "0.6062524", "0.6050127", "0.6050127", "0.60476947", "0.60453486", "0.60388285", "0.6026577", "0.6008284", "0.59969616", "0.59827906", "0.59786963", "0.5945429", "0.592543", "0.59244025", "0.5921967", "0.5919833", "0.59192157", "0.5902356", "0.589612", "0.5880999", "0.5879979", "0.58766854", "0.58723027", "0.5866059", "0.58659023", "0.58648425", "0.58642346", "0.58618546", "0.5860306", "0.5860306", "0.5844802", "0.5835046", "0.5832292", "0.5832292", "0.58271277", "0.5825819", "0.5817991", "0.57997566", "0.5798787", "0.57976997", "0.5787925", "0.57805085", "0.57621425", "0.5759226", "0.57542914", "0.57542914", "0.5739507", "0.5739285", "0.5722792", "0.5722792", "0.57219285", "0.57205766", "0.57204914", "0.57201666", "0.571522", "0.5710768", "0.57028747", "0.57012755", "0.5689222", "0.56879705", "0.5679572", "0.56776386", "0.56768256", "0.56750995", "0.5673366", "0.5670174", "0.5669803", "0.56683433", "0.5665587", "0.5662163", "0.5656978", "0.56516725", "0.5641861", "0.56401044", "0.56212693", "0.56212693", "0.5617579", "0.56148446", "0.56132144", "0.56097573", "0.5604532", "0.5598513" ]
0.85015506
0
Encode a position, given as the float arguments latitude and longitude, to a geohash whose character count equals precision.
def geohash_encode(latitude, longitude, precision=12):
    lat_interval, lon_interval = (-90.0, 90.0), (-180.0, 180.0)
    base32 = '0123456789bcdefghjkmnpqrstuvwxyz'
    geohash = []
    bits = [16, 8, 4, 2, 1]
    bit = 0
    ch = 0
    even = True
    while len(geohash) < precision:
        if even:
            mid = (lon_interval[0] + lon_interval[1]) / 2
            if longitude > mid:
                ch |= bits[bit]
                lon_interval = (mid, lon_interval[1])
            else:
                lon_interval = (lon_interval[0], mid)
        else:
            mid = (lat_interval[0] + lat_interval[1]) / 2
            if latitude > mid:
                ch |= bits[bit]
                lat_interval = (mid, lat_interval[1])
            else:
                lat_interval = (lat_interval[0], mid)
        even = not even
        if bit < 4:
            bit += 1
        else:
            geohash += base32[ch]
            bit = 0
            ch = 0
    return ''.join(geohash)
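A quick sanity check of the function above, using the well-known test vector from the Wikipedia geohash article (the example is my own addition, not part of the dataset):

print(geohash_encode(42.605, -5.603, precision=5))  # -> 'ezs42'

# Each extra character only refines the same bisection, so a longer
# precision extends the prefix rather than changing it:
assert geohash_encode(42.605, -5.603, precision=12).startswith('ezs42')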
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def encode(latitude, longitude, precision=12):\n lat_interval, lon_interval = (-90.0, 90.0), (-180.0, 180.0)\n geohash = []\n bits = [ 16, 8, 4, 2, 1 ]\n bit = 0\n ch = 0\n even = True\n while len(geohash) < precision:\n if even:\n mid = (lon_interval[0] + lon_interval[1]) / 2\n if longitude > mid:\n ch |= bits[bit]\n lon_interval = (mid, lon_interval[1])\n else:\n lon_interval = (lon_interval[0], mid)\n else:\n mid = (lat_interval[0] + lat_interval[1]) / 2\n if latitude > mid:\n ch |= bits[bit]\n lat_interval = (mid, lat_interval[1])\n else:\n lat_interval = (lat_interval[0], mid)\n even = not even\n if bit < 4:\n bit += 1\n else:\n geohash += __base32[ch]\n bit = 0\n ch = 0\n return ''.join(geohash)", "def encode(latitude, longitude, precision=12):\r\n lat_interval, lon_interval = (-90.0, 90.0), (-180.0, 180.0)\r\n geohash = []\r\n bits = [ 16, 8, 4, 2, 1 ]\r\n bit = 0\r\n ch = 0\r\n even = True\r\n while len(geohash) < precision:\r\n if even:\r\n mid = (lon_interval[0] + lon_interval[1]) / 2\r\n if longitude > mid:\r\n ch |= bits[bit]\r\n lon_interval = (mid, lon_interval[1])\r\n else:\r\n lon_interval = (lon_interval[0], mid)\r\n else:\r\n mid = (lat_interval[0] + lat_interval[1]) / 2\r\n if latitude > mid:\r\n ch |= bits[bit]\r\n lat_interval = (mid, lat_interval[1])\r\n else:\r\n lat_interval = (lat_interval[0], mid)\r\n even = not even\r\n if bit < 4:\r\n bit += 1\r\n else:\r\n geohash += __base32[ch]\r\n bit = 0\r\n ch = 0\r\n return ''.join(geohash)", "def _encode(lat_val, lng_val, length=12):\r\n lat_bits = _coordinate2bits(lat_val, -90, 90, length * 5 // 2)\r\n lng_bits = _coordinate2bits(lng_val, -180, 180, (length * 5 + 1) // 2)\r\n bits = ''.join(itertools.chain.from_iterable(\r\n itertools.zip_longest(lng_bits, lat_bits, fillvalue='')))\r\n numbers = [int(bits[i:i+5], 2) for i in range(0, len(bits), 5)]\r\n hashstr = ''.join(BASE32[i] for i in numbers)\r\n return hashstr", "def encode(lat_val, lng_val, length=12):\r\n hashstr = ''\r\n lat_lo, lat_hi = -90, 90\r\n lng_lo, lng_hi = -180, 180\r\n is_lng = True\r\n masks = [16, 8, 4, 2, 1] # use bit operation to make base32 convert fast\r\n\r\n d = 0\r\n bit = 0\r\n while len(hashstr) < length:\r\n if is_lng:\r\n mid = (lng_lo + lng_hi) / 2\r\n if lng_val > mid:\r\n d |= masks[bit]\r\n lng_lo = mid\r\n else:\r\n lng_hi = mid\r\n else:\r\n mid = (lat_lo + lat_hi) / 2\r\n if lat_val > mid:\r\n d |= masks[bit]\r\n lat_lo = mid\r\n else:\r\n lat_hi = mid\r\n\r\n is_lng = not is_lng\r\n if bit < 4:\r\n bit += 1\r\n else:\r\n hashstr += BASE32[d]\r\n bit = 0\r\n d = 0\r\n return hashstr", "def coords2geohash_dec(*, lat: float, lon: float, pre: int = 6) -> int:\n return hash2dec(encoder(lat, lon, pre))", "def _encode_pos(x, y):\n return struct.pack(_ENCODE_POS, x, y)", "def encode_position(xs):\n return standard_b64encode(twoside_encode(xs)).rstrip('=')", "def _encode_pos(self, x, y):\n return struct.pack(_ENCODE_POS, x, y)", "def _encode_and_store_(self, latitude, longitude, ID):\n hash = geohash.encode(latitude=latitude, longitude=longitude)\n self.storage[hash] = ID\n self.points_by_id[ID] = (latitude, longitude)", "def encodeCoordinate(number):\n \n number = round(number, 4) # Rounding the coordinate to 4 decimal places, equivalent to a precision of 10m \n number = int(number * 10000) # Multiplying the coordinate by 10000 in order to transform to an integer\n \n array = [None]*3 # Creating an array to store the bytes \n \n if number < 0 : # The if statement treats the case when the coordinate is negative \n number = -number\n 
array[0] = (number>>16) & 0xff | 0b10000000 # we fill the first byte of the encoded message and the 24th bit is turned to 1 to signify a negative number \n else :\n array[0] = (number>>16) & 0xff # filling byte 0\n\n array[1] = (number>>8) & 0xff # filling byte 1\n array[2] = number & 0xff # filling byte 2\n\n return bytes(array) # returning the coordinate in byte format, necessary for LoRa transmition ", "def encode(coordinates, precision=5, geojson=False):\r\n return PolylineCodec().encode(coordinates, precision, geojson)", "def _encode_pos(self, x, y):\n return struct.pack(self._bounds_encoding, x, y)", "def _decode(geohash):\n lat_val, lng_val, lat_err, lng_err = _decode_val_err(geohash)\r\n precision = _get_precision(lng_err)\n lat_val = \"%.*f\" % (precision, lat_val)\r\n lng_val = \"%.*f\" % (precision, lng_val)\r\n return lat_val, lng_val", "def encode(self) -> bytes:\n\n coordinate = self.column % (2 ** Protocol.Formats.COORDINATE_DELIMITER)\n coordinate += (self.row << Protocol.Formats.COORDINATE_DELIMITER)\n\n encoded_message = struct.pack(Protocol.Formats.COORDINATE_FORMAT, coordinate)\n\n return encoded_message", "def get_position_geohash(points):\n\n # takes in a list as a parameter of [(lat, lng) ... (lat, lng)]\n coords_data = [] # to store the dictionary generated\n\n # do something like a for loop over here\n for point in points:\n geohash_sql = \"SELECT * \" + \\\n \"FROM nyc_crimes_by_geohash \" + \\\n \"WHERE geohash=\" + \\\n \"ST_GeoHash(st_makepoint(%s, %s), 7);\" % \\\n (point[0], point[1])\n\n # execute the raw sql, and there should only be one result... so get that.\n geohash_query = db.engine.execute(geohash_sql).fetchone()\n\n if geohash_query is None:\n # if the geohash isn't found, need to do something,\n # query PostGIS for the geohash (not in db)\n # then assume that there are no crimes in the area\n geohash_of_point = \"SELECT ST_GeoHash(geometry(Point(%s, %s)), 7);\" \\\n % (point[0], point[1])\n\n geohash_found = db.engine.execute(geohash_of_point).fetchone()\n\n geohash_query = [0, geohash_found[0], 0, 0.0]\n\n geohash_query_data = {\n 'geohash': geohash_query[1],\n 'total_crimes': geohash_query[2],\n 'crime_index': float(geohash_query[3]),\n 'point': point\n }\n coords_data.append(geohash_query_data)\n\n # return something like [{dicte}, {dictw}], or {dict}, based on total pts\n return coords_data", "def encode_pos(i, j):\n return 3 * i + j", "def get_hash(self):\n s = super(Point, self).get_hash()\n for c in self.coordinate:\n s += \"_%f\" % c\n return s", "def encode_geometry(geom: BasePolygon) -> str:\n encoded_geom = geobuf.encode(mapping(geom)).hex()\n\n # if the geometry is so complex is still goes over the limit, incrementally attempting to simplify it\n if sys.getsizeof(encoded_geom) > LAMBDA_ASYNC_PAYLOAD_LIMIT_BYTES:\n encoded_geom = geobuf.encode(\n mapping(geom.simplify(0.005, preserve_topology=False))\n ).hex()\n\n if sys.getsizeof(encoded_geom) > LAMBDA_ASYNC_PAYLOAD_LIMIT_BYTES:\n encoded_geom = geobuf.encode(\n mapping(geom.simplify(0.01, preserve_topology=False))\n ).hex()\n\n return encoded_geom", "def decode(geohash):\r\n try:\r\n lat_val, lng_val, lat_err, lng_err = decode_val_err(geohash)\r\n precision = _get_precision(lng_err)\r\n lat_val = \"%.*f\" % (precision, lat_val)\r\n lng_val = \"%.*f\" % (precision, lng_val)\r\n return lat_val, lng_val\r\n except:\r\n print(\"Unable to decode!\") # TODO better error message\r", "def bus_write_latitude(self, latitude_param: float) -> float:\n latitude_param = 
Base.check_float_param(latitude_param)\n return self.dss_obj.BUSF(13, ctypes.c_double(latitude_param))", "def hash_str(c, hash_length):\n if isinstance(c, float):\n if numpy.isnan(c):\n return c\n raise ValueError(f\"numpy.nan expected, not {c}\")\n m = hashlib.sha256()\n m.update(c.encode(\"utf-8\"))\n r = m.hexdigest()\n if len(r) >= hash_length:\n return r[:hash_length]\n return r", "def hash_float(c, hash_length):\n if numpy.isnan(c):\n return c\n else:\n b = struct.pack(\"d\", c)\n m = hashlib.sha256()\n m.update(b)\n r = m.hexdigest()\n if len(r) >= hash_length:\n r = r[:hash_length]\n i = int(r, 16) % (2 ** 53)\n return float(i)", "def coordinates_str(info_df: DataFrame, lat: float, lon: float) -> str:\n lat_precision = attribute_value(info_df, \"geospatial_lat_resolution\")\n lat_value = str(round_to(lat, lat_precision)).split(\".\")\n\n lat_str = (\n f\"[({lat_value[0]}.{lat_value[1][:2]}):1:({lat_value[0]}.{lat_value[1][:2]})]\"\n )\n\n lon_precision = attribute_value(info_df, \"geospatial_lon_resolution\")\n lon_value = str(round_to(lon, lon_precision)).split(\".\")\n\n lon_str = (\n f\"[({lon_value[0]}.{lon_value[1][:2]}):1:({lon_value[0]}.{lon_value[1][:2]})]\"\n )\n\n return lat_str + lon_str", "def encode_positions(self,\n positions: mx.sym.Symbol,\n data: mx.sym.Symbol) -> mx.sym.Symbol:\n pass", "def _fixupPosition(self, position):\n if \"latitudeI\" in position:\n position[\"latitude\"] = position[\"latitudeI\"] * 1e-7\n if \"longitudeI\" in position:\n position[\"longitude\"] = position[\"longitudeI\"] * 1e-7", "def float_encode(self, value):\n if value < 128:\n code = value\n elif value > 31743:\n code = 255\n else:\n exp=0\n value>>=3\n while(value>31):\n exp+=1\n value>>=1\n exp<<=4\n code = 0x80 | exp | (value & 0x0F)\n return code", "def _generate_compressed_position(cls, latitude: float, longitude: float, symbol_table: str,\n symbol_id: str, altitude: int = None, course: int = None,\n speed: int = None, radio_range: int = None,\n fix: CompressionFix = CompressionFix.OLD,\n source: CompressionSource = CompressionSource.OTHER,\n origin: CompressionOrigin = CompressionOrigin.SOFTWARE\n ) -> str:\n\n lat = APRSUtils.encode_compressed_latitude(latitude)\n lng = APRSUtils.encode_compressed_longitude(longitude)\n\n if course is not None and speed is not None:\n c = chr((course // 4) + 33)\n s = chr(round(math.log((speed + 1), 1.08)) + 33)\n t = cls._generate_compressed_byte(fix, source, origin)\n\n elif altitude is not None and altitude >= 1.0:\n # First get the exponent\n exp = round(math.log(altitude, 1.002))\n\n # The exponent is converted back into two numbers that fit the following equation:-\n # a * 91 + b = exp\n # Values must translate (after having 33 added to them) to a printable ASCII character.\n # This means we're limited to between 0 and 93 (33 and 126).\n a = None\n for b in range(0, 94):\n if (exp - b) % 91 == 0 and (exp - b) // 91 < 94:\n a = (exp - b) // 91\n break\n else:\n raise GenerateError(\"Could not encode altitude ({}ft)\".format(altitude))\n\n c = chr(a + 33)\n s = chr(b + 33)\n\n source = CompressionSource.GGA\n t = cls._generate_compressed_byte(fix, source, origin)\n\n elif radio_range is not None:\n # The first character is always {\n c = \"{\"\n\n # The range is encoded as the exponent of the following:-\n # 2 * (1.08 ** exp)\n exp = round(math.log((radio_range/2), 1.08))\n s = chr(exp + 33)\n\n t = cls._generate_compressed_byte(fix, source, origin)\n\n else:\n c = \" \"\n s = \"s\"\n t = \"T\"\n\n return 
f\"{symbol_table}{lat}{lng}{symbol_id}{c}{s}{t}\"", "def get_hash(self):\n s = super(BoundingBox, self).get_hash()\n for c in self.start:\n s += \"_%f\" % c\n for c in self.size:\n s += \"_%f\" % c\n return s", "def geohash_dec2coords(*, geohash_dec: int, pre: int = 6) -> Tuple[float, float]:\n res = decoder(dec2hash(geohash_dec, pre=pre))\n return round(sum(res[0]) / 2, max(3, pre - 3)), round(\n sum(res[1]) / 2, max(3, pre - 3)\n )", "def gpoly_encode(points):\n try:\n import gpolyencode\n encoder = gpolyencode.GPolyEncoder()\n except Exception as err:\n return dict(error='%s: %s' % (type(err), err, ))\n else:\n return encoder.encode(points)", "def _geohash2bits(geohash):\r\n bits = ''.join([_char2bits(c) for c in geohash])\r\n return bits", "def _format_point_postgis(lat: float, lon: float) -> Point:\n return sa.cast(\"POINT({} {})\".format(lon, lat), ga.types.Geography)", "def hash_point(self, point) -> int:\n\n hash_value = 7\n hash_value = 53 * hash_value + hash(point.id)\n hash_value = 53 * hash_value + hash(point.cat)\n hash_value = 53 * hash_value + int(point.lat * point.lat)\n hash_value = 53 * hash_value + int(point.lon * point.lon)\n return hash_value", "def _coord_to_bin(self,code):\n\t\tbinary = \"\"\n\t\tfor num in code:\n\t\t\tbinary += '{0:02b}'.format(int(num))\n\t\tassert ( len(binary) == 16 )\n\t\treturn binary", "def encode(self, compressed, hash160=False):\n # calculate the bytes\n if compressed:\n prefix = b'\\x02' if self.y % 2 == 0 else b'\\x03'\n pkb = prefix + self.x.to_bytes(32, 'big')\n else:\n pkb = b'\\x04' + self.x.to_bytes(32, 'big') + self.y.to_bytes(32, 'big')\n # hash if desired\n return ripemd160(sha256(pkb)) if hash160 else pkb", "def calc_statistics_hash(self) -> bytes:\n return b\"somehash\"", "def insert_fix(**kwargs):\n kwargs['fix_center'] = ut.mk_point(lon=kwargs['lon'], lat=kwargs['lat'], alt=0)\n \n \"\"\"\n sql = \"insert into fix(ident, major, point) values(\"\n sql += \"%(ident)s, %(major)s, \"\n sql += \"ST_Transform(ST_GeomFromText(%(fix_center)s, 4326),3857));\"\n \"\"\"\n sql = \"insert into navaid(ident, name, search, ntype_id, point) values(\"\n sql += \"%(ident)s, %(ident)s, %(ident)s, 201, \"\n sql += \"ST_Transform(ST_GeomFromText(%(fix_center)s, 4326),3857));\"\n #print sql\n db.Cur.execute(sql, kwargs)", "def pubkey(self, address : str) -> str:\n account_map = \"13456789abcdefghijkmnopqrstuwxyz\"\n account_lookup = {}\n for i in range(0,32): #make a lookup table\n account_lookup[account_map[i]] = BitArray(uint=i,length=5)\n acrop_key = address[-60:-8] #leave out prefix and checksum\n number_l = BitArray() \n for x in range(0, len(acrop_key)): \n number_l.append(account_lookup[acrop_key[x]]) \n number_l = number_l[4:] # reduce from 260 to 256 bit\n result = number_l.hex.upper()\n return result", "def encode_ga(addr: Union[str, GroupAddress]) -> int:\n def conv(main, middle, sub):\n return (int(main) << 11) + (int(middle) << 8) + int(sub)\n\n if isinstance(addr, str):\n parts = addr.split('/')\n if len(parts) == 3:\n return conv(parts[0], parts[1], parts[2])\n elif isinstance(addr, GroupAddress):\n return conv(addr.main, addr.middle, addr.sub)\n raise ValueError", "def encode(num):\n # Check the number is within our working range\n if num > SIZE: return None\n if num < 0: return None\n\n return friendly_number(perfect_hash(num))", "def floatify(latlon):\n sign = -2. * (latlon[-1].lower() in ['s', 'w']) + 1\n return float(latlon[:-1]) * sign", "def floatify(latlon):\n sign = -2. 
* (latlon[-1].lower() in ['s', 'w']) + 1\n return float(latlon[:-1]) * sign", "def _generate_uncompressed_position(latitude: float, longitude: float, symbol_table: str,\n symbol_id: str, ambiguity: int = 0) -> str:\n # Encode the latitude\n lat = APRSUtils.encode_uncompressed_latitude(latitude, ambiguity)\n\n # Encode the longitude\n lng = APRSUtils.encode_uncompressed_longitude(longitude, ambiguity)\n\n logger.debug(\"Latitude: {} ({}) Longitude: {}\".format(\n lat, ambiguity, lng\n ))\n\n # Parse the symbol table\n logger.debug(\"Symbol table: {}\".format(symbol_table))\n logger.debug(\"Symbol ID: {}\".format(symbol_id))\n\n info = f\"{lat}{symbol_table}{lng}{symbol_id}\"\n\n return info", "def poly_hash(text, p, x):\n hash = 0\n for ch in reversed(text):\n hash = (hash * x + ord(ch)) % p\n\n return hash", "def upload_point(x, y, label=\"\"):\n\n conn = None\n cur = None\n\n try:\n # check the point is inside the usa, both point and states must be WGS84\n conn = utils.pgconnect(**settings.DEFAULT_CONNECTION)\n cur = conn.cursor()\n #if the point is inside this will return (True,) otherwise None\n cur.execute(\"\"\"select result from\n (select st_contains(s.geom,ST_GeomFromText('POINT(%s %s)', 4326)) as result \n from %s as s) as subquery\n where result is true\"\"\",(AsIs(x),AsIs(y), AsIs(settings.STATES_TABLE_NAME)))\n\n result = cur.fetchone()\n #print(result)\n\n if result: # if result is not None\n\n #check numbers size, crop to 4 digits, define the marker size\n\n # size symbol\n size=None\n\n # store number of decimal digits\n lx = 0\n ly = 0\n\n # convert numbers to string\n #x = str(x);y = str(y)\n\n if ',' in x or ',' in y:\n raise Exception(\"decimal numbers should not contain ','\")\n\n # check the number of decimal digits and crop to 4\n if '.' in x: # do only for float number\n lx = len(x.split('.')[1]) # get decimals\n if lx > 4: # crop size to 4\n x = x[:(4 - lx)]\n lx = 4\n if '.' 
in y: # do only for float number\n ly = len(y.split('.')[1])\n if ly > 4:\n y = y[:(4 - ly)]\n ly = 4\n\n # select a symbol size according\n # for the size take the bigger number of digits of the two numbers\n ndigits = max([lx, ly])\n if ndigits == 0:\n size = 5\n elif ndigits == 1:\n size = 4\n elif ndigits == 2:\n size = 3\n elif ndigits == 3:\n size = 2\n elif ndigits == 4:\n size = 1\n\n #upload to database\n cur.execute(\n \"\"\"INSERT INTO %s(lat,lon,label,size) VALUES (%s,%s,%s,%s) RETURNING id\"\"\",\n ( AsIs(settings.BOOKMARKS_TABLE_NAME), y, x, label, size))\n #id = cur.fetchone()[0]\n #print(id)\n cur.execute(\"\"\"UPDATE %s SET geom = ST_PointFromText('POINT(' || lon || ' ' || lat || ')', 4326)\"\"\", (AsIs(settings.BOOKMARKS_TABLE_NAME),))\n conn.commit()\n\n else:\n raise Exception(\"the point is not inside USA\")\n\n except Exception as e:\n raise Exception(e)\n\n else:\n return x, y, size #return the cropped coordinates and marker size\n\n finally:\n if cur: cur = None\n if conn: conn = None", "def cart_to_gps_dist(cart_dist):\n\tdeg_per_km = 0.00905711\n\treturn cart_dist * deg_per_km", "def base64_pub_encode(self, key):\n (y, g, p, q) = (str(key.y), str(key.g), str(key.p), str(key.q))\n return base64.b64encode((y + \",\" + g + \",\" + p + \",\" + q).encode('utf-8')).decode('utf-8')", "def encode_num(num):\n\tsnuml = list(str(num))\n\tif len(snuml)%2 == 1:\n\t\tsnuml += 'f'\n\tfor i in range(len(snuml)):\n\t\tif i % 2 == 0:\n\t\t\tsnuml[i], snuml[i+1] = snuml[i+1], snuml[i]\n\t# Below, ext=1 for some reason; use unknown numbering type, unknown\n\t# numbering plan.\n\tenc_num = (\n\t\tto_hex2(len(snuml)/2 + 1) + \"81\" + ''.join(snuml)\n\t)\n\treturn(enc_num)", "def N(latitude):\n return a/math.sqrt(1-e2*pow(math.sin(latitude),2.0))", "def __encode_importent_info(info):\r\n return hashlib.sha256(str(info).encode()).hexdigest()", "def make_address(pubkeys, n):\n return (str(len(pubkeys)) + str(n) +\n base58_encode(det_hash({str(n): pubkeys}))[0:29])", "def _point_hash((x, y)):\n x, y = map(float, (x, y))\n return hash((atan2(x, y), hypot(x, y)))", "def _coordinate2bits(val, lo, hi, length):\r\n bits = ''\r\n while len(bits) < length:\r\n mid = (lo + hi) / 2\r\n if val > mid:\r\n bits += '1'\r\n lo = mid\r\n else:\r\n bits += '0'\r\n hi = mid\r\n return bits", "def test_query_params_geo_position(session, params, expected_number_of_hits):\n result = get_search(session, params)\n compare(result['total']['value'], expected_number_of_hits)", "def hash(self, text):\n hashval = 0\n for i in xrange(0, len(text)):\n hashval += ord(text[i])**i\n return hashval", "def encode(self, x, y, pizza_loc, dest_idx):\n i = x\n i *= self.size\n i += y\n i *= self.pizza_space\n i += pizza_loc\n i *= len(self.homes)\n i += dest_idx\n\n return i", "def hash_int(c, hash_length):\n if isinstance(c, float):\n if numpy.isnan(c):\n return c\n else:\n raise ValueError(f\"numpy.nan expected, not {c}\")\n else:\n b = struct.pack(\"i\", c)\n m = hashlib.sha256()\n m.update(b)\n r = m.hexdigest()\n if len(r) >= hash_length:\n r = r[:hash_length]\n return int(r, 16) % (10 ** 8)", "def format_short_geocommit(self):\r\n geocommit = \"geocommit(1.0): \"\r\n geocommit += self.format_geocommit(\" \", \", \")\r\n geocommit += \";\"\r\n\r\n return geocommit", "def lonlat_to_osgb (lon, lat, digits=3):\n\t# NOTE: last test actually fails, due to being off by 1. 
That's 1\n\t# metre, and I'm not going to worry about it.\n\teast, north = lonlat_to_eastnorth (lon, lat)\n\treturn eastnorth_to_osgb (east, north, digits)", "def p2pkh_address(Q: Point,\n compressed: bool,\n version: bytes = b'\\x00') -> bytes:\n\n vh160 = version + h160_from_pubkey(Q, compressed)\n return base58.encode_check(vh160)", "async def test_floating_point_encoding(self, r):\n await r.flushdb()\n timestamp = 1349673917.939762\n await r.zadd('a', timestamp, 'a1')\n assert await r.zscore('a', 'a1') == timestamp", "def encode_v33_ce(geometry: GeomDict, identifier: str) -> Element:\n\n return GML33CE_ENCODER.encode(geometry, identifier)", "def add_geo_hashes(self, geohashes: Iterable[str], **kwargs):\n geo_entities = []\n for geohash in geohashes:\n decoded_location = decode_geo_hash(geohash)\n geo_entities.append(\n GeoLocation(Latitude=decoded_location[0], Longitude=decoded_location[1])\n )\n\n self.add_geoloc_cluster(geo_locations=geo_entities, **kwargs)", "def encode(self):\n\n # Start from occupancy\n encoding = self.occupancy.copy();\n\n # Add goals\n for g in self.goals:\n if g in self.discovered_goals:\n encoding[self.tocellcoord[g]] += 10\n else:\n encoding[self.tocellcoord[g]] += 100\n\n # Add agents\n for pos in self.currstate:\n encoding[self.tocellcoord[pos]] += 2\n\n return encoding", "def geocentric2geodetic(latitude):\n\n return np.rad2deg(np.arctan(1.0067395 * np.tan(np.deg2rad(latitude))))", "def encode (params):\n \n bvList = []\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = 1), 6))\n \n if 'RepeatIndicator' in params:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = params['RepeatIndicator']), 2))\n else:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = 0), 2))\n \n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = params['MMSI']), 30))\n \n if 'NavigationStatus' in params:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = params['NavigationStatus']), 4))\n else:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = 15), 4))\n \n if 'ROT' in params:\n bvList.append(aisbinary.bvFromSignedInt(params['ROT'], 8))\n else:\n bvList.append(aisbinary.bvFromSignedInt(-128, 8))\n \n if 'SOG' in params:\n bvList.append(\n aisbinary.setBitVectorSize(BitVector(intVal = int((Decimal(params['SOG']) * Decimal('10')))), 10))\n else:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = int(1023)), 10))\n \n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = params['PositionAccuracy']), 1))\n \n if 'longitude' in params:\n bvList.append(aisbinary.bvFromSignedInt(int(Decimal(params['longitude']) * Decimal('600000')), 28))\n else:\n bvList.append(aisbinary.bvFromSignedInt(108600000, 28))\n \n if 'latitude' in params:\n bvList.append(aisbinary.bvFromSignedInt(int(Decimal(params['latitude']) * Decimal('600000')), 27))\n else:\n bvList.append(aisbinary.bvFromSignedInt(54600000, 27))\n if 'COG' in params:\n bvList.append(\n aisbinary.setBitVectorSize(BitVector(intVal = int((Decimal(params['COG']) * Decimal('10')))), 12))\n else:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = int(3600)), 12))\n if 'TrueHeading' in params:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = params['TrueHeading']), 9))\n else:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = 511), 9))\n if 'TimeStamp' in params:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = params['TimeStamp']), 6))\n else:\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = 60), 6))\n \n 
bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = 0), 4))\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = 0), 1))\n \n if params[\"RAIM\"]:\n bvList.append(TrueBV)\n else:\n bvList.append(FalseBV)\n \n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = params['state_syncstate']), 2))\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = params['state_slottimeout']), 3))\n bvList.append(aisbinary.setBitVectorSize(BitVector(intVal = params['state_slotoffset']), 14))\n \n return aisbinary.joinBV(bvList)", "def encode(self, password, salt):\n raise NotImplementedError()", "def map_geo_hashed_value(l):\n \n l = sorted(l)\n return {k: index for index, k in enumerate(l)}", "def to_hash(self):\n city = City.get(City.id == self.city)\n owner = User.get(User.id == self.owner)\n place = {\n 'owner_id' : owner.id,\n 'city_id' : city.id,\n 'name' : self.name,\n 'description' : self.description,\n 'number_rooms' : self.number_rooms,\n 'number_bathrooms' : self.number_bathrooms,\n 'max_guest' : self.max_guest,\n 'price_by_night' : self.price_by_night,\n 'latitude' : self.latitude,\n 'longitude' : self.longitude\n }\n return super(Place, self).to_hash(self, place)", "def insert_hash(self, title, artist, song_id:int, fingerprint: str, offset:int):\n fingerprint = Fingerprints(song_id=song_id, song_title=title, artist=artist, hash=fingerprint, offset=offset)\n fingerprint.save()", "def encode(self, timestamp: int) -> int:\n return timestamp", "def encode_password(password):\r\n password = password.encode()\r\n password_encode = hashlib.sha256(password).hexdigest()\r\n return password_encode", "def proximity_search(self, latitude, longitude, radius):\n\n hashcode = geohash.encode(latitude=latitude, longitude=longitude)\n centerpoint = (latitude, longitude)\n\n tmp_hashcode = ''\n for x in hashcode:\n # Go through the hashcode character by character\n tmp_hashcode += x\n lat, lng, delta_lat, delta_lng = geohash.decode(tmp_hashcode,\n delta=True)\n overall_lat = 2 * 1000 * haversine(\n point1=(latitude - delta_lat, longitude),\n point2=(latitude + delta_lat, longitude)\n )\n overall_lng = 2 * 1000 * haversine(\n point1=(latitude, longitude-delta_lng),\n point2=(latitude, longitude+delta_lng)\n )\n\n dist = min(overall_lng, overall_lat)\n if dist < radius:\n tmp_hashcode = tmp_hashcode[:-1]\n break\n\n if tmp_hashcode == '':\n raise ValueError('Radius larger than earth')\n\n precision = len(tmp_hashcode)\n\n search_hashes = self._get_adjoining_hashes(hashcode=hashcode,\n precision=precision)\n search_hashes.append(tmp_hashcode)\n\n possible_points = []\n result_values = []\n\n for search_hash in search_hashes:\n possible_points.extend(self.storage.values(prefix=search_hash))\n\n for point_id in possible_points:\n point = self.points_by_id[point_id]\n dist = 1000 * haversine(centerpoint, point)\n if dist <= radius:\n result_values.append((point_id, dist))\n\n sorted_results = sorted(result_values, key = lambda x: x[1])\n final_results = [x[0] for x in sorted_results]\n return final_results", "def OSGB36toWGS84(lat, lng):\n\n a = 6377563.396\n b = 6356256.909\n eSquared = ab2ecc(a, b)\n\n phi = math.radians(lat)\n lmb = math.radians(lng)\n\n v = a / (math.sqrt(1 - eSquared * sinSquared(phi)))\n H = 0\n x = (v + H) * math.cos(phi) * math.cos(lmb)\n y = (v + H) * math.cos(phi) * math.sin(lmb)\n z = ((1 - eSquared) * v + H) * math.sin(phi)\n\n tx = 446.448\n ty = -124.157\n tz = 542.060\n s = -0.0000204894\n rx = math.radians(0.00004172222)\n ry = 
math.radians(0.00006861111)\n rz = math.radians(0.00023391666)\n\n xB = tx + (x * (1 + s)) + (-rx * y) + (ry * z)\n yB = ty + (rz * x) + (y * (1 + s)) + (-rx * z)\n zB = tz + (-ry * x) + (rx * y) + (z * (1 + s))\n\n a = 6378137.000\n b = 6356752.3141\n eSquared = ab2ecc(a, b)\n\n lambdaB = math.degrees(math.atan(yB / xB))\n p = math.sqrt((xB * xB) + (yB * yB))\n phiN = math.atan(zB / (p * (1 - eSquared)))\n for i in xrange(1,10):\n v = a / (math.sqrt(1 - eSquared * sinSquared(phiN)))\n phiN1 = math.atan((zB + (eSquared * v * math.sin(phiN))) / p)\n phiN = phiN1\n\n phiB = math.degrees(phiN)\n\n return (phiB, lambdaB)", "def get_single_location(chrom, pos):\n return CHROMOSOME_TO_CODE[chrom] * int(1e9) + pos", "def hashmaptostring(hashmap: Dict[Tuple[int, int], Gridspace], nrows: int, ncols: int) -> str:\n\n return \"\\n\".join(\" \".join(hashmap[(r, c)].hint for c in range(ncols)) for r in range(nrows))", "def coords_dict_to_coords_string(coords):\n longitude, latitude = None, None\n for k,v in coords.items():\n if \"at\" in k:\n latitude = v\n if \"ong\" in k:\n longitude = v\n if not longitude and latitude:\n print(\"Unable to identify longitude and latitude keys\")\n return \"\"\n coords_string = \"{:.2f}_{:.2f}\".format(longitude, latitude)\n return coords_string", "def _pepper_hash(pepper, password, salt):\n return '{:0>8}{:s}{:s}'.format(pepper, password, salt)", "def encode_chromosome(in_num):\n convert_dict = {23: \"X\", 24: \"Y\", 25: \"MT\"}\n return convert_dict[in_num] if in_num in convert_dict else str(in_num)", "def geocode(address):\n\n mapsurl = ('http://maps.googleapis.com/maps/api/geocode/xml?address=' +\n address.replace(' ', '+') + '&sensor=false')\n\n coords = urllib.urlopen(mapsurl).read()\n root = etree.fromstring(coords)\n coordstr = (0, 0)\n loc = root.find(\".//location\")\n if not loc is None:\n coordstr = (loc[1].text, loc[0].text)\n return coordstr", "def format_long_geocommit(self):\r\n geocommit = \"geocommit (1.0)\\n\"\r\n geocommit += self.format_geocommit(\": \", \"\\n\")\r\n geocommit += \"\\n\\n\"\r\n\r\n return geocommit", "def spatial_entropy(map_):\n map_ = map_ / np.sum(map_)\n return -1 * np.sum(map_ * np.log(map_))", "def _create_geo_location(self, longitude, latitude):\n if longitude and latitude:\n return {\n 'lon': longitude,\n 'lat': latitude\n }", "def geometry_hash(geometry):\n if hasattr(geometry, 'md5'):\n # for most of our trimesh objects\n md5 = geometry.md5()\n elif hasattr(geometry, 'tostring'):\n # for unwrapped ndarray objects\n md5 = str(hash(geometry.tostring()))\n\n if hasattr(geometry, 'visual'):\n # if visual properties are defined\n md5 += str(geometry.visual.crc())\n return md5", "def lat_lng(row):\r\n lat = row[\"latitude\"]\r\n lng = row[\"longitude\"]\r\n n = int(lat/GRANULARITY)\r\n nlat_start = n * GRANULARITY\r\n nlat_end = nlat_start + GRANULARITY\r\n nlg=int(lng/GRANULARITY)\r\n nlng_start = nlg * GRANULARITY\r\n nlng_end = nlng_start + GRANULARITY\r\n latlng=[(nlat_start,nlng_start), (nlat_start,nlng_end), (nlat_end,nlng_end), (nlat_end,nlng_start)]\r\n return latlng", "def Encrypt(self, plaintext, r_value=None):\n\n if not isinstance(plaintext, int) and not isinstance(plaintext, long):\n raise ValueError('Expected int or long type plaintext but got: %s' %\n type(plaintext))\n r = r_value or self._GetRandomFromZNStar(N_LENGTH, self.n)\n return (ModExp(self.g, plaintext, self.nsquare) *\n ModExp(r, self.n, self.nsquare)) % self.nsquare", "def encode_extra_field(self, relative_orbit):\n if isinstance(relative_orbit, 
int):\n return \"{:03d}\".format(relative_orbit)\n else:\n return relative_orbit", "def wkb_hex(self): # -> str:\n ...", "def partition_geocode(con: sqlite3.Connection, cur: sqlite3.Cursor, quarter: str, county_cht: str):\n cur.execute('''SELECT ๅœŸๅœฐๅ€ๆฎตไฝ็ฝฎๆˆ–ๅปบ็‰ฉๅ€้–€็‰Œ FROM \"{0}/TRX\"\n WHERE ็ธฃๅธ‚ = ?\n GROUP BY ๅœŸๅœฐๅ€ๆฎตไฝ็ฝฎๆˆ–ๅปบ็‰ฉๅ€้–€็‰Œ;'''.format(quarter), (county_cht,))\n for address, in cur.fetchall():\n cur.execute('''SELECT GEO.็ทจ่™Ÿ\n FROM \"{0}/TRX\" AS TRX, \"{0}/GEO\" AS GEO\n WHERE TRX.็ทจ่™Ÿ = GEO.็ทจ่™Ÿ\n AND TRX.ๅœŸๅœฐๅ€ๆฎตไฝ็ฝฎๆˆ–ๅปบ็‰ฉๅ€้–€็‰Œ = ?\n AND GEO.LAT_Avg ISNULL;'''.format(quarter), (address,))\n identities = cur.fetchall()\n if not identities:\n continue\n print(\"[%d] \"%(len(identities)) + address)\n try:\n results = selective_geocode(address)\n except geo.AddressError:\n continue\n if len(results[\"lat\"]) != 5 or len(results[\"lon\"]) != 5:\n continue\n results[\"lat\"].append(sum(results[\"lat\"]) / len(results[\"lat\"]))\n results[\"lon\"].append(sum(results[\"lon\"]) / len(results[\"lon\"]))\n combined = [num for zipped in zip(results[\"lat\"], results[\"lon\"]) for num in zipped]\n values = [(tuple(combined) + identity) for identity in identities]\n cur.executemany('''UPDATE \"{0}/GEO\" SET\n LAT_1 = ?, LON_1 = ?,\n LAT_2 = ?, LON_2 = ?,\n LAT_3 = ?, LON_3 = ?,\n LAT_4 = ?, LON_4 = ?,\n LAT_5 = ?, LON_5 = ?,\n LAT_Avg = ?, LON_Avg = ?\n WHERE ็ทจ่™Ÿ = ?;'''.format(quarter), values)\n con.commit()", "def latitude(self, lat):\n data = float(lat[1:])\n if lat[0] == \"N\":\n return data\n else:\n return -data", "def hash_to_point(self, message, salt):\r\n n = self.n\r\n if q > (1 << 16):\r\n raise ValueError(\"The modulus is too large\")\r\n\r\n k = (1 << 16) // q\r\n # Create a SHAKE object and hash the salt and message.\r\n shake = SHAKE256.new()\r\n shake.update(salt)\r\n shake.update(message)\r\n # Output pseudorandom bytes and map them to coefficients.\r\n hashed = [0 for i in range(n)]\r\n i = 0\r\n j = 0\r\n while i < n:\r\n # Takes 2 bytes, transform them in a 16 bits integer\r\n twobytes = shake.read(2)\r\n elt = (twobytes[0] << 8) + twobytes[1] # This breaks in Python 2.x\r\n # Implicit rejection sampling\r\n if elt < k * q:\r\n hashed[i] = elt % q\r\n i += 1\r\n j += 1\r\n return hashed", "def hash_to_point(self, message, salt):\r\n n = self.n\r\n if q > (1 << 16):\r\n raise ValueError(\"The modulus is too large\")\r\n\r\n k = (1 << 16) // q\r\n # Create a SHAKE object and hash the salt and message.\r\n shake = SHAKE256.new()\r\n shake.update(salt)\r\n shake.update(message)\r\n # Output pseudorandom bytes and map them to coefficients.\r\n hashed = [0 for i in range(n)]\r\n i = 0\r\n j = 0\r\n while i < n:\r\n # Takes 2 bytes, transform them in a 16 bits integer\r\n twobytes = shake.read(2)\r\n elt = (twobytes[0] << 8) + twobytes[1] # This breaks in Python 2.x\r\n # Implicit rejection sampling\r\n if elt < k * q:\r\n hashed[i] = elt % q\r\n i += 1\r\n j += 1\r\n return hashed", "def _normalize_location(lat: float, lon: float):\n latitude = \"{0:.3f}\".format(round(lat, 3))\n longitude = \"{0:.3f}\".format(round(lon, 3))\n return latitude + \":\" + longitude", "def test_encode_pair():\n\tassert encode_pair(0, 0) == 0\n\tassert encode_pair(1, 0) == 1\n\tassert encode_pair(0, 1) == 2\n\tassert encode_pair(4, 6) == 207", "def encode_public_key(value: PublicKey) -> bytes:\n return bytes([value.algo.value]) + value.pbk", "def __str__(self):\n struct_repr = \", \".join([\n \"latitude_deg: \" + str(self.latitude_deg),\n 
\"longitude_deg: \" + str(self.longitude_deg),\n \"absolute_altitude_m: \" + str(self.absolute_altitude_m),\n \"relative_altitude_m: \" + str(self.relative_altitude_m)\n ])\n\n return f\"Position: [{struct_repr}]\"", "def lat2str(x):\n \n deg = u\"\\u00B0\"\n if x<0:\n return f'{abs(x)}{deg}S'\n else:\n return f'{x}{deg}N'", "def hash(self):\r\n sign_map = AutoVivification()\r\n digest = lambda x: self.__polynomial_hash(x)\r\n # We are only doing signatures for top levels\r\n for k, v in self.iteritems():\r\n # Digested value of the string representation of \r\n # what is behind.\r\n tmp = str(v)\r\n # Removed non meaningful information from the content.\r\n # No capital L is ever used in the register namings, so it is safe to strip that too.\r\n tmp = tmp.strip().replace('{','').replace('}','').replace(':','').replace(' ','').replace('L','')\r\n value = digest(tmp)\r\n sign_map[k] = string.atoi(value, 16)\r\n \r\n return sign_map", "def calc_info_hash(self) -> bytes:\n raise NotImplementedError()", "def encode_point(format: str,\n centers: Tensor,\n pix2pix_delta: Tensor,\n anchor_wh: Tensor,\n r_point: Tensor,\n boxes2centers_mapping: Tensor,\n var: float = 0.5):\n\n if format == \"normalized_xy_offsets\":\n r_point = r_point.to(centers.device).view(r_point.size(0), -1)\n t_point = r_point[boxes2centers_mapping]\n t_point = t_point.view(centers.size(0), -1, 2)\n t_point -= centers[:, None]\n t_point /= anchor_wh[:, None] * var\n t_point = t_point.view(centers.size(0), -1, 2)\n else:\n raise NotImplementedError(\"format = {}?\".format(format))\n return t_point" ]
[ "0.7769761", "0.7752675", "0.72821146", "0.71992105", "0.6168017", "0.61100185", "0.59604454", "0.5934149", "0.5906312", "0.5894907", "0.5872776", "0.58589303", "0.56805354", "0.5602078", "0.5597624", "0.5574245", "0.5522198", "0.5442411", "0.537271", "0.5353991", "0.53291243", "0.5282451", "0.5187494", "0.51836413", "0.5172665", "0.5085475", "0.5066688", "0.5055648", "0.50394326", "0.50369775", "0.5029533", "0.4973284", "0.49626994", "0.49623597", "0.49618894", "0.4930228", "0.49115732", "0.49052113", "0.48948586", "0.48813125", "0.48642153", "0.48642153", "0.48366958", "0.48122856", "0.4794748", "0.47944987", "0.47690734", "0.47585174", "0.47487408", "0.47351012", "0.47267532", "0.4719804", "0.47103864", "0.4701976", "0.47008294", "0.4697488", "0.46944672", "0.46828637", "0.46769646", "0.46674067", "0.46643594", "0.46593016", "0.46584293", "0.46534452", "0.46519503", "0.46515214", "0.46468496", "0.46466", "0.46461326", "0.46433783", "0.46405384", "0.46399254", "0.46310657", "0.46279338", "0.46241418", "0.46190962", "0.46117505", "0.46055055", "0.46040493", "0.45984766", "0.45976594", "0.45902863", "0.45902288", "0.45824072", "0.457834", "0.45757347", "0.45722148", "0.45677665", "0.45598337", "0.4553073", "0.45480835", "0.45480835", "0.45467806", "0.45437002", "0.4535602", "0.4532477", "0.4526463", "0.4512297", "0.45095393", "0.4509439" ]
0.79064786
0
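The geohash snippets in the record above all implement the same interval-halving scheme: alternate longitude/latitude bits, five bits per base32 character. A minimal self-contained sketch of that scheme follows — the `BASE32` alphabet is the standard geohash table that the snippets reference as `__base32`/`BASE32` (an assumption about their contents), and the coordinate in the final comment is the canonical worked example.

# Minimal geohash encoder following the interval-halving scheme used by
# the snippets above. BASE32 is the standard geohash alphabet (assumed to
# match the snippets' __base32 / BASE32 tables).
BASE32 = "0123456789bcdefghjkmnpqrstuvwxyz"

def geohash_encode(lat: float, lon: float, precision: int = 6) -> str:
    lat_lo, lat_hi = -90.0, 90.0
    lon_lo, lon_hi = -180.0, 180.0
    chars, bits, bit_count, even = [], 0, 0, True
    while len(chars) < precision:
        if even:  # even-numbered bits refine longitude
            mid = (lon_lo + lon_hi) / 2
            if lon > mid:
                bits, lon_lo = (bits << 1) | 1, mid
            else:
                bits, lon_hi = bits << 1, mid
        else:  # odd-numbered bits refine latitude
            mid = (lat_lo + lat_hi) / 2
            if lat > mid:
                bits, lat_lo = (bits << 1) | 1, mid
            else:
                bits, lat_hi = bits << 1, mid
        even = not even
        bit_count += 1
        if bit_count == 5:  # every 5 bits map to one base32 character
            chars.append(BASE32[bits])
            bits, bit_count = 0, 0
    return ''.join(chars)

# geohash_encode(57.64911, 10.40744, 11) -> 'u4pruydqqvj'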
Return the header of the request.
def _get_request_header() -> Dict:
    metas, envs = get_full_version()

    header = {
        **{f'jinameta-{k}': str(v) for k, v in metas.items()},
        **envs,
    }
    return header
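A minimal usage sketch for the builder above, assuming `requests` is installed — the dict it returns travels as plain string headers, and the endpoint URL here is a placeholder, not a real API:

# Hypothetical consumer of _get_request_header(); the endpoint is a
# placeholder and `requests` is assumed to be available.
import requests

def post_with_meta(payload: dict) -> requests.Response:
    headers = _get_request_header()  # jinameta-* keys plus env vars, all strings
    return requests.post('https://example.com/api', json=payload, headers=headers)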
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_header(self):\n return self._header", "def getHeader():\n return _HEADER", "def header(self):\n return self._header", "def header(self):\n return self._header", "def header(self):\n return self._header", "def getHeader(self):\n return self.data.header", "def header(self):\r\n return self.__header", "def mail_header(self):\n return self._hdr", "def get_request_headers(self):\n return self.request_headers", "def get_header(self, name):\n return self.headers.get(name)", "def _get_request_header(request, header_name, default=''):\r\n if request is not None and hasattr(request, 'META') and header_name in request.META:\r\n return request.META[header_name]\r\n else:\r\n return default", "def get_request_headers(self):\n return getattr(self.instance, 'request_headers')", "def getHeader(self, name):\n return self.headers.get(name.lower(), None)", "def header(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"header\")", "def auth_header(self):\n return self._auth_header", "def header(self, name):\n key = name.upper()\n if key not in _RESPONSE_HEADER_DICT:\n key = name\n return self._headers.get(key)", "def header(self, header, default=None):\r\n return self._get_headers().get(header.upper(), default)", "def get_headers(self):\n \n return self.headers", "def get_header(self, key, default=None):\n\n return self._request.headers[\n key] if key in self._request.headers else default", "def header(self):\n return self[0]", "def get_headers(self, ):\n return self.attrs.get(self.AttributeNames.HEADERS, None)", "def header(self, key, default=None):\n return self._get_headers().get(key.upper(), default)", "def get_request_headers():\n return request.headers.keys()", "def getheader(self, name, default=None):\n return self.urllib3_response.getheader(name, default)", "def getheader(self, name, default=None):\n return self.urllib3_response.getheader(name, default)", "def getheader(self, name, default=None):\n return self.urllib3_response.getheader(name, default)", "def peek_header(self):\n header = None\n if self._headers:\n # returns the last element on the list\n header = self._headers[-1:]\n\n return header", "def headers(self):\n return(self.__response.headers)", "def get_request_headers(self):\n\t\theaders = {\n\t\t\t'Cache-Control': 'no-cache no-store max-age=1',\n\t\t\t'Connection': 'cache-control',\n\t\t}\n\t\tif self.last_modified:\n\t\t\theaders['If-Modified-Since'] = self.last_modified\n\t\tif self.etag:\n\t\t\theaders['If-None-Match'] = self.etag\n\t\treturn headers", "def get_header(self, name, default=None):\n return self.headers.get(name, default)", "def http_headers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['HTTPHeaderArgs']]]]:\n return pulumi.get(self, \"http_headers\")", "def get_request_headers(self):\n return {\n 'Authorization': 'JWT ' + self.get_authorization_token()\n }", "def getHeader(self, key):\n if key not in self.headers:\n raise Exception(\"No such key in the header\")\n else:\n return self.headers[key]", "def get_auth_header(self):\n if not self.verify():\n return None\n\n auth_val = self.encode_auth_header_val()\n if not auth_val:\n return None\n\n return {'Authorization': auth_val.replace('\\n', '')}", "def get_request_headers(self, *args, **kwds):\n if self.request_headers:\n return self._unpack_headers(self.request_headers)", "def get_request_header(conn_socket: socket.socket) -> str:\n raw_double_new_line = \"\\r\\n\\r\\n\".encode(HttpServer.FORMAT)\n raw_request_header = conn_socket.recv(HttpServer.HEADER)\n\n while raw_double_new_line 
not in raw_request_header:\n raw_request_header += conn_socket.recv(HttpServer.HEADER)\n\n return raw_request_header.decode(HttpServer.FORMAT)", "def headers(self):\n return self._header", "def get_headers(req):\n user = req.headers.get('X-User-ID', None)\n tenant = req.headers.get('X-Tenant-ID', None)\n return user, tenant", "def authenticate_header(self, request):\n return self.keyword", "def getheaders(self):\n return self.__headers", "def fusion_api_get_headers(self):\n return self.fusion_client._headers.copy()", "def _get_header(self, header):\n if header is None:\n html = self.header()\n else:\n html = header\n return html", "def _get_authorization_header(self):\n return f\"token {self._context.get_github_token()}\"", "def get_authenticate_header(self):\n pass", "def get_header( self ):\n\t\tkey = self.key\n\t\tvalue = self.value\n\t\tpath = self.path\n\t\texpires = self.expires.strftime( \"%a, %d-%m-%y %H:%M:%S GMT\" )\n\t\treturn ( \"Set-Cookie\", \"%(key)s=%(value)s; Path=%(path)s; Expires=%(expires)s;\" % locals() )", "def headers(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:\n return pulumi.get(self, \"headers\")", "def get_token_auth_header():\n auth = request.headers.get(\"Authorization\", None)\n if not auth:\n return \"authorization_header_missing\"\n\n parts = auth.split()\n\n if parts[0].lower() != \"bearer\":\n return \"invalid_header\"\n elif len(parts) == 1:\n return \"invalid_header\"\n elif len(parts) > 2:\n return \"invalid_header\"\n\n token = parts[1]\n return token", "def getheaders(self):\n return self.urllib3_response.getheaders()", "def getheaders(self):\n return self.urllib3_response.getheaders()", "def getheaders(self):\n return self.urllib3_response.getheaders()", "def get_headers(self):\n return {\n 'Authorization': 'JWT {}'.format(self.token)\n }", "def get_headers():\n if not headers:\n headers[\"Content-Type\"] = \"application/json\"\n headers[\"Accept\"] = \"application/json\"\n headers[\"User-Agent\"] = constants.USER_AGENT\n headers[\"Authorization\"] = get_token(constants.AUTH_URL, cfg[\"key\"])\n\n return headers\n\n return headers", "def headers(self) -> Mapping[str, str]:\n return pulumi.get(self, \"headers\")", "def get_jwt_header(self):\n if self.jwt_header:\n return self.jwt_header\n self.jwt_header = self.get_jwt_token_from_secret_file(str(self.jwtfile))\n return self.jwt_header", "def get_headers(self):\r\n raise NotImplementedError", "def getAllHeaders(self, req):\n headers = {}\n for k, v in req.requestHeaders.getAllRawHeaders():\n headers[k.lower()] = v[-1]\n return headers", "def get_header(self, key, default = None):\n key = key.lower()\n for (_key, value) in self.headers:\n if key == _key.lower():\n return value\n return default", "def get_headers(input_header):\n if input_header:\n header = input_header\n else:\n header = create_marconi_headers()\n\n return header", "def get_headers():\n headers = {\n \"Authorization\": \"Token {}\".format(get_token()),\n }\n\n return headers", "def get_header(self, name, default=None, required=False):\n\n # Use try..except to optimize for the header existing in most cases\n try:\n # Don't take the time to cache beforehand, using HTTP naming.\n # This will be faster, assuming that most headers are looked\n # up only once, and not all headers will be requested.\n return self._headers[name.upper().replace('-', '_')]\n except KeyError:\n if not required:\n return default\n\n raise HTTPBadRequest('Missing header',\n 'The \"' + name + '\" header is required.')", "def getheader(self, 
name, default=None):\n if not self.__headers.has_hey(name):\n return default\n else: self.__headers[name]", "def _headers(self):\n\n auth_token = SendbeeAuth(self.client.api_secret).get_auth_token()\n headers = {\n 'X-Auth-Token': auth_token,\n 'X-Api-Key': self.client.api_key,\n 'Accept': 'application/json',\n 'Content-Type': 'application/json',\n 'User-Agent': 'Sendbee Python API Client'\n }\n self.debug.ok('headers', headers)\n\n return headers", "def get_http_headers(self):\n return dict(self.headers)", "def _headers(self):\n auth = AuthenticationProvider.currentAuth()\n\n return {\n 'Authorization': '%s %s' % (auth.tokenType, auth.accessToken),\n 'Content-Type': 'application/json'}", "def _GetHeaderNameValue(header):\n i = header.find(':')\n if i > 0:\n return (header[:i].lower(), header[i+1:].strip())\n return None", "def headers(self):\n headers = BASE_HEADERS\n if self.token:\n headers['X-Plex-Token'] = self.token\n return headers", "def header(self, **args):\n return self.pageConfig['header'] % self.pageConfig", "def get_token_auth_header():\n auth = request.headers.get('Authorization', None)\n if not auth:\n raise AuthError({'code': 'authorization_header_missing',\n 'description': 'Authorization header is expected'}, 401)\n\n parts = auth.split()\n\n if parts[0].lower() != 'bearer':\n raise AuthError({'code': 'invalid_header',\n 'description': 'Authorization header must start with Bearer'}, 401)\n\n if len(parts) < 2:\n raise AuthError({'code': 'invalid_header',\n 'description': 'Token not found after Bearer'}, 401)\n\n if len(parts) > 2:\n raise AuthError({'code': 'invalid_header',\n 'description': 'Authorization header is an invalid token structure'}, 401)\n\n return parts[1]", "def get_authorization_header(self):\n return {\"Authorization\": \"Bearer {}\".format(self.get_jwt())}", "def location_header(self):\n return self._location_header", "def _headers(self) -> Mapping[str, str]:\n return self.auth.headers() if self.auth else {}", "def get_headers(self, environ=None):\n return [('Content-Type', 'application/json')]", "def header(self):\n return copy.deepcopy(self._header)", "def get_api_header(token):\n return {\n 'Authorization': 'Token ' + str(token)}", "def build_header(self):\n authstring = \"Bearer \" + self.auth_token\n header = {\n \"Authorization\": authstring,\n \"Content-Type\": \"application/json\",\n \"User-Agent\": self.user_agent,\n \"Accept-Encoding\": \"gzip\"\n }\n return header", "def headers():\n return {\n 'user-agent': 'integration-tester',\n 'content-type': 'application/json',\n }", "def get_token_auth_header():\n # Get authorization form request header\n auth = request.headers.get('Authorization', None)\n # Check if authorization header exists\n if not auth:\n raise AuthError({\n 'code': 'authorization_header_missing',\n 'description': 'Authorization header is MISSING!'\n }, abort(401))\n # If bearer token, then first part of string = 'bearer'\n parts = auth.split()\n if parts[0].lower() != 'bearer':\n raise AuthError({\n 'code': 'invalid_header',\n 'description': 'Authorization header must start with \"Bearer\"'\n }, abort(401))\n # Authorization header string length must be 2\n elif len(parts) != 2:\n raise AuthError({\n 'code': 'invalid_header',\n 'description': 'Authorization header must be a BEARER token'\n }, abort(401))\n\n token = parts[1]\n return token", "def get_token_auth_header():\n auth = request.headers.get('Authorization', None)\n if not auth:\n raise AuthError({\n 'code': 'authorization_header_missing',\n 'description': 'Authorization 
header is expected.'\n }, 401)\n elif auth.split()[0].lower() != 'bearer':\n raise AuthError({\n 'code': 'invalid_header',\n 'description': 'Authorization header must start with \"Bearer\".'\n }, 401)\n elif len(auth.split()) == 1:\n raise AuthError({\n 'code': 'invalid_header',\n 'description': 'Authorization header must be include type and token.'\n }, 401)\n elif len(auth.split()) > 2:\n raise AuthError({\n 'code': 'invalid_header',\n 'description': 'Authorization header must be Bearer token.'\n }, 401)\n else:\n token = auth.split()[1]\n return token", "def get_headers(self, session, **kwargs):\n token = self.get_token(session)\n\n if not token:\n return None\n\n return {IDENTITY_AUTH_HEADER_NAME: token}", "def _get_url_http_headers(self) -> Dict[str, str]:\n h = requests.head(self.url, allow_redirects=True)\n return h.headers", "def auth_header_value(self):\n return f\"token {self.API_TOKEN}\"", "def _get_headers(self, request):\n headers = {}\n for key, value in request.META.items():\n if key.startswith('HTTP_') and key != 'HTTP_HOST':\n headers[key[5:].replace('_', '-')] = value\n elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH') and value:\n headers[key.replace('_', '-')] = value\n\n if request.user:\n headers['PARTNER-EMAIL'] = request.user.email\n lang = self._get_lang(request)\n if lang:\n headers['ACCEPT-LANGUAGE'] = lang\n return headers", "def get_token_header(cls, token):\n if token is EMPTY_KNOX_TOKEN:\n return {}\n else:\n return {'HTTP_AUTHORIZATION': 'token {}'.format(token)}", "def get_jwt(self, request):\n auth_header_prefix = self.auth_header_prefix\n try:\n authorization = request.authorization\n except ValueError:\n return None\n if authorization is None:\n return None\n authtype, token = authorization\n if authtype.lower() != auth_header_prefix.lower():\n return None\n return token", "def get_headers(request: Dict[str, str]) -> Tuple[str, Dict[str, str]]:\n host = None\n headers = {}\n for name, value in request['META'].items():\n if name == \"HTTP_HOST\":\n host = value\n continue # comment to preserve host header, but eventual output contains host twice.\n if name.startswith('HTTP_'):\n headers[convert_header_names(name[5:])] = value.replace('\"', r'\\\"')\n assert host is not None, \"HTTP_HOST not found in request headers.\"\n return host, headers", "def header(self):\r\n raise NotImplementedError", "def get_token_auth_header():\n auth = request.headers.get(\"Authorization\", None)\n print(auth)\n\n if not auth:\n raise AuthError({\"code\": \"authorization_header_missing\",\n \"description\":\n \"Authorization header is expected\"}, 401)\n \n parts = auth.split()\n \n if parts[0].lower() != \"bearer\":\n raise AuthError({\"code\": \"invalid_header\",\n \"description\":\n \"Authorization header must start with\"\n \" Bearer\"}, 401)\n elif len(parts) == 1:\n raise AuthError({\"code\": \"invalid_header\",\n \"description\": \"Token not found\"}, 401)\n elif len(parts) > 2:\n raise AuthError({\"code\": \"invalid_header\",\n \"description\":\n \"Authorization header must be\"\n \" Bearer token\"}, 401)\n\n token = parts[1]\n return token", "def headers(self):\n return self.generator.headers", "def header(self) -> dict:\n return {\"typ\": self.typ, \"alg\": self._alg}", "def get_header(file):\n with open(file, 'r') as f:\n return f.readline()", "def default_headers(self):\n return RequestHeaders().get_default_request_headers()", "def get_headers(self):\n # Creating headers.\n headers = {'accept': 
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',\n 'accept-encoding': 'gzip, deflate, sdch, br',\n 'accept-language': 'en-GB,en;q=0.8,en-US;q=0.6,ml;q=0.4',\n 'cache-control': 'max-age=0',\n 'upgrade-insecure-requests': '1',\n 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36'}\n return headers", "def view_headers():\n\n return jsonify(get_dict('headers'))", "def requestheaders(self, flow: mitmproxy.http.HTTPFlow):\n pass", "def requestheaders(self, flow: mitmproxy.http.HTTPFlow):\n pass", "def get_token_auth_header():\n auth = request.headers.get(\"Authorization\", None)\n if not auth:\n raise AuthError({\"code\": \"authorization_header_missing\",\n \"description\":\n \"Authorization header is expected\"}, 401)\n\n parts = auth.split()\n\n if parts[0].lower() != \"bearer\":\n raise AuthError({\"code\": \"invalid_header\",\n \"description\":\n \"Authorization header must start with\"\n \" Bearer\"}, 401)\n elif len(parts) == 1:\n raise AuthError({\"code\": \"invalid_header\",\n \"description\": \"Token not found\"}, 401)\n elif len(parts) > 2:\n raise AuthError({\"code\": \"invalid_header\",\n \"description\":\n \"Authorization header must be\"\n \" Bearer token\"}, 401)\n\n token = parts[1]\n return token", "def get_headers(self):\n return ['dep_red', 'dep_sd', 'hyp_red', 'hyp_sd']", "def header( self ):\n\t\treturn '; '.join( [ '='.join(i) for i in self.items() ] )", "def custom_headers(self):\n return self._custom_headers", "def getHeader(self):\n length = self.getInt()\n dest = self._getStr(definition.ADDRESS_LENGTH)\n origin = self._getStr(definition.ADDRESS_LENGTH)\n msgType = self._getStr(definition.MSG_TYPE_LENGTH)\n msgNr = self.getInt()\n return (length, dest, origin, msgType, msgNr)" ]
[ "0.8271102", "0.8128856", "0.7752471", "0.7752471", "0.7752471", "0.77344984", "0.76949877", "0.7601719", "0.7559333", "0.75524616", "0.7516345", "0.7507523", "0.72965723", "0.72964346", "0.72653186", "0.7242549", "0.72425336", "0.7175708", "0.7161306", "0.71201324", "0.71052045", "0.7092509", "0.70916677", "0.70514", "0.70514", "0.70514", "0.7021672", "0.69547766", "0.69486874", "0.69455194", "0.69142085", "0.6906375", "0.68920517", "0.68808484", "0.6873816", "0.6854221", "0.6848993", "0.68356055", "0.6804649", "0.6801347", "0.6783759", "0.6771566", "0.6763452", "0.6675472", "0.6667713", "0.6667243", "0.6662065", "0.6650478", "0.6650478", "0.6650478", "0.6638761", "0.6589257", "0.6583556", "0.65703416", "0.656182", "0.6561448", "0.65567887", "0.65401274", "0.653958", "0.6518693", "0.651169", "0.6496286", "0.6476033", "0.6465011", "0.64573145", "0.6456022", "0.6430668", "0.6427707", "0.6412026", "0.6410025", "0.6397119", "0.63955706", "0.63913137", "0.63911974", "0.63798916", "0.6378387", "0.6376597", "0.63515633", "0.6345633", "0.632831", "0.6317478", "0.6312068", "0.6297091", "0.62968975", "0.6287178", "0.62792534", "0.62732404", "0.6271917", "0.62699205", "0.62689453", "0.6247012", "0.6231067", "0.6230661", "0.6228864", "0.6228864", "0.62262845", "0.6225279", "0.6218732", "0.62119234", "0.6211584" ]
0.69505686
28
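The negatives for this record repeatedly re-implement the same Bearer-token extraction; condensed into one sketch, assuming a Flask-style `request.headers` mapping passed in as a plain dict:

# Condensed form of the get_token_auth_header pattern repeated in the
# negatives above; `headers` stands in for a Flask-style request.headers.
def get_bearer_token(headers: dict) -> str:
    auth = headers.get('Authorization')
    if not auth:
        raise ValueError('authorization header is expected')
    parts = auth.split()
    if len(parts) != 2 or parts[0].lower() != 'bearer':
        raise ValueError('authorization header must be "Bearer <token>"')
    return parts[1]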
Push the executor package to Jina Hub.
def push(self) -> None:
    with ImportExtensions(required=True):
        import requests

    pkg_path = Path(self.args.path)
    if not pkg_path.exists():
        self.logger.critical(f'`{self.args.path}` is not a valid path!')
        exit(1)

    request_headers = self._get_request_header()

    try:
        # archive the executor package
        with TimeContext(f'Packaging {self.args.path}', self.logger):
            md5_hash = hashlib.md5()
            bytesio = archive_package(pkg_path)
            content = bytesio.getvalue()
            md5_hash.update(content)
            md5_digest = md5_hash.hexdigest()

        # upload the archived package
        form_data = {
            'public': self.args.public if hasattr(self.args, 'public') else False,
            'private': self.args.private if hasattr(self.args, 'private') else False,
            'md5sum': md5_digest,
            'force': self.args.force,
            'secret': self.args.secret,
        }

        method = 'put' if self.args.force else 'post'

        hubble_url = get_hubble_url()
        # upload the archived executor to Jina Hub
        with TimeContext(
            f'Pushing to {hubble_url} ({method.upper()})',
            self.logger,
        ):
            resp = getattr(requests, method)(
                hubble_url,
                files={'file': content},
                data=form_data,
                headers=request_headers,
            )

        if 200 <= resp.status_code < 300:
            # TODO: only support single executor now
            image = resp.json()['executors'][0]

            uuid8 = image['id']
            secret = image['secret']
            visibility = image['visibility']

            info_table = [
                f'\t🔑 ID:\t\t' + colored(f'{uuid8}', 'cyan'),
                f'\t🔒 Secret:\t'
                + colored(
                    f'{secret}',
                    'cyan',
                )
                + colored(
                    ' (👈 Please store this secret carefully, it wont show up again)',
                    'red',
                ),
                f'\t👀 Visibility:\t' + colored(f'{visibility}', 'cyan'),
            ]

            if 'alias' in image:
                info_table.append(f'\t📛 Alias:\t' + colored(image['alias'], 'cyan'))

            self.logger.success(f'🎉 Executor `{pkg_path}` is pushed successfully!')
            self.logger.info('\n' + '\n'.join(info_table))

            usage = (
                f'jinahub://{uuid8}'
                if visibility == 'public'
                else f'jinahub://{uuid8}:{secret}'
            )

            self.logger.info(f'You can use it via `uses={usage}` in the Flow/CLI.')
        elif resp.text:
            # NOTE: sometimes resp.text returns empty
            raise Exception(resp.text)
        else:
            resp.raise_for_status()
    except Exception as e:  # IO related errors
        self.logger.error(
            f'Error while pushing `{self.args.path}` with session_id={request_headers["jinameta-session-id"]}: '
            f'\n{e!r}'
        )
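A hedged usage sketch for the method above — `HubIO` is assumed to be the class that owns `push()`, and the argparse-style namespace simply mirrors the attributes the method reads (`path`, `public`, `private`, `force`, `secret`):

# Hypothetical driver for push(); the attribute names mirror what the
# method reads from self.args. HubIO is assumed to be the owning class.
from types import SimpleNamespace

args = SimpleNamespace(
    path='./my_executor',  # directory to archive and upload
    public=True,
    private=False,
    force=False,           # False -> POST (first upload), True -> PUT (update)
    secret=None,
)
HubIO(args).push()  # on success, logs the ID/secret and a jinahub:// usage string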
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def launch():\n\n core.openflow.addListenerByName(\"ConnectionUp\", _handle_ConnectionUp)\n log.info(\"Hub running\")", "def pull(self) -> None:\n cached_zip_filepath = None\n try:\n scheme, name, tag, secret = parse_hub_uri(self.args.uri)\n\n executor = HubIO.fetch(name, tag=tag, secret=secret)\n\n if not tag:\n tag = executor.tag\n\n uuid = executor.uuid\n image_name = executor.image_name\n archive_url = executor.archive_url\n md5sum = executor.md5sum\n\n if scheme == 'jinahub+docker':\n # pull the Docker image\n with TimeContext(f'pulling {image_name}', self.logger):\n image = self._client.images.pull(image_name)\n if isinstance(image, list):\n image = image[0]\n image_tag = image.tags[0] if image.tags else ''\n self.logger.success(\n f'๐ŸŽ‰ pulled {image_tag} ({image.short_id}) uncompressed size: {get_readable_size(image.attrs[\"Size\"])}'\n )\n return\n if exist_local(uuid, tag):\n self.logger.debug(\n f'The executor `{self.args.uri}` has already been downloaded.'\n )\n return\n # download the package\n with TimeContext(f'downloading {self.args.uri}', self.logger):\n cache_dir = Path(\n os.environ.get(\n 'JINA_HUB_CACHE_DIR', Path.home().joinpath('.cache', 'jina')\n )\n )\n cache_dir.mkdir(parents=True, exist_ok=True)\n cached_zip_filename = f'{uuid}-{md5sum}.zip'\n cached_zip_filepath = download_with_resume(\n archive_url,\n cache_dir,\n cached_zip_filename,\n md5sum=md5sum,\n )\n\n with TimeContext(f'unpacking {self.args.uri}', self.logger):\n try:\n install_local(\n cached_zip_filepath,\n uuid,\n tag,\n install_deps=self.args.install_requirements,\n )\n except Exception as ex:\n raise HubDownloadError(str(ex))\n\n except Exception as e:\n self.logger.error(\n f'Error when pulling the executor `{self.args.uri}`: {e!r}'\n )\n finally:\n # delete downloaded zip package if existed\n if cached_zip_filepath is not None:\n cached_zip_filepath.unlink()", "def run(self, executor: Interface):\n\n pass # pragma: no cover", "def _push_to_server(self) -> None:\n pass", "def push_application(self):\n raise NotImplementedError()", "def remote_push(self, pNamespace):", "def main():\n executor(option().host)", "def executor(self):", "def push_thread(self):\n raise NotImplementedError()", "def push():\n local('hg push jvacx')", "def executorRouter(self, name, executor):\n pass", "def push(self, *args, **kwargs):\n pass", "async def install(self) -> None:\n tasks = [asyncio.create_task(self.miners[miner].main_loop()) for miner in self.miners]\n await asyncio.gather(*tasks)", "def deploy_worker(dist_file):\n _set_credentials()\n provision()\n _deploy_python_package(dist_file)\n _reload_supervisor()", "def create_java_executor(self):\r\n if self.context.options.nailgun_daemon and not os.environ.get('PANTS_DEV'):\r\n classpath = os.pathsep.join(\r\n self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._nailgun_bootstrap_key))\r\n client = NailgunExecutor(self._workdir, classpath, distribution=self._dist)\r\n else:\r\n client = SubprocessExecutor(self._dist)\r\n return client", "def test_webuidriver_remote(self):\n\n hub = SeleniumJar(self.jar_path, self.java_path).hub(4444)\n hub.start_server()\n\n node = SeleniumJar(self.jar_path, self.java_path).node(5555, (\"localhost\", 4444))\n node.start_server()\n executors = SeleniumHatch.get_remote_executors(\"localhost\", 4444)\n\n # self.driver = webuidriver.Remote(executor, options=self.opt) ไธŽไธ‹้ข่ฏญๅฅๆ•ˆๆžœๆ˜ฏไธ€ๆ ท็š„\n self.driver = webuidriver.Remote(executors[0], desired_capabilities=self.opt.to_capabilities())\n\n 
self.driver.get('http://www.baidu.com')\n time.sleep(1)\n\n # webuidriver.Remote ๆ˜ฏ webdriver.Chrome ็š„ๅญ็ฑป\n self.assertTrue(issubclass(webuidriver.Chrome, webdriver.Remote))\n self.assertIsInstance(self.driver, webdriver.Remote)\n\n self.driver.close()\n self.driver.quit()\n\n hub.stop_server()\n node.stop_server()", "def push_dockerhub(c, app, version, latest=False):\n if app.lower() == 'core':\n c.run('sudo docker push kinecosystem/stellar-core:{version}'.format(version=version))\n if latest:\n c.run('sudo docker push kinecosystem/stellar-core:latest')\n elif app.lower() == 'horizon':\n c.run('sudo docker push kinecosystem/horizon:{version}'.format(version=version))\n if latest:\n c.run('sudo docker push kinecosystem/horizon:latest')\n elif app.lower() == 'friendbot':\n c.run('sudo docker push kinecosystem/friendbot:{version}'.format(version=version))\n if latest:\n c.run('sudo docker push kinecosystem/friendbot:latest')\n else:\n Exit('Unknown application {}'.format(app))", "def gitHubConnect():\n return HUB", "async def start(self):\n envs = self.user_env()\n self.remote_host = await self.start_ec2_instance(envs)\n \n # commenting this out till I have added aws networking within a subnet\n # port = await self.remote_random_port()\n port=int(os.getenv('REMOTE_PORT'))\n if port is None or port == 0:\n return False\n cmd = []\n\n cmd.extend(self.cmd)\n cmd.extend(self.get_args())\n\n if self.hub_api_url != \"\":\n old = \"--hub-api-url={}\".format(self.hub.api_url)\n new = \"--hub-api-url={}\".format(self.hub_api_url)\n for index, value in enumerate(cmd):\n if value == old:\n cmd[index] = new\n for index, value in enumerate(cmd):\n if value[0:6] == '--port':\n cmd[index] = '--port=%d' % (port)\n\n remote_cmd = ' '.join(cmd)\n\n remote_cmd = '/usr/local/bin/'+remote_cmd\n\n self.log.debug(\"Command issued to remote serve: {}\".format(remote_cmd))\n self.pid = await self.exec_notebook(remote_cmd)\n\n self.log.debug(\"Starting User: {}, PID: {}\".format(self.user.name, self.pid))\n\n if self.pid < 0:\n return None\n # DEPRECATION: Spawner.start should return a url or (ip, port) tuple in JupyterHub >= 0.9\n return (self.remote_host, int(port))", "def _register(self, comm, handler):", "def docker_push(c):\n cli_tasks.docker_push.run(c)", "def start(setup): #pragma: no cover\n import warnings\n warnings.warn(\"start() is deprecated, use run() instread\", DeprecationWarning)\n\n\n async def main():\n await setup()\n await initialize()\n try:\n tasks = []\n for hub in Hub.hubs:\n tasks.append(spawn(hub.run()))\n for task in tasks:\n await task\n finally:\n await finalize()\n loop = get_event_loop()\n loop.run_until_complete(main(program))", "def launch(self):", "def push(images, tag, registry):\n manager = Manager('push', tag, images=images, registry_url=registry)\n manager.run()", "def executor(self, name):\n return Registry.require(Executor, name)", "def push():\n branch = git.current_branch().name\n shell.run('git push -u origin {}'.format(branch))", "def register_to_core(self):\n self.channel.basic_publish(exchange='', routing_key='peripheral_register', body=json.dumps({self.name: api}))", "def register_worker(self):\n raise Exception('not implemented')", "def start(self):\n self.configure_app()\n for task in self.tasks: # pragma: no cover\n self._running_tasks.append(asyncio.Task(task))\n self.hub.add_subscriptions(self.subscriptions)\n self.log.info(\"Started plugin `%s` (%d task(s))\",\n self.name, len(self._running_tasks))", "def add_executor(self, executor: Executor) -> None:\n 
self.executors[executor.name] = executor\n executor.scheduler = self", "def installExecutionEngine(self, myargs):\n return", "def push(self):\n origin = self.git_repo.remotes.origin\n origin.push()", "def remote():\n pass", "def run(self, registry):", "def docker_worker():", "def push_greenlet(self):\n raise NotImplementedError()", "def ship():\n cotton.git_push()\n cotton.install_python_dependencies()\n\n # Deploy the secrets module to the remote project root\n spath = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'secrets'))\n put(spath, env.project_root)\n\n cotton.upload_template_and_reload('cron')", "def connect_to_master():", "def onSlave(self):", "def RemoteBuild(self, image):\n raise NotImplementedError()", "def run(self):\n if self._main_loop:\n return\n self._main_loop = GObject.MainLoop()\n self._disconnect_all()\n self._register()\n logger.info(\"--- Mainloop started ---\")\n logger.info(\"Hub is ready for onboarding\")\n try:\n self._main_loop.run()\n except KeyboardInterrupt:\n # ignore exception as it is a valid way to exit the program\n # and skip to finally clause\n pass\n except Exception as e:\n logger.error(e)\n finally:\n logger.info(\"--- Mainloop finished ---\")\n self._unregister()\n self._main_loop.quit()\n self._main_loop = None", "async def main():\n # Create the queue of work\n work_queue = asyncio.Queue()\n\n # Put some work in the queue\n for url in [\n \"http://google.com\",\n \"http://yahoo.com\",\n \"http://linkedin.com\",\n \"http://apple.com\",\n \"http://microsoft.com\",\n \"http://facebook.com\",\n \"http://twitter.com\",\n ]:\n await work_queue.put(url)\n\n browser = HTTPBrowser(2,\n work_queue.qsize(),\n LoadBalancing()\n )\n\n await browser.browser_session(work_queue)", "def _configure_remote_executor(ex, cardinalities, loop):\n if loop.is_running():\n asyncio.run_coroutine_threadsafe(ex.set_cardinalities(cardinalities), loop)\n else:\n loop.run_until_complete(ex.set_cardinalities(cardinalities))\n return", "def __call__(self):\n return self._executor()", "def submit(self, *args, **kwargs):\n return self.executor.submit(*args, **kwargs)", "def push_base():\n docker('login')\n docker('push %s' % env.base_image_name)", "def run_instance():\n data = check_args(\n ('cloudProvider', 'apiKey', 'secretKey', 'packageName', 'OS',\n 'sgPorts')\n )\n job = jobs.deploy.apply_async(args=(data,))\n current_user.add_job(job.id)\n return make_response(job_id=job.id)", "def install_experiment(self):\n # read git credentials configuration\n try:\n with open('GlobalConfigurations/tokens.json', 'r') as tokens_file:\n data = json.load(tokens_file)\n username = data['GitHub']['user']\n password = data['GitHub']['password']\n\n except EnvironmentError:\n print('Cannot open tokens file')\n\n protocol_name = self.protocol_config['protocol']\n working_directory = self.protocol_config['workingDirectory']\n cp = list(self.protocol_config['CloudProviders'].keys())\n git_address = self.protocol_config['CloudProviders'][cp[0]]['git']['gitAddress']\n git_branch = self.protocol_config['CloudProviders'][cp[0]]['git']['gitBranch']\n\n for idx in range(len(working_directory)):\n os.system(f'fab -f Execution/fabfile.py install_git_project:{username},{password},{git_branch[idx]},'\n f'{git_address[idx]},{working_directory[idx]} --parallel | '\n f' tee WebApp/ExecutionLogs/{protocol_name}.log')", "def git_push(c):\n c.run(\"git submodule foreach git push \")", "def add_worker(ip: str = Argument(..., help=\"Server IP\"),\n key_ssh: str= Argument(..., help=\"Path to ssh key 
file\"),\n user_ssh: str = Argument(..., help=\"User in the server\"),\n hostname: str = Argument(..., help=\"Ex: ws01.example.com\"),\n mannager_ip: str = Argument(..., help=\"Mannager cluster IP\")):\n registers = os.getcwd() + '/commands/templates/manager_registers.txt'\n if os.path.exists(registers):\n with open(registers, 'r') as f:\n line = f.readline()\n while line:\n line = line.split(' ')\n line_ip = line[-3].split(':')[0]\n if line_ip == mannager_ip:\n echo(style(\"Connecting with Server\", fg=blue, bold=True))\n server = create_connection(user_ssh, ip, key_ssh)\n install_docker(server)\n install_docker_compose(server)\n init_service(hostname, server)\n server.run(' '.join(line[:-2]))\n break\n else:\n line = f.readline()\n\n msg = 'Not registers for the mannager server ip'\n echo(style(msg, fg=blue, bold=True))\n msg = 'Enter server user for of mannager node'\n user = prompt(style(msg, fg=blue, bold=True))\n msg = style('Enter path to ssh key file', fg=blue, bold=True)\n\n msg = style('Enter path to ssh key file', fg=blue, bold=True)\n key = prompt(msg)\n server = create_connection(user, mannager_ip, key)\n st = str(server.run('docker swarm join-token worker')).split()\n print(st)\n else:\n msg = 'Not registers for the mannager server ip'\n echo(style(msg, fg=blue, bold=True))\n\n msg = 'Enter server user for of mannager node'\n user = prompt(style(msg, fg=blue, bold=True))\n msg = style('Enter path to ssh key file', fg=blue, bold=True)\n key = prompt(msg)\n #server = create_connection(user, ip_mannager, key)", "def run(self):\n self._logging(logging.INFO, 'Starting the Github bot.')\n\n with acquire_inter_process_lock('github_bot') as acquired:\n if not acquired:\n err_msg = 'Another instance of the Github bot is already ' \\\n 'running, aborting now.'\n logging.getLogger(__name__).log(logging.WARNING, err_msg)\n else:\n self._push_new_job_offers_to_github()", "def on_DeployMinerNode_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def on_fork(self, payload):\n pass", "def upload(self, connection):\n if not self.already_deployed(connection):\n if self.config.project_type == \"java\":\n print(blue('Pushing jar to nexus server'))\n connection.local('mvn deploy')\n self._already_deployed = True\n else:\n raise Exception(f\"Unsupported project type: {self.config.project_type}\")", "def exec_worker(self, endpoint, args, request):\n raise NotImplementedError", "def run():\n board = GoBoard(7)\n con = GtpConnection(Gomoku(), board)\n con.start_connection()", "def register(project_id, runner):\n pass", "def execute_module(self):\n raise NotImplementedError", "def run():\n register_component(\"press\")\n run_app(host=\"0.0.0.0\", port=8080, debug=True, workers=os.cpu_count())", "def set_executor(self, executor: futures.Executor):\n self._executor = executor", "def deploy():\n build()\n collect()\n commit()\n push()", "def deploy():", "def run(*args: Any, **kwargs: Any) -> None:\n # ArgumentParser setup\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-c\", \"--config\",\n help=\"kojid configuration file path\",\n default=\"/etc/kojid/kojid.conf\")\n\n # koji-builder options setup\n ns = parser.parse_args(*args, **kwargs)\n c = config.Config.from_path(ns.config)\n opts = c.section('kojid')\n \n # koji-builder session setup\n logger.info('Authenticating...')\n s = session_setup(opts)\n s.session.exclusiveSession(force=True)\n logger.info('Authenticated.')\n\n # runner setup (kube, mock, etc)\n # scheduler setup (uses runner)\n # task handler setup 
(used by runners)\n # log each setup step\n\n h = host.Host(s.session, 'mbbox.default', capacity=3.0)\n h.sync()\n\n host_id = s.session.host.getID()\n logger.info(f'Using koji-builder host id: \"{host_id}\".')\n\n st_expired = koji.BR_STATES['EXPIRED']\n states = [koji.BR_STATES[k] for k in koji.BR_STATES]\n tasks: Dict[Any, Any] = {}\n loop_interval = int(os.environ.get('KOJI_BUILDER_INTERVAL', '5'))\n\n while True:\n #build roots\n build_roots = s.session.listBuildroots(hostID=host_id, state=tuple(states))\n logger.info(f'Total Build Roots found: {len(build_roots)}')\n build_roots = dict([(row['id'], row) for row in build_roots])\n for k, v in build_roots.items():\n task_id = v['task_id']\n uid = v['id']\n tag_name = v['tag_name']\n arch = v['arch']\n if task_id is None:\n # not associated with a task\n # this makes no sense now, but may in the future\n logger.warning(f'Expiring taskless buildroot: {uid}/{tag_name}/{arch}')\n s.session.host.setBuildRootState(id, st_expired)\n elif task_id not in tasks:\n logger.info('Expiring buildroot: {uid}/{tag_name}/{arch}')\n logger.debug(f'Buildroot task: {task_id}, Current tasks: {to_list(tasks.keys())}')\n s.session.host.setBuildRootState(uid, st_expired)\n continue\n #tasks\n tasks = h.get_tasks()\n if len(tasks) == 0:\n logger.info('0 tasks found, sleeping for 5 seconds.')\n utils.wait(loop_interval)\n continue\n print(tasks)\n\n # scheduler checks for new tasks (see flow)\n # identify the type of task handler to use\n # scheduler provision a task to run (pod running with some command?)\n # task status check + update (most likely includes FS updates and koji-hub api calls)\n # may need to store logs someplace else\n # errors should have a status of failure and store logs\n if mode == Modes.TEST:\n break\n elif mode == Modes.DAEMON:\n utils.wait(loop_interval)", "def RunCommand(self, params):\n prefix = ['container', 'hub', 'memberships', 'register']\n return self.Run(prefix + params)", "def pusher_activate(self):\n self.pusher(\"push\")\n time.sleep(2.0) # Definitely not above 3.2!!!\n self.pusher(\"retract\")\n time.sleep(3.2)\n self.pusher(\"stop\")", "def run():\n board = SimpleGoBoard(7)\n con = GtpConnection(Gomoku4(), board)\n con.start_connection()", "def push(self, remote, branch, *args):\n return self.cmd('push', remote, branch, *args)", "def _get_executor_init(self, workers):\n raise NotImplementedError", "def main():\n configure_logging()\n\n # Attributes tell us what subscription has been created for us to listen to.\n project = get_metadata('instance/attributes/pubsub_subscription_project')\n service_account = get_metadata('instance/attributes/pubsub_service_account')\n subscription = get_metadata('instance/attributes/pubsub_subscription')\n pubsub = PubSub(service_account=service_account)\n\n while True:\n logging.info('Polling for new messages')\n ack_ids = []\n start_time = time.time()\n response = pubsub.pull(subscription, project)\n for message in response.get('receivedMessages', []):\n ack_ids.append(message['ackId'])\n attributes = message['message'].get('attributes', {})\n message = base64.b64decode(message['message'].get('data', ''))\n logging.info(\n 'Received message: %s\\nAttributes: %s',\n message,\n json.dumps(attributes, indent=2),\n )\n\n if message == 'CONNECT' and attributes.get('swarming_server'):\n if os.path.exists(SWARMING_UPSTART_CONFIG_DEST):\n os.remove(SWARMING_UPSTART_CONFIG_DEST)\n shutil.copy2(SWARMING_UPSTART_CONFIG_SRC, SWARMING_UPSTART_CONFIG_DEST)\n\n if not os.path.exists(SWARMING_BOT_DIR):\n 
os.mkdir(SWARMING_BOT_DIR)\n chrome_bot = pwd.getpwnam(CHROME_BOT)\n os.chown(SWARMING_BOT_DIR, chrome_bot.pw_uid, chrome_bot.pw_gid)\n\n if os.path.exists(SWARMING_BOT_ZIP):\n # Delete just the zip, not the whole directory so logs are kept.\n os.remove(SWARMING_BOT_ZIP)\n\n bot_code = urllib2.urlopen(urlparse.urljoin(\n attributes.get('swarming_server'), 'bot_code'))\n with open(SWARMING_BOT_ZIP, 'w') as fd:\n shutil.copyfileobj(bot_code, fd)\n os.chown(SWARMING_BOT_ZIP, chrome_bot.pw_uid, chrome_bot.pw_gid)\n\n pubsub.acknowledge(subscription, project, ack_ids)\n subprocess.check_call(['/sbin/shutdown', '-r', 'now'])\n elif message == 'LEASED' and attributes.get('lease_expiration_ts'):\n with open(LEASE_EXPIRATION_FILE, 'w') as f:\n f.write(attributes['lease_expiration_ts'])\n\n if ack_ids:\n pubsub.acknowledge(subscription, project, ack_ids)\n if time.time() - start_time < 1:\n # Iterate at most once per second (chosen arbitrarily).\n time.sleep(1)", "def push(self, **kwargs):\n return _taskpipeoperation(self,'push', **kwargs)", "def onPreFork(self):", "async def main(event):\n if conf.MATRIX_PW:\n LOGGER.info(f\"Log in {conf.MATRIX_ID=} on {conf.MATRIX_URL=}\")\n await utils.CLIENT.login(conf.MATRIX_PW)\n else:\n LOGGER.info(f\"Restoring log in {conf.MATRIX_ID=} on {conf.MATRIX_URL=}\")\n utils.CLIENT.access_token = conf.MATRIX_TOKEN\n\n server = web.Server(handler.matrix_webhook)\n runner = web.ServerRunner(server)\n await runner.setup()\n LOGGER.info(f\"Binding on {conf.SERVER_ADDRESS=}\")\n site = web.TCPSite(runner, *conf.SERVER_ADDRESS)\n await site.start()\n\n # Run until we get a shutdown request\n await event.wait()\n\n # Cleanup\n await runner.cleanup()\n await utils.CLIENT.close()", "def main():\n signal.signal(signal.SIGINT, signal_handler)\n signal.signal(signal.SIGQUIT, signal_handler)\n signal.signal(signal.SIGTERM, signal_handler)\n SCL.on_state_change(updatehub.listener.Action.ENTER,\n updatehub.listener.State.DOWNLOADING,\n callback)\n SCL.on_error(error_callback)\n\n SCL.start()\n\n while True:\n time.sleep(1)", "def main():\n setup()\n master = Master()\n master.start()", "def _submit_to_queue(self, script_file):", "def eval_executor(self, spawn):\n return self.interpreter.exec_signal.connect(\n lambda line: spawn(self.interpreter.exec_, line))", "def create_executor_plugin_manager() -> pluggy.PluginManager:\n pm = create_plugin_manager()\n pm.add_hookspecs(TaskGraphHooks)\n return pm", "def init_app(self, app):\n app.config.setdefault(self.EXECUTOR_TYPE, 'thread')\n app.config.setdefault(self.EXECUTOR_PUSH_APP_CONTEXT, True)\n futures_max_length = app.config.setdefault(self.EXECUTOR_FUTURES_MAX_LENGTH, None)\n propagate_exceptions = app.config.setdefault(self.EXECUTOR_PROPAGATE_EXCEPTIONS, False)\n if futures_max_length is not None:\n self.futures.max_length = int(futures_max_length)\n if str2bool(propagate_exceptions):\n self.add_default_done_callback(propagate_exceptions_callback)\n self._self = self._make_executor(app)\n app.extensions[self.name + 'executor'] = self", "def publish():\n pass", "def _make_executor(self, expr=None):\n raise NotImplementedError()", "def push(self, base_repo, branch=\"master\"):\n base_repo.push_to(self, branch)", "def _push_to_server(self) -> None:\n if not self.url or not self.job_name:\n return\n\n try:\n pushadd_to_gateway(self.url, job=self.job_name, registry=REGISTRY, handler=self._auth_handler)\n\n except OSError as exp:\n self.logger.warning(\"Failed to push metrics to %s: %s\", self.url, str(exp))\n except:\n 
self.logger.exception(\"Failed to push metrics to %s\", self.url)\n\n self.logger.debug(\"Pushed metrics to %s\", self.url)", "def repository_create_hosted():\n pass", "def register(self):\n logger.info(\"Registering with Hub...\")\n register_complete = Event()\n\n def on_register_complete(result=None, error=None):\n # This could be a failed/successful registration result from the HUB\n # or an error from polling machine. Response should be given appropriately\n if result is not None:\n if result.status == \"assigned\":\n logger.info(\"Successfully registered with Hub\")\n else: # There be other statuses\n logger.error(\"Failed registering with Hub\")\n if error is not None: # This can only happen when the polling machine runs into error\n logger.info(error)\n\n register_complete.set()\n\n self._polling_machine.register(callback=on_register_complete)\n\n register_complete.wait()", "async def setup(self):\n pass", "async def execute(self):", "def main():\n\n # Logging\n coloredlogs.install(level='INFO')\n\n # Connect to Rabbit\n rabbit = Client('hms_website', 'haum', ['irc_command'])\n\n rabbit.connect()\n\n def voice_required(f):\n \"\"\"Decorator that checks if the sender is voiced.\"\"\"\n def wrapper(*args):\n print(args)\n if 'is_voiced' in args[2] and args[2]['is_voiced']:\n return f(*args)\n else:\n rabbit.publish('irc_debug', {'privmsg': 'On se connait ? Tu n’es pas voiced mon ami...'})\n return wrapper\n\n\n @topic('irc_command')\n def callback(client, topic, message):\n\n @voice_required\n def do_work(client, topic, message):\n rabbit.publish('irc_debug', {'privmsg': 'Mise à jour du site en cours…'})\n\n success = updatesite()\n message = \"T'as tout cassé\"\n\n if success:\n message = \"Le site est à jour !\"\n\n rabbit.publish('irc_debug', {'privmsg': message})\n\n\n if 'command' in message and message['command'] == 'updatesite':\n do_work(client, topic, message)\n\n\n rabbit.listeners.append(callback)\n \n # Infinite listening for messages\n rabbit.start_consuming()\n\n rabbit.disconnect()", "def upload_package(self, __contents):\n raise NotImplementedError", "def emit(self, event, *args, **kwargs):\n\n super(ExecutorEventEmitter, self).emit(event, *args, **kwargs)", "def main():\r\n app = appdirs.AppDirs('Python Installer', 'Unicorn')\r\n try:\r\n os.makedirs(app.user_log_dir)\r\n except:\r\n pass\r\n\r\n pyversion = platform.python_version()\r\n pyarch = platform.architecture()[0]\r\n\r\n # log installed python version\r\n with open(os.path.join(app.user_log_dir, 'install.log'), 'a', encoding='utf-8') as fp:\r\n fp.write('Python {} ({}) installed.'.format(pyversion, pyarch))\r\n\r\n # log installed modules\r\n modules = freeze.freeze()\r\n module_str = ''\r\n for module in modules:\r\n module_str += '{}\\n'.format(module)\r\n \r\n with open(os.path.join(app.user_log_dir, 'modules-py{}-{}.log'.format(pyversion, pyarch)), 'w', encoding='utf-8') as fp:\r\n fp.write(module_str)\r\n\r\n app = QtGui.QApplication(sys.argv)\r\n\r\n hello = QtGui.QLabel(\"Python {} ({}) installed\".format(pyversion, pyarch))\r\n hello.show()\r\n hello.resize(250, 80)\r\n sys.exit(app.exec_())", "def exec(self):\n if self._root.master is None:\n self._root.mainloop()", "def release_pypi():\n local('python setup.py clean sdist register upload')", "def run(self):\n\n self.make_connection()\n self.channel()\n self.declare_queue()\n self.publish_message()\n self.close_connection()", "def upload():\n sh('python setup.py register sdist upload')", "def run(self):\n self.connect()", "async def 
startup(self):", "async def startup(self):", "def pushkey(self, addr, passwd, keyname=\"\", pubkey=\"\", port=22, login=\"root\"):\n ExecutorSSH(addr, port=port, login=login, passwd=passwd, pushkey=keyname, pubkey=pubkey)", "def run_on_host(self, *args, **kwargs) -> Any:\n raise NotImplementedError", "def _main_helper(self):\n asyncio.create_task(self._main())" ]
[ "0.6232041", "0.5887297", "0.58601266", "0.58165723", "0.5768271", "0.56156373", "0.55585796", "0.55408835", "0.5533365", "0.5520638", "0.5495429", "0.54430073", "0.5429095", "0.5398484", "0.53716093", "0.53678733", "0.5364028", "0.53437376", "0.5337322", "0.5332302", "0.5331389", "0.53165686", "0.53023344", "0.52942485", "0.5280901", "0.5270223", "0.5261417", "0.52603245", "0.5258933", "0.5251157", "0.524206", "0.523993", "0.52378637", "0.5236994", "0.5201709", "0.51846623", "0.5179952", "0.5130123", "0.51209116", "0.5118564", "0.5109079", "0.5105035", "0.50844854", "0.50831246", "0.50738674", "0.5072365", "0.50696754", "0.50555646", "0.5054993", "0.5053339", "0.50509673", "0.5050285", "0.5042351", "0.5033186", "0.5030065", "0.5026098", "0.50249845", "0.5017438", "0.50092983", "0.5002611", "0.49998114", "0.49963862", "0.49862835", "0.49831432", "0.4972177", "0.4958386", "0.49385834", "0.4930376", "0.49297273", "0.4926714", "0.49199402", "0.49081996", "0.48963407", "0.48937738", "0.48930648", "0.48926812", "0.4883068", "0.48765886", "0.4869697", "0.48631817", "0.4863085", "0.48627225", "0.48496228", "0.48492548", "0.48476654", "0.4846955", "0.48463288", "0.48458898", "0.48435163", "0.4842404", "0.4840793", "0.4839377", "0.4834801", "0.48334917", "0.48305357", "0.48280078", "0.48280078", "0.48243573", "0.4818459", "0.48160073" ]
0.7116745
0
Fetch the executor metadata from Jina Hub.
def fetch( name: str, tag: Optional[str] = None, secret: Optional[str] = None, ) -> HubExecutor: with ImportExtensions(required=True): import requests pull_url = get_hubble_url() + f'/{name}/?' path_params = {} if secret: path_params['secret'] = secret if tag: path_params['tag'] = tag request_headers = HubIO._get_request_header() pull_url += urlencode(path_params) resp = requests.get(pull_url, headers=request_headers) if resp.status_code != 200: if resp.text: raise Exception(resp.text) resp.raise_for_status() resp = resp.json() result = HubExecutor( resp['id'], resp.get('alias', None), resp['tag'], resp['visibility'], resp['image'], resp['package']['download'], resp['package']['md5'], ) return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def execute_info(self):\n return self._execute_info", "def get_hub_info(self, session_key):\n \n username = None\n password = None\n hub_address = None\n hub_port = None\n \n uri = urllib.quote('/servicesNS/nobody/insteon_control/admin/alert_actions/send_insteon_command') + '?output_mode=json'\n \n try:\n serverResponse, serverContent = splunk.rest.simpleRequest(uri, method='GET', sessionKey=session_key)\n info = json.loads(serverContent)\n \n username = info['entry'][0]['content']['param.username']\n password = info['entry'][0]['content']['param.password']\n hub_address = info['entry'][0]['content']['param.address']\n hub_port = info['entry'][0]['content']['param.port']\n \n except AuthenticationFailed as e:\n raise e\n except Exception as e: \n self.logger.exception(\"Error when attempting to load send_insteon_command alert action configuration\")\n \n raise e\n \n return hub_address, hub_port, username, password", "def pull(self) -> None:\n cached_zip_filepath = None\n try:\n scheme, name, tag, secret = parse_hub_uri(self.args.uri)\n\n executor = HubIO.fetch(name, tag=tag, secret=secret)\n\n if not tag:\n tag = executor.tag\n\n uuid = executor.uuid\n image_name = executor.image_name\n archive_url = executor.archive_url\n md5sum = executor.md5sum\n\n if scheme == 'jinahub+docker':\n # pull the Docker image\n with TimeContext(f'pulling {image_name}', self.logger):\n image = self._client.images.pull(image_name)\n if isinstance(image, list):\n image = image[0]\n image_tag = image.tags[0] if image.tags else ''\n self.logger.success(\n f'๐ŸŽ‰ pulled {image_tag} ({image.short_id}) uncompressed size: {get_readable_size(image.attrs[\"Size\"])}'\n )\n return\n if exist_local(uuid, tag):\n self.logger.debug(\n f'The executor `{self.args.uri}` has already been downloaded.'\n )\n return\n # download the package\n with TimeContext(f'downloading {self.args.uri}', self.logger):\n cache_dir = Path(\n os.environ.get(\n 'JINA_HUB_CACHE_DIR', Path.home().joinpath('.cache', 'jina')\n )\n )\n cache_dir.mkdir(parents=True, exist_ok=True)\n cached_zip_filename = f'{uuid}-{md5sum}.zip'\n cached_zip_filepath = download_with_resume(\n archive_url,\n cache_dir,\n cached_zip_filename,\n md5sum=md5sum,\n )\n\n with TimeContext(f'unpacking {self.args.uri}', self.logger):\n try:\n install_local(\n cached_zip_filepath,\n uuid,\n tag,\n install_deps=self.args.install_requirements,\n )\n except Exception as ex:\n raise HubDownloadError(str(ex))\n\n except Exception as e:\n self.logger.error(\n f'Error when pulling the executor `{self.args.uri}`: {e!r}'\n )\n finally:\n # delete downloaded zip package if existed\n if cached_zip_filepath is not None:\n cached_zip_filepath.unlink()", "def fetch_info():\n global JOLOKIA_CONNECTIONS\n for connection in JOLOKIA_CONNECTIONS.keys():\n try:\n data = JOLOKIA_CONNECTIONS[connection]['j4p'].getRequests()\n for ele in data:\n parse_info(ele, JOLOKIA_CONNECTIONS[connection]['instance'])\n except Exception, e:\n collectd.error('jolokia plugin: Error at jolokia endpoint %s - %r' % (connection, e))", "def info(self):\n resp = self.server.request(\"get\", \"/jobs/%s/%s\" % (self.sessionid,\n self.name))\n return self.server.json_body(resp)", "def get_eventhub_info(self):\n self._create_connection()\n eh_name = self.address.path.lstrip('/')\n target = \"amqps://{}/{}\".format(self.address.hostname, eh_name)\n mgmt_client = uamqp.AMQPClient(target, auth=self.auth, debug=self.debug)\n mgmt_client.open(self.connection)\n try:\n mgmt_msg = Message(application_properties={'name': eh_name})\n 
response = mgmt_client.mgmt_request(\n mgmt_msg,\n constants.READ_OPERATION,\n op_type=b'com.microsoft:eventhub',\n status_code_field=b'status-code',\n description_fields=b'status-description')\n eh_info = response.get_data()\n output = {}\n if eh_info:\n output['name'] = eh_info[b'name'].decode('utf-8')\n output['type'] = eh_info[b'type'].decode('utf-8')\n output['created_at'] = datetime.datetime.fromtimestamp(float(eh_info[b'created_at'])/1000)\n output['partition_count'] = eh_info[b'partition_count']\n output['partition_ids'] = [p.decode('utf-8') for p in eh_info[b'partition_ids']]\n return output\n except:\n raise\n finally:\n mgmt_client.close()", "async def _fetch_data(self) -> JobInfo:\n return await self.api.get_job()", "def describe(self) -> None:\n return {\n 'cluster_metadata': self.cluster_metadata,\n 'master_url': self.master_url\n }", "def redis_info(self):\n def func(server):\n return server.info()\n self.__run_redis_cmd(func)", "async def read_worker_metadata(self) -> Dict[str, Any]:\n response = await self._client.get(\"collections/views/aggregate-worker-metadata\")\n response.raise_for_status()\n return response.json()", "def rpc_info():", "def _mesos_task_info(self, submissionId):\n agent_id = agent_hostname = agent_port = framework_id = container_id = None\n get_state = self.driver.getState()['get_state']\n get_tasks = get_state['get_tasks']\n\n tasks = get_tasks['tasks'] + get_tasks.get('completed_tasks', [])\n tasks_list = list(filter(lambda x: x['task_id']['value'] == submissionId, tasks))\n if len(tasks_list) > 0:\n task = tasks_list[0]\n agent_id = task['agent_id']['value']\n framework_id = task['framework_id']['value']\n\n if agent_id is not None:\n get_agents = get_state['get_agents']\n agents = get_agents['agents']\n agents_list = list(filter(lambda x: x['agent_info']['id']['value'] == agent_id, agents))\n if len(agents_list) > 0:\n agent = agents_list[0]\n agent_hostname = agent['agent_info']['hostname']\n agent_port = agent['agent_info']['port']\n agent_driver = MesosOperatorAgentDriver('{}:{}'.format(agent_hostname, agent_port))\n containers = agent_driver.getContainers()['get_containers']['containers']\n containers_list = list(filter(lambda x: x['executor_id']['value'] == submissionId, containers))\n if len(containers_list) > 0:\n container = containers_list[0]\n container_id = container['container_id']['value']\n\n return agent_id, agent_hostname, str(agent_port), framework_id, container_id", "def info(self):\n return self.current_run.info", "def get_info(self):\n pass", "def get_info(self):\n pass", "def getInfo():", "def _fetch_current_remote_metadata(conn):\n content = _get(conn, REMOTE_METADATA_FILE)\n metadata = json.loads(content) if content else {}\n return metadata", "def executor_object():\n try:\n return ExecutorThread.local_thread.executor_object\n except AttributeError:\n pass\n return None", "def get_info(self):\n raise NotImplementedError(\"Robot.get_info\")", "def remote_info():\n run('uname -a')", "def get_info(self):\n return None", "def executor(self, name):\n return Registry.require(Executor, name)", "def meta(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'meta')\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def get_koji_build_info(build_id, remote, ctx):\n py_cmd = ('import koji; '\n 'hub = koji.ClientSession(\"{kojihub_url}\"); '\n 'print(hub.getBuild({build_id}))')\n py_cmd = py_cmd.format(\n build_id=build_id,\n kojihub_url=config.kojihub_url\n )\n log.info('Querying kojihub for info on build 
{0}'.format(build_id))\n build_info = _run_python_command(py_cmd, remote, ctx)\n return build_info", "def meta_info(environ, start_response, logger, handle):\n pass", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def get_info():\n global PERF_APP\n archs = None\n best_arch = None\n cipher_algos = None\n hash_algos = None\n aead_algos = None\n\n cmd = PERF_APP + ' --print-info'\n\n try:\n res = subprocess.run(cmd, stdout=subprocess.PIPE, \\\n stderr=subprocess.STDOUT, \\\n env=ENVS, shell=True, check=True)\n output = res.stdout.decode('utf-8')\n except subprocess.CalledProcessError as e:\n print(\"Error (\" + str(e.returncode) + \")\")\n print(e.output.decode('utf-8'))\n sys.exit(1)\n\n lines = output.rstrip().split('\\n')\n try:\n for line in lines:\n info = line.split(':')\n if info[0] == 'Supported architectures':\n archs = info[1].split()\n if info[0] == 'Best architecture':\n best_arch = info[1].split()\n if info[0] == 'Supported cipher algorithms':\n cipher_algos = info[1].split()\n if info[0] == 'Supported hash algorithms':\n hash_algos = info[1].split()\n if info[0] == 'Supported aead algorithms':\n aead_algos = info[1].split()\n except:\n print(\"Error parsing --print-info output:\\n\" \\\n \"{}\".format(output), file=sys.stderr)\n\n if archs is None or best_arch is None or cipher_algos is None \\\n or hash_algos is None or aead_algos is None:\n print(\"Error parsing system and app information\", file=sys.stderr)\n sys.exit(1)\n\n return archs, best_arch, cipher_algos, hash_algos, aead_algos", "def AsyncConfStatLearnedInformation(self):\n\t\tfrom ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.asyncconfstatlearnedinformation_u3rhdexlyxjuzwrjbmzvcm1hdglvbg import AsyncConfStatLearnedInformation\n\t\treturn AsyncConfStatLearnedInformation(self)", "def info(self):\n return self._fetch_json('/api/info')", "def _get_information(self):\n pass", "def get_info(self):\n url = self._url_for_op('info')\n data= None # This will be a GET request since data is None\n response = self._get_raw_response(self._get_json_headers,\n self._get_json_response, url, data)\n response = json.loads(response)\n self.api_info = response['results']\n return self.api_info", "def fetch_metadata(requests_impl=requests):\n\n print(f'fetching metadata at {Network.METADATA_URL}')\n return requests_impl.get(Network.METADATA_URL).json()", "async def _get_info(self, event):\n\n title, url = event.description.split(\"\\n\", 1)\n\n ytdl = YoutubeDL(self.YOUTUBE_DL_OPTIONS)\n try:\n info = await self.loop.run_in_executor(None, lambda: ytdl.extract_info(url, **self.YOUTUBE_DL_EXTRACT_OPTIONS))\n except DownloadError:\n info = {\"is_live\": False}\n\n info[\"streamer\"] = event.name.split(\":\", 1)[0]\n info[\"live_status\"] = \"is now live\" if info.get(\"is_live\") else \"will be live shortly\"\n info[\"title\"] = title\n info[\"url\"] = url\n\n return info", "def get_info(self): \n return {\n \"ident\": self.ident,\n \"interval\": self._interval,\n \"exception\": self._exception,\n \"execute\": self._execute,\n \"args\": self._args,\n \"kwargs\": self._kwargs}", "def test_get_executors(app, manager, plan):\n url = url_for(\"executors.get_executors\")\n\n run_time = datetime.now() + timedelta(hours=10)\n executor = manager.add_executor(run_time, \"executor name\", {}, plan.id)\n\n with app.test_request_context(url):\n res = app.test_client().get(url)\n assert res.status_code == 200\n assert res.mimetype == \"application/hal+json\"\n assert res.json == 
Document(embedded={\"executors\": [executor.to_dict()]}).to_dict()", "def info(self):\n return requests.get(self.info_url + self.pid).json()", "def get_details():\r\n return run_operations.get_run_details(experiment_name, job_name).as_dict(key_transformer=camel_case_transformer)", "def get_info(self):\n return \"TODO !\"", "def _get_executor_init(self, workers):\n raise NotImplementedError", "def info(self):\r\n\r\n return self.sim_info", "async def get_system_info(self) -> Dict[str, Any]:\n assert self._client is not None\n return await self._client.invoke_method(\"system.info\")", "def info() -> None:", "def executor(self):", "def metadata(cls):\n return {\n 'id': constants.WHOLE_REPO_PROFILER_ID,\n 'display_name': _('Profiler to install entire puppet repo'),\n 'types' : [constants.TYPE_PUPPET_MODULE]\n }", "def get_hypervisor_info(self):\n try:\n req = Request(self.compute_url +\n \"/os-hypervisors/detail\" )\n self._upgrade_to_authenticated_request(req)\n resp = urlopen(req)\n content = resp.read().decode('utf-8')\n encoded = json.loads(content)\n resp.close()\n except URLError as e:\n return {}\n except Exception as e:\n raise Exception(\"Unable to process compute reponse: %s\" % e)\n\n return encoded['hypervisors']", "def GetMetadata(self):\n return self.dict['meta']", "def plugin_info():\n\n return {\n 'name': 'MAX31865 Async plugin',\n 'version': '1.0',\n 'mode': 'async',\n 'type': 'south',\n 'interface': '1.0',\n 'config': _DEFAULT_CONFIG\n }", "def run_info(self):\n return \"MPI: %d, OMP: %d\" % (self.mpi_procs, self.omp_threads)", "def _request_bootstrap_server_info() -> str:\n if __debug__:\n logger.info(\"Requesting bootstrap server...\")\n req = BootstrapServerRequest()\n DistroStreamClientHandler.request(req)\n\n # Retrieve answer\n req.wait_processed()\n error = req.get_error_code()\n if error != 0:\n raise BackendException(error, req.get_error_msg())\n\n # Parse answer\n answer = req.get_response_msg()\n if __debug__:\n logger.debug(\"Retrieved bootstrap server information: %s\", answer)\n\n return answer", "def meta(self):\n raise NotImplementedError", "def get_info_inst(self):\n return self.get_info(\"INST\")", "def _getAllMeta(self):\n try:\n metadata = pyexiv2.ImageMetadata(self.imagePath)\n metadata.read()\n return metadata\n except:\n print 'error reading meta data'\n return None", "def read_metadata(self):\n return self.parent.controller.get_tag_metadata()", "def git_hub_resource_info(self) -> Optional[pulumi.Input['GitHubResourceInfoArgs']]:\n return pulumi.get(self, \"git_hub_resource_info\")", "def platform_info(self):\n return platform.uname()._asdict()", "def info(self):\n meta = {\n \"name\": self.name,\n \"description\": self.description,\n \"version\": self.version,\n \"labels\": self.labels,\n \"models\": {k: v.info() for k, v in self._infers.items() if v.is_valid()},\n \"trainers\": {k: v.info() for k, v in self._trainers.items()},\n \"strategies\": {k: v.info() for k, v in self._strategies.items()},\n \"scoring\": {k: v.info() for k, v in self._scoring_methods.items()},\n \"train_stats\": {k: v.stats() for k, v in self._trainers.items()},\n \"datastore\": self._datastore.status(),\n }\n\n # If labels are not provided, aggregate from all individual infers\n if not self.labels:\n merged = []\n for labels in [v.get(\"labels\", []) for v in meta[\"models\"].values()]:\n if labels and isinstance(labels, dict):\n labels = [k for k, _ in sorted(labels.items(), key=lambda item: item[1])] # type: ignore\n for label in labels:\n if label not in merged:\n 
merged.append(label)\n meta[\"labels\"] = merged\n\n return meta", "def info(self):\n _, data = yield from self.transport.perform_request('GET', '/')\n return data", "async def _async_get_addon_info(self) -> AddonInfo:\n addon_manager: AddonManager = get_addon_manager(self.hass)\n try:\n addon_info: AddonInfo = await addon_manager.async_get_addon_info()\n except AddonError as err:\n LOGGER.error(err)\n raise AbortFlow(\"addon_info_failed\") from err\n\n return addon_info", "def get_metadata():\n meta_data = {}\n keys = ['ami-id', 'placement/availability-zone', 'instance-id',\n 'instance-type', 'local-hostname', 'local-ipv4',\n 'public-hostname', 'public-ipv4', 'security-groups', 'user-data']\n for key in keys:\n url = \"http://169.254.169.254/latest/meta-data/\" + key\n meta_data[key] = urllib.urlopen(url).read()\n meta_data['security-groups'] = meta_data['security-groups'].split('\\n')\n return meta_data", "def _pypi_details(self) -> Tuple[str, str, str, str, str]:\n json_value = pypi_fetch(self._reserved_name)\n if json_value:\n latest_version = self._parse_latest_version(json_value)\n license_value = self._get_license(json_value)\n latest_release_date = self._parse_latest_update(json_value, latest_version)\n release_count = self._parse_release_count(json_value)\n summary = self._parse_summary(json_value)\n return (\n latest_version,\n license_value,\n latest_release_date,\n release_count,\n summary,\n )\n return \"\", \"Unknown\", \"\", \"\", \"\"", "def info(self):\r\n return self._get('info', {})", "async def info(self, ctx):\n\n uptime = func.time_(self.bot.launch_time)\n users = sum(1 for _ in self.bot.get_all_members())\n channels = sum(1 for _ in self.bot.get_all_channels())\n\n author = self.bot.get_user(299879858572492802)\n\n invite = 'https://discordapp.com/oauth2/authorize?client_id=347205176903335937&scope=bot&permissions=470150359'\n about = ('Infamous is an actively developed bot that gets updated daily.'\n f' It is written with passion by {author} using the Rewrite branch of the discord.py library.')\n\n links = (f'**[[Invite Bot]]({invite})** \\n'\n '**[[Fame Discord]](https://discord.gg/NY2MSA3)** \\n'\n '**[[Discord.py]](https://github.com/Rapptz/discord.py/tree/rewrite)** \\n'\n '**[[Support]](https://discord.gg/JyJTh4H)**')\n\n # From Modelmat\n cpu_usage = self.process.cpu_percent() / psutil.cpu_count()\n ram_usage = self.process.memory_full_info().uss / 1024 ** 2\n\n embed = discord.Embed(color=self.bot.embed_color)\n embed.set_author(name=self.bot.user.name, icon_url=self.bot.user.avatar_url)\n embed.description = 'A multi-purpose bot with image manipulation, wiki pages and its own rpg; originally a ' \\\n 'community bot for ★ Fame ★'\n embed.set_thumbnail(\n url=self.bot.user.avatar_url)\n\n embed.add_field(name='About', value=about, inline=False)\n\n embed.add_field(name='Statistics 📈',\n value=(f'**{len(self.bot.guilds)} guilds.**\\n'\n f'**{channels} channels.**\\n'\n f'**{users} users.** \\n'\n f'**{self.bot.lines} lines**'), inline=True)\n\n embed.add_field(name='Uptime ⏰', value=(f'**{uptime[0]} days.** \\n'\n f'**{uptime[1]} hours.** \\n'\n f'**{uptime[2]} minutes.** \\n'\n f'**{uptime[3]} seconds.**'), inline=True)\n\n embed.add_field(name='Developer 🕵', value=author)\n embed.add_field(name='Resources 💻', value='`CPU:` {:.2f}% \\n`MEM:` {:.2f}'.format(cpu_usage, ram_usage))\n embed.add_field(name='Links 🔗', value=links, inline=True)\n\n await ctx.send(embed=embed)", "async def _status():\n # TODO(Deepankar): should we add versions of 
executors?\n return {\n 'status_code': status.HTTP_200_OK,\n 'jina_version': jina_version\n }", "def get_info(self, key: str) -> TaskInfo:\n raise NotImplementedError", "def get_metadata(self):\n return self.client._perform_json(\n \"GET\", \"/projects/%s/recipes/%s/metadata\" % (self.project_key, self.recipe_name))", "def get_information(self):\n try:\n return self._get_information()\n except(AttributeError, KeyError) as e:\n self._logger.error(f\"Error scrapping the tab information: {e}\")", "def get_metadata(self):\n # currently there is no metadata to send\n return {}", "def info(self) -> dict:", "def parse_cpu_info(self):\n pipe = subprocess.Popen([self.core_exe, '-c'], 0, None, None,subprocess.PIPE)\n lines = pipe.stdout.readlines()\n x = 0\n json_str = ''\n while x < len(lines):\n json_str += lines[x].decode('utf-8').strip()\n x += 1\n decoder = json.decoder.JSONDecoder()\n self.cpu_info = decoder.decode(json_str)\n return self.cpu_info", "def info(): # noqa: E501\n return 'do some magic!'", "def grab_cmd_info(cmd):\n r = requests.get(cmd.url)\n r.raise_for_status()\n soup_cmd = bs4.BeautifulSoup(r.text, 'lxml')\n\n # get body text, sanitise\n body = soup_cmd.body.text\n body = re.sub(r'\\n\\n+', '\\n', body) # get rid of extra lines\n body = re.sub(r'\\n[\\s\\xa0]+', '\\n', body) # remove leading spaces\n body = re.sub(r'[\\s\\xa0]+\\n', '\\n', body) # remove trailing spaces\n\n info = {}\n\n # get brief description\n p_brief = re.compile(cmd.name+r'\\n([\\d\\w.()-/ \\'\\n]*)\\nSynopsis', re.IGNORECASE)\n brief_search = p_brief.search(body)\n if brief_search:\n info['brief'] = brief_search.group(1).replace('\\n', ' ')\n else:\n print 'No brief info for', cmd.name\n info['brief'] = None\n\n # get synopsis\n p_synopsis = re.compile(r'Synopsis\\n('+cmd.name+r'.+)[\\n ]+Description', re.IGNORECASE | re.DOTALL)\n synopsis_search = p_synopsis.search(body)\n if synopsis_search:\n synopsis_raw = synopsis_search.group(1)\n info['synopsis'] = [i.replace('\\n', ' ').strip() for i in synopsis_raw.split(cmd.name) if i]\n else:\n print 'No synopsis info for', cmd.name\n info['synopsis'] = None\n\n # get description\n# p_desc = re.compile(r'Description\\n(.*?)\\nOptions', re.IGNORECASE | re.DOTALL)\n return info", "def get_server_metadata(self, name):\n raise NotImplementedError", "def info(self):\n resp = requests.get(\"%s/api/info\"%self.urlbase, verify=False)\n return resp.json", "def get_session_info():\n query = {\"type\": \"op\", \"cmd\": \"<show><session><info></info></session></show>\"}\n\n return __proxy__[\"panos.call\"](query)", "def backend_info(self):\n backend_info = self.backend.get_metadata()\n\n if \"description\" in backend_info:\n self.backendInfo.setText(str(backend_info[\"description\"]))", "def device_info(self):\n info = {\n \"identifiers\": {\n (\n DOMAIN,\n \"serial-number\",\n self._ctrl.data[\"routerboard\"][\"serial-number\"],\n \"switch\",\n \"Scripts\",\n )\n },\n \"manufacturer\": self._ctrl.data[\"resource\"][\"platform\"],\n \"model\": self._ctrl.data[\"resource\"][\"board-name\"],\n \"name\": f\"{self._inst} Scripts\",\n }\n return info", "def run_info ( run_num ) : \n global _rinfos_\n rinfo = _rinfos_.get ( run_num , None )\n if rinfo : return rinfo \n \n try :\n \n #\n url = run_url.format ( run_num )\n _obj = urllib.urlopen ( url )\n rinfo = json.load ( _obj )\n\n rinfo = rinfo if rinfo else None\n _rinfos_ [ run_num ] = rinfo \n return rinfo\n \n except:\n return None \n\n return None", "async def get_thread_info(self) -> Any:\n return await 
self.AD.threading.get_thread_info()", "def get_info(self) -> str:\n raise NotImplementedError()", "def getInfo(self):\n return self.info", "def metadata(self): # -> None:\n ...", "def get_info(repos):\n info = \"labelord application is master-to-master application for label replication using webhook for GitHub<br>\"\n for i in repos:\n info += i + ' ' + repo_link(i) + '<br>'\n return info", "def info(self):", "def info(self):", "def invocation_metadata(self):\n raise NotImplementedError()", "def job_metadata(self) -> pulumi.Output['outputs.JobMetadataResponse']:\n return pulumi.get(self, \"job_metadata\")", "def GetMetadata(cmdline, ninjalog):\n\n build_dir = os.path.dirname(ninjalog)\n\n build_configs = {}\n\n try:\n args = ['gn', 'args', build_dir, '--list', '--short', '--json']\n if sys.platform == 'win32':\n # gn in PATH is bat file in windows environment (except cygwin).\n args = ['cmd', '/c'] + args\n\n gn_args = subprocess.check_output(args)\n build_configs = ParseGNArgs(gn_args)\n except subprocess.CalledProcessError as e:\n logging.error(\"Failed to call gn %s\", e)\n build_configs = {}\n\n # Stringify config.\n for k in build_configs:\n build_configs[k] = str(build_configs[k])\n\n metadata = {\n 'platform': platform.system(),\n 'cpu_core': multiprocessing.cpu_count(),\n 'build_configs': build_configs,\n 'targets': GetBuildTargetFromCommandLine(cmdline),\n }\n\n jflag = GetJflag(cmdline)\n if jflag is not None:\n metadata['jobs'] = jflag\n\n return metadata", "def mobile_executor_initialization():\n # Read robot_list from ROS parameter server. Robotnames are strings.\n robot_namespaces = rospy.get_param(\"/robot_list\")\n # Go through the robot list, extract each name and create MobileExecutor object with that name, then append to the MEx list.\n for i in robot_namespaces:\n robot_name = i \n robot_instance = MobileExecutor(robot_name)\n mex_list.append(robot_instance)", "def get_info(self) -> Optional[Dict[str, Any]]:", "def get_info(self, key: str) -> TaskInfo:\n return self.task_graph.nodes[key][\"info\"]", "def fetch_info(self, client):\n self.log_verbose(\"Sending info command\")\n client.send(\"info\")\n try:\n data = client.read_response()\n except RedisError as e:\n collectd.error(\"redis_info plugin: Error response from %s:%d - %r\" % (self.host, self.port, e))\n return None\n\n self.log_verbose(\"Received data: %s\" % data)\n\n linesep = \"\\r\\n\" if \"\\r\\n\" in data else \"\\n\"\n info_dict = self.parse_info(data.split(linesep))\n\n return info_dict", "async def info(self, ctx):\n self.logger.info(misolog.format_log(ctx, f\"\"))\n appinfo = await self.client.application_info()\n membercount = sum(1 for x in self.client.get_all_members())\n info_embed = discord.Embed(title=f\"Miso Bot | version {main.version}\",\n description=f\"Created by {appinfo.owner.mention}\\n\\n\"\n f\"Use `{self.client.command_prefix}help` to get the list of commands, \"\n f\"or visit the documention website for more help.\"\n f\"\\n\\nCurrently active in **{len(self.client.guilds)}** \"\n f\"servers totaling **{membercount}** unique users\",\n colour=discord.Colour.red())\n\n # info_embed.set_footer(text=f'version 2.0')\n info_embed.set_thumbnail(url=self.client.user.avatar_url)\n info_embed.add_field(name='Github', value='https://github.com/joinemm/miso-bot', inline=False)\n info_embed.add_field(name='Documentation', value=\"http://joinemm.me/misobot\", inline=False)\n info_embed.add_field(name='Patreon', value=\"https://www.patreon.com/joinemm\", inline=False)\n await 
ctx.send(embed=info_embed)", "async def test_api_supervisor_info(\n hassio_handler, aioclient_mock: AiohttpClientMocker\n) -> None:\n aioclient_mock.get(\n \"http://127.0.0.1/supervisor/info\",\n json={\n \"result\": \"ok\",\n \"data\": {\"supported\": True, \"version\": \"2020.11.1\", \"channel\": \"stable\"},\n },\n )\n\n data = await hassio_handler.get_supervisor_info()\n assert aioclient_mock.call_count == 1\n assert data[\"supported\"]\n assert data[\"version\"] == \"2020.11.1\"\n assert data[\"channel\"] == \"stable\"", "def computer_info():\n return {\n 'system': platform.system(),\n 'architecture': platform.architecture(),\n 'name': platform.node(),\n 'release': platform.release(),\n 'version': platform.version(),\n 'machine': platform.machine(),\n 'processor': platform.processor(),\n 'virtual CPUs': mproc.cpu_count(),\n 'total RAM': _get_ram(),\n }", "def info(self):\n return self._info", "def info(client):\n\n return client.get_info()", "def user_env(self):\n\n # FIXME I think the JPY_ variables have been deprecated in JupyterHub\n # since 0.7.2, we should replace them. Can we figure this out?\n\n env = super(EC2Spawner, self).get_env()\n env.update(dict(\n JUPYTERHUB_PREFIX=self.hub.server.base_url,\n Name='Jupyter',\n PATH=self.path\n ))\n\n if self.notebook_dir:\n env['NOTEBOOK_DIR'] = self.notebook_dir\n\n hub_api_url = self.hub.api_url\n if self.hub_api_url != '':\n hub_api_url = self.hub_api_url\n\n env['JPY_HUB_API_URL'] = hub_api_url\n env['JUPYTERHUB_API_URL'] = hub_api_url\n\n self.log.debug(\"Env built: {}\".format(env))\n return env", "def getInfo(self):\n self.info = requests.get(G.api + self.testId + '/snapshots/' + self.hash, auth=(G.username, G.authkey)).json()\n return self.info", "def info(self):\n return (self._title, self._version, self._descr)", "def gather_metric(self):\n result = self._shell.run(self.PYTHON_COMMAND)\n # Python prints this to stderr\n version = result.stdout.split()[-1]\n\n response = {self.PYTHON_VERSION: version}\n return response" ]
[ "0.5562067", "0.5546919", "0.5502387", "0.54784894", "0.542903", "0.5346842", "0.532653", "0.5297018", "0.5291502", "0.52755237", "0.5223626", "0.52146435", "0.5180011", "0.51614326", "0.51614326", "0.5159329", "0.51201266", "0.51103526", "0.5103046", "0.5100491", "0.50945884", "0.50761396", "0.50572306", "0.5053128", "0.50368047", "0.5026444", "0.49964005", "0.4979684", "0.4971314", "0.49657854", "0.49459752", "0.49364394", "0.49343017", "0.49193493", "0.49190882", "0.49157625", "0.48956123", "0.48899454", "0.48705128", "0.48634732", "0.48566797", "0.48430052", "0.4823629", "0.48026985", "0.47926813", "0.4789728", "0.47852707", "0.4778224", "0.47769707", "0.47758135", "0.47734317", "0.4767561", "0.47635916", "0.47605962", "0.4760085", "0.47551948", "0.4752813", "0.47485152", "0.4747319", "0.4744301", "0.47408208", "0.47401315", "0.47386354", "0.47234514", "0.47226772", "0.47174922", "0.47171342", "0.47155184", "0.47124588", "0.47112787", "0.47064623", "0.4703867", "0.47033247", "0.47029755", "0.46990553", "0.46926373", "0.46915406", "0.46891698", "0.4677222", "0.467672", "0.4672989", "0.46555415", "0.46481237", "0.46481237", "0.46427768", "0.46419293", "0.46313235", "0.46295306", "0.46221563", "0.46218166", "0.461952", "0.46171993", "0.4614525", "0.46077386", "0.46063265", "0.46055245", "0.46049306", "0.4604622", "0.46028587", "0.4598857" ]
0.530817
7
Pull the executor package from Jina Hub.
def pull(self) -> None:\n cached_zip_filepath = None\n try:\n scheme, name, tag, secret = parse_hub_uri(self.args.uri)\n\n executor = HubIO.fetch(name, tag=tag, secret=secret)\n\n if not tag:\n tag = executor.tag\n\n uuid = executor.uuid\n image_name = executor.image_name\n archive_url = executor.archive_url\n md5sum = executor.md5sum\n\n if scheme == 'jinahub+docker':\n # pull the Docker image\n with TimeContext(f'pulling {image_name}', self.logger):\n image = self._client.images.pull(image_name)\n if isinstance(image, list):\n image = image[0]\n image_tag = image.tags[0] if image.tags else ''\n self.logger.success(\n f'🎉 pulled {image_tag} ({image.short_id}) uncompressed size: {get_readable_size(image.attrs[\"Size\"])}'\n )\n return\n if exist_local(uuid, tag):\n self.logger.debug(\n f'The executor `{self.args.uri}` has already been downloaded.'\n )\n return\n # download the package\n with TimeContext(f'downloading {self.args.uri}', self.logger):\n cache_dir = Path(\n os.environ.get(\n 'JINA_HUB_CACHE_DIR', Path.home().joinpath('.cache', 'jina')\n )\n )\n cache_dir.mkdir(parents=True, exist_ok=True)\n cached_zip_filename = f'{uuid}-{md5sum}.zip'\n cached_zip_filepath = download_with_resume(\n archive_url,\n cache_dir,\n cached_zip_filename,\n md5sum=md5sum,\n )\n\n with TimeContext(f'unpacking {self.args.uri}', self.logger):\n try:\n install_local(\n cached_zip_filepath,\n uuid,\n tag,\n install_deps=self.args.install_requirements,\n )\n except Exception as ex:\n raise HubDownloadError(str(ex))\n\n except Exception as e:\n self.logger.error(\n f'Error when pulling the executor `{self.args.uri}`: {e!r}'\n )\n finally:\n # delete downloaded zip package if existed\n if cached_zip_filepath is not None:\n cached_zip_filepath.unlink()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def push(self) -> None:\n\n with ImportExtensions(required=True):\n import requests\n\n pkg_path = Path(self.args.path)\n if not pkg_path.exists():\n self.logger.critical(f'`{self.args.path}` is not a valid path!')\n exit(1)\n\n request_headers = self._get_request_header()\n\n try:\n # archive the executor package\n with TimeContext(f'Packaging {self.args.path}', self.logger):\n md5_hash = hashlib.md5()\n bytesio = archive_package(pkg_path)\n content = bytesio.getvalue()\n md5_hash.update(content)\n\n md5_digest = md5_hash.hexdigest()\n\n # upload the archived package\n form_data = {\n 'public': self.args.public if hasattr(self.args, 'public') else False,\n 'private': self.args.private\n if hasattr(self.args, 'private')\n else False,\n 'md5sum': md5_digest,\n 'force': self.args.force,\n 'secret': self.args.secret,\n }\n\n method = 'put' if self.args.force else 'post'\n\n hubble_url = get_hubble_url()\n # upload the archived executor to Jina Hub\n with TimeContext(\n f'Pushing to {hubble_url} ({method.upper()})',\n self.logger,\n ):\n resp = getattr(requests, method)(\n hubble_url,\n files={'file': content},\n data=form_data,\n headers=request_headers,\n )\n\n if 200 <= resp.status_code < 300:\n # TODO: only support single executor now\n image = resp.json()['executors'][0]\n\n uuid8 = image['id']\n secret = image['secret']\n visibility = image['visibility']\n\n info_table = [\n f'\\t๐Ÿ”‘ ID:\\t\\t' + colored(f'{uuid8}', 'cyan'),\n f'\\t๐Ÿ”’ Secret:\\t'\n + colored(\n f'{secret}',\n 'cyan',\n )\n + colored(\n ' (๐Ÿ‘ˆ Please store this secret carefully, it wont show up again)',\n 'red',\n ),\n f'\\t๐Ÿ‘€ Visibility:\\t' + colored(f'{visibility}', 'cyan'),\n ]\n\n if 'alias' in image:\n info_table.append(f'\\t๐Ÿ“› Alias:\\t' + colored(image['alias'], 'cyan'))\n\n self.logger.success(f'๐ŸŽ‰ Executor `{pkg_path}` is pushed successfully!')\n self.logger.info('\\n' + '\\n'.join(info_table))\n\n usage = (\n f'jinahub://{uuid8}'\n if visibility == 'public'\n else f'jinahub://{uuid8}:{secret}'\n )\n\n self.logger.info(f'You can use it via `uses={usage}` in the Flow/CLI.')\n elif resp.text:\n # NOTE: sometimes resp.text returns empty\n raise Exception(resp.text)\n else:\n resp.raise_for_status()\n except Exception as e: # IO related errors\n self.logger.error(\n f'Error while pushing `{self.args.path}` with session_id={request_headers[\"jinameta-session-id\"]}: '\n f'\\n{e!r}'\n )", "def fetch(\n name: str,\n tag: Optional[str] = None,\n secret: Optional[str] = None,\n ) -> HubExecutor:\n\n with ImportExtensions(required=True):\n import requests\n\n pull_url = get_hubble_url() + f'/{name}/?'\n path_params = {}\n if secret:\n path_params['secret'] = secret\n if tag:\n path_params['tag'] = tag\n\n request_headers = HubIO._get_request_header()\n\n pull_url += urlencode(path_params)\n resp = requests.get(pull_url, headers=request_headers)\n if resp.status_code != 200:\n if resp.text:\n raise Exception(resp.text)\n resp.raise_for_status()\n\n resp = resp.json()\n\n result = HubExecutor(\n resp['id'],\n resp.get('alias', None),\n resp['tag'],\n resp['visibility'],\n resp['image'],\n resp['package']['download'],\n resp['package']['md5'],\n )\n\n return result", "def test_load_one_pip(self, build_image_for_jupyterlab):\n\n key = \"pip&gtmunit1\"\n lb, username = build_image_for_jupyterlab[0], build_image_for_jupyterlab[5]\n\n loader = PackageLatestVersionLoader([key], lb, username)\n promise1 = loader.load(key)\n assert isinstance(promise1, Promise)\n\n pkg = promise1.get()\n assert pkg == '0.12.4'", "def 
executor(self, name):\n return Registry.require(Executor, name)", "def gitHubConnect():\n return HUB", "def pypi(self):\n return PackageHelper.get_pypi(name=self.name)", "def run(self, executor: Interface):\n\n pass # pragma: no cover", "def create_java_executor(self):\r\n if self.context.options.nailgun_daemon and not os.environ.get('PANTS_DEV'):\r\n classpath = os.pathsep.join(\r\n self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._nailgun_bootstrap_key))\r\n client = NailgunExecutor(self._workdir, classpath, distribution=self._dist)\r\n else:\r\n client = SubprocessExecutor(self._dist)\r\n return client", "async def _pull(self) -> None:\n raise NotImplementedError()", "def main():\n executor(option().host)", "def get_installation_packages(self):\n self.logger.debug(\"get_installation_packages()\")\n parameter = {'onlyLatest':'False'}\n resp = self._im_session.get(\"{}/{}\".format(self._im_api_url, 'types/InstallationPackageWithLatest/instances'), params=parameter)\n #resp = self._im_session.get('https://192.168.100.52/types/InstallationPackageWithLatest/instances', params=parameter)\n jresp = json.loads(resp.text)\n #pprint(jresp.text)\n return jresp", "async def pull(self) -> None:\n pull_fun = getattr(self, '_pull')\n if inspect.iscoroutinefunction(pull_fun):\n await pull_fun()\n return\n await run_sync(pull_fun)", "def get_hub_if_exists():\n return _threadlocal.hub", "def test_webuidriver_remote(self):\n\n hub = SeleniumJar(self.jar_path, self.java_path).hub(4444)\n hub.start_server()\n\n node = SeleniumJar(self.jar_path, self.java_path).node(5555, (\"localhost\", 4444))\n node.start_server()\n executors = SeleniumHatch.get_remote_executors(\"localhost\", 4444)\n\n # self.driver = webuidriver.Remote(executor, options=self.opt) (same effect as the statement below)\n self.driver = webuidriver.Remote(executors[0], desired_capabilities=self.opt.to_capabilities())\n\n self.driver.get('http://www.baidu.com')\n time.sleep(1)\n\n # webuidriver.Remote is a subclass of webdriver.Chrome\n self.assertTrue(issubclass(webuidriver.Chrome, webdriver.Remote))\n self.assertIsInstance(self.driver, webdriver.Remote)\n\n self.driver.close()\n self.driver.quit()\n\n hub.stop_server()\n node.stop_server()", "def pullall():\n\tprint(red('\\t\\tStarting download of QNIBTerminal images\\t\\t'))\n\t# pull all the needed images\n\tdocker_images={'fd20','terminal','helixdns','elk','slurm','compute'}\n\tfor image in docker_images:\n\t\tprint 'docker pull qnib/'+image\n\t\t# run('docker pull qnib/'+image)", "def init(self, manager):\n yield self.task(\n name=\"unpack\",\n doc=f\"unpack a 'gold master' JupyterLite from {self.app_archive.name}\",\n actions=[(self._unpack_stdlib, [])],\n file_dep=[self.app_archive],\n targets=[manager.output_dir / JUPYTERLITE_JSON],\n )", "def download_and_extract(self, package_name):\n raise NotImplementedError('Implement this method.')", "def launch():\n\n core.openflow.addListenerByName(\"ConnectionUp\", _handle_ConnectionUp)\n log.info(\"Hub running\")", "def package(self):\n if self.method == 'buildNotification':\n return self.params[1]['name']\n if self.method in ('createImage', 'image', 'livecd'):\n return self.params[0]\n if self.method == 'indirectionimage':\n return self.params[0]['name']\n # params[0] is the source URL for these tasks:\n if self.method not in ('build', 'buildArch', 'buildContainer',\n 'buildMaven', 'buildSRPMFromSCM', 'maven'):\n return None\n # (I wish there was a better way to do this.)\n source = self.params[0]\n o = urlparse(source)\n # build tasks 
can load an SRPM from a \"cli-build\" tmpdir:\n if source.endswith('.src.rpm'):\n srpm = os.path.basename(source)\n (name, version, release) = srpm.rsplit('-', 2)\n # Note we're throwing away version and release here. They could be\n # useful eventually, maybe in a \"Package\" class.\n return name\n # or an allowed SCM:\n elif o.scheme:\n package = os.path.basename(o.path)\n if package.endswith('.git'):\n package = package[:-4]\n if self.method == 'buildContainer':\n package += '-container'\n return package\n raise ValueError('could not parse source \"%s\"' % source)", "def executor_object():\n try:\n return ExecutorThread.local_thread.executor_object\n except AttributeError:\n pass\n return None", "def get_installation_packages_latest(self):\n self.logger.debug(\"get_installation_packages_latest()\")\n parameter = {'onlyLatest':'False'}\n resp = self._im_session.get(\"{}/{}\".format(self._im_api_url, 'types/InstallationPackageWithLatest/instances'), params=parameter)\n #resp = self._im_session.get('https://192.168.100.52/types/InstallationPackageWithLatest/instances', params=parameter)\n jresp = json.loads(resp.text)\n\n #pprint(jresp)", "def fetch_package(source, method=None, headers=None, auth=None):\n #if method not in ('requests', 'curl_cli'):\n # raise Exception('Fetch package method \"{}\" not found'.format(method))\n if not method:\n method = detect_fetch_method()\n print('Using fetch method \"{}\"'.format(method))\n print('Source {}'.format(source))\n fetch_method = '_fetch_package_{}'.format(method)\n package = eval(fetch_method)(source, headers, auth)\n return package", "def fetch_executable_from_jenkins():\n\n base_job_url = os.environ.get('JENKINS_JOB_URL')\n if not base_job_url:\n error('Jenkins job URL for the builder is not specified.')\n\n build_json = json.loads(requests.get('%s/api/json'\n % base_job_url).text)\n last_build = build_json['lastCompletedBuild']['number']\n print 'Last build ID: %d' % last_build\n\n job_url = '%s/%d' % (base_job_url, last_build)\n last_build_json = json.loads(requests.get('%s/api/json'\n % job_url).text)\n if not last_build_json['artifacts']:\n error('No artifacts found!')\n\n artifacts_deb = [artifact for artifact in\n last_build_json['artifacts'] if '.dmg'\n in artifact['fileName']]\n artifact_url = '%s/artifact/%s' % (job_url,\n artifacts_deb[0]['relativePath'])\n file_name = artifacts_deb[0]['fileName']\n print 'Tribler installer url: %s' % artifact_url\n\n # Download the file\n file_path = os.path.join(os.environ.get('WORKSPACE'), file_name)\n download_response = requests.get(artifact_url, stream=True)\n download_response.raise_for_status()\n\n with open(file_path, 'wb') as handle:\n for block in download_response.iter_content(1024):\n handle.write(block)\n\n return file_path", "def get(self):\r\n #python = sys.executable\r\n #os.execl(python, python, * sys.argv)\r\n os.execl(sys.executable, *([sys.executable] + sys.argv))", "def elim_bootstrap_fetch(tree):\n\n boot = tree.find('.//target[@name=\"boot\"]')\n for child in boot.findall(\"./exec\"):\n boot.remove(child)\n echo = boot.find(\"./echo\")\n echo.attrib[\"message\"] = \"Not fetching bootstrap libraries in the Fedora build\"", "def main():\n get_obofoundry(force_download=True)", "def _pull(self) -> None:\n raise NotImplementedError() # pragma: no cover", "def download_and_extract(self, package_name):\n self.download(package_name)\n self.extract(package_name)", "def _provision_package(self):", "def get_package(self, __package_id):\n raise NotImplementedError", "def 
__call__(self):\n return self._executor()", "def get(self, executor=None):\n b = Bash(executor=executor)\n return b", "def getusersitepackages():\n\tpass", "def package(self, pkg_name):\n return self._pkgs[pkg_name]", "def getSlave(name):", "def _get_executor_init(self, workers):\n raise NotImplementedError", "def packages():", "def get_jarfile(self):", "def mixin_hub_pull_options_parser(parser):\n\n gp = add_arg_group(parser, title='Pull')\n gp.add_argument(\n '--install-requirements',\n action='store_true',\n default=False,\n help='If set, install `requirements.txt` in the Hub Executor bundle to local',\n ),\n gp.add_argument(\n '--force',\n action='store_true',\n default=False,\n help='If set, always pull the latest Hub Executor bundle even it exists on local',\n )", "def get_executable(self) -> str:\n ...", "def executor(self):\n if self.__executor_weakref:\n return self.__executor_weakref()", "def executor(self):", "def executorRouter(self, name, executor):\n pass", "def run(self):\n\n # noinspection PyBroadException\n try:\n r = request.urlopen(\"https://pypi.org/pypi/PartSeg/json\")\n data = json.load(r)\n self.release = data[\"info\"][\"version\"]\n self.url = data[\"info\"][\"home_page\"]\n except (KeyError, error.URLError):\n pass\n except Exception as e:\n with sentry_sdk.push_scope() as scope:\n scope.set_tag(\"auto_report\", \"true\")\n scope.set_tag(\"check_version\", \"true\")\n sentry_sdk.capture_exception(e)", "def pull(self):", "def read_task_package(self, pkg_name):\n\n # this method is slow in finding updates of tasks\n with self._lock:\n pkg_dir = self.get_package_location(pkg_name)\n signal = None\n sha1_hash = packaging.compute_sha1_hash(pkg_dir)\n internal_folder = os.path.join(self.tasks_dir_internal,\n pkg_name, sha1_hash)\n\n pkg = self.registry.get((pkg_name, None), None)\n if not pkg or pkg.version != sha1_hash:\n # copy this folder to tasks_dir_internal\n try:\n shutil.copytree(pkg_dir, internal_folder)\n except OSError:\n # already in tree, just update the timestamp so it shows\n # as the newest version\n os.utime('%s' % (internal_folder), None)\n logger.warn('Package %s v%s already exists' % (pkg_name,\n sha1_hash))\n # find updates\n if (pkg_name, None) not in self.registry:\n signal = 'TASK_ADDED'\n elif sha1_hash != self.registry[pkg.name, None].version:\n signal = 'TASK_UPDATED'\n\n if signal:\n pkg = self.init_package(pkg_name, sha1_hash)\n self.emit(signal, pkg_name)\n return pkg\n return None", "async def pypi(message):\n q = message.content.strip()\n if not q:\n raise CommandError(\"Search term required!\")\n\n def execute():\n return client.search({'name': q})\n\n data = await asyncio.get_event_loop().run_in_executor(None, execute)\n if len(data):\n return \"\\n\".join(map(lambda e:\n \"\\u2022 **{name}** ({version}) - {desc} <https://pypi.python.org/pypi/{name}>\".format(\n name=e['name'],\n version=e['version'],\n desc=e['summary']),\n data))\n else:\n raise CommandError(\"no results found\")", "def delayed_import(package_name: str) -> FuturePackage:\n return FuturePackage(package_name)", "def translate(self, package):\r\n if not isinstance(package, self._package_type):\r\n return None\r\n if not package.compatible(identity=self._identity, platform=self._platform):\r\n return None\r\n try:\r\n bdist = package.fetch(location=self._install_cache, conn_timeout=self._conn_timeout)\r\n except package.UnreadableLink as e:\r\n TRACER.log('Failed to fetch %s: %s' % (package, e))\r\n return None\r\n return DistributionHelper.distribution_from_path(bdist)", 
"def get_hub(*args, **kwargs): # pylint:disable=unused-argument\n\n return get_hub_noargs()", "def _client(self):\n return self.m.cipd.ensure_tool('infra/tools/luci/isolated/${platform}',\n self._version)", "def test_load_many_pip(self, build_image_for_jupyterlab):\n lb, username = build_image_for_jupyterlab[0], build_image_for_jupyterlab[5]\n keys = [\"pip&gtmunit1\", \"pip&gtmunit2\", \"pip&gtmunit3\"]\n loader = PackageLatestVersionLoader(keys, lb, username)\n promise1 = loader.load_many(keys)\n assert isinstance(promise1, Promise)\n\n version_list = promise1.get()\n assert len(version_list) == 3\n assert version_list[0] == \"0.12.4\"\n assert version_list[1] == \"12.2\"\n assert version_list[2] == \"5.0\"", "def _get_next_wb(self) -> Optional[Package]:\n for dist in self.distributions:\n for arch in self.architectures:\n response = self._query_wannabuild(arch, dist,\n '--list=needs-build')\n pending = response.split('\\n')\n if not pending[0]:\n continue\n result = self._take(pending[0])\n if result:\n return result\n return None", "def package(self) -> pulumi.Output['outputs.PackageResponse']:\n return pulumi.get(self, \"package\")", "def LocalCommand(TestinfraBackend):\n return testinfra.get_backend(\"local://\").get_module(\"Command\")", "def get_class():\n return CuBoltServerScript", "def pull1(repo, **kwargs):\n ret = do_pull(repo, \"topology.virl\")\n if not ret:\n exit(1)", "def main():\n configure_logging()\n\n # Attributes tell us what subscription has been created for us to listen to.\n project = get_metadata('instance/attributes/pubsub_subscription_project')\n service_account = get_metadata('instance/attributes/pubsub_service_account')\n subscription = get_metadata('instance/attributes/pubsub_subscription')\n pubsub = PubSub(service_account=service_account)\n\n while True:\n logging.info('Polling for new messages')\n ack_ids = []\n start_time = time.time()\n response = pubsub.pull(subscription, project)\n for message in response.get('receivedMessages', []):\n ack_ids.append(message['ackId'])\n attributes = message['message'].get('attributes', {})\n message = base64.b64decode(message['message'].get('data', ''))\n logging.info(\n 'Received message: %s\\nAttributes: %s',\n message,\n json.dumps(attributes, indent=2),\n )\n\n if message == 'CONNECT' and attributes.get('swarming_server'):\n if os.path.exists(SWARMING_UPSTART_CONFIG_DEST):\n os.remove(SWARMING_UPSTART_CONFIG_DEST)\n shutil.copy2(SWARMING_UPSTART_CONFIG_SRC, SWARMING_UPSTART_CONFIG_DEST)\n\n if not os.path.exists(SWARMING_BOT_DIR):\n os.mkdir(SWARMING_BOT_DIR)\n chrome_bot = pwd.getpwnam(CHROME_BOT)\n os.chown(SWARMING_BOT_DIR, chrome_bot.pw_uid, chrome_bot.pw_gid)\n\n if os.path.exists(SWARMING_BOT_ZIP):\n # Delete just the zip, not the whole directory so logs are kept.\n os.remove(SWARMING_BOT_ZIP)\n\n bot_code = urllib2.urlopen(urlparse.urljoin(\n attributes.get('swarming_server'), 'bot_code'))\n with open(SWARMING_BOT_ZIP, 'w') as fd:\n shutil.copyfileobj(bot_code, fd)\n os.chown(SWARMING_BOT_ZIP, chrome_bot.pw_uid, chrome_bot.pw_gid)\n\n pubsub.acknowledge(subscription, project, ack_ids)\n subprocess.check_call(['/sbin/shutdown', '-r', 'now'])\n elif message == 'LEASED' and attributes.get('lease_expiration_ts'):\n with open(LEASE_EXPIRATION_FILE, 'w') as f:\n f.write(attributes['lease_expiration_ts'])\n\n if ack_ids:\n pubsub.acknowledge(subscription, project, ack_ids)\n if time.time() - start_time < 1:\n # Iterate at most once per second (chosen arbitrarily).\n time.sleep(1)", "def selenium(self):\n return 
self.builtin.get_library_instance(\"SeleniumLibrary\")", "def deploy_pull_master(self, restart=True):\n self.ops.local(\"cd \"+self.local_path+\"/src && git reset --hard HEAD && git pull origin master && git submodule update\")\n PiService.deploy(self, restart)", "def executable(self, tool_locator):\n return tool_locator.find_executable(\"smack.sh\")", "def pull(self, workload: str) -> str:\n raise NotImplementedError", "def mixin_hub_pull_parser(parser):\n\n def hub_uri(uri: str) -> str:\n from ...hubble.helper import parse_hub_uri\n\n parse_hub_uri(uri)\n return uri\n\n parser.add_argument(\n 'uri',\n type=hub_uri,\n help='The URI of the executor to pull (e.g., jinahub[+docker]://UUID8)',\n )\n mixin_hub_pull_options_parser(parser)", "def get_nupack_exec_path(exec_name):\n if 'NUPACKHOME' in os.environ:\n if('3.0' in os.environ['NUPACKHOME']):\n return os.environ['NUPACKHOME'] + '/bin/' + exec_name;\n if('3.2' in os.environ['NUPACKHOME']):\n return os.environ['NUPACKHOME'] + '/build/bin/' + exec_name;\n else:\n return exec_name;", "def download(self, args):\n\n\t\t\"\"\" Default argument for Architecture \"\"\"\n\t\tif len(args) >= 4:\n\t\t\tarch = args[3]\n\t\telse:\n\t\t\tarch = platform.processor()\n\n\t\t\"\"\" Default argument for Version \"\"\"\n\t\tif len(args) >= 3:\n\t\t\tif args[2] == \"latest\":\n\t\t\t\tversion = \"Latest\"\n\t\t\telse:\n\t\t\t\tversion = args[2]\n\t\telse:\n\t\t\tversion = \"Latest\"\n\n\t\t\"\"\" Find package path from package list, based on prev. arguments \"\"\"\n\t\tif len(args) >= 2:\n\t\t\tpackage = args[1]\n\t\t\tfilename = False\n\t\t\t\n\t\t\tversions = self.master.Dump(package)\n\t\t\tfor d in versions:\n\t\t\t\tif d[\"Version\"] == version:\n\t\t\t\t\tif d[\"Version\"] != \"Latest\" and d[\"Architecture\"] == arch:\n\t\t\t\t\t\tfilename = d[\"Filename\"]\n\t\t\t\t\telse:\n\t\t\t\t\t\tfor e in versions:\n\t\t\t\t\t\t\tif e[\"Version\"] == d[\"LatestVersion\"] and e[\"Architecture\"] == arch:\n\t\t\t\t\t\t\t\tfilename = e[\"Filename\"]\n\t\t\t\t\t\t\t\tversion = d[\"LatestVersion\"];\n\t\t\tif not filename:\n\t\t\t\tself.write_line(\"ERROR XXX: Package not found.\")\n\t\t\t\treturn\n\n\t\t\t\"\"\" Find chunks to download \"\"\"\n\t\t\tid = 0\n\t\t\tto_download = False\n\t\t\tfor f in self.torrent_info.files():\n\t\t\t\tprint(f.path.replace(\"packages/\", \"\") + \" = \" + filename);\n\t\t\t\tif f.path.replace(\"packages/\", \"\") == filename:\n\t\t\t\t\tto_download = f\n\t\t\t\t\tbreak;\n\t\t\t\tid += 1\n\t\t\tif not to_download:\n\t\t\t\tprint(\"ERROR XXX: dunno\")\n\t\t\t\treturn\n\n\t\t\t\"\"\" Set chunks priority to 7? 
(download max priority) \"\"\"\n\t\t\tpr = self.torrent_info.map_file(id, 0, to_download.size);\n\t\t\tn_pieces = math.ceil(pr.length / self.torrent_info.piece_length() + 1);\n\n\t\t\tfor i in range(self.torrent_info.num_pieces()):\n\t\t\t\tif i in range(pr.piece, pr.piece + n_pieces):\n\t\t\t\t\tself.handler.piece_priority(i, 7)\n\n\n\t\t\t\"\"\" Print download of package status \"\"\"\n\t\t\tself.print_status(id, pr, package, version, filename)\n\t\t\t\t\n\t\t\t\"\"\" Check the server for hash validation \"\"\"\n\t\t\tif self.valid_tpkg_file(to_download.path):\n\t\t\t\tself.write_line(\"DONE {0} {1} {2} {3}\".format(package, version, arch, self.config[\"daemon\"][\"rootdir\"] + \"/\" + to_download.path).replace('//', '/'))\n\t\t\telse:\n\t\t\t\tself.write_line(\"ERROR XXX: Hash verification failed.\")\n\t\telse:\n\t\t\tself.write_line(\"INVALID ARGUMENTS\");", "def connect_to_master():", "def get_active_package(self):\n return self.compute", "def RemoteBuild(self, image):\n raise NotImplementedError()", "def fetch_package(self, package_name):\n\t\t\t\n\t\t\tpackage_root_url = urlparse.urljoin(self.packages_root_url,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tpackage_name + \"/\")\n\t\t\t\n\t\t\tpackage_info_url = urlparse.urljoin(package_root_url, \"info\")\n\t\t\tpackage_archive_url = urlparse.urljoin(package_root_url, \"archive\")\n\t\t\t\n\t\t\tlogger.debug(\"Get: {0}\".format(package_info_url))\n\t\t\ttry:\n\t\t\t\tinfo = json.loads(urllib2.urlopen(package_info_url).read())\n\t\t\t\treturn ups.package.Package(self, package_root_url, info)\n\t\t\texcept urllib2.HTTPError as e:\n\t\t\t\traise RepositoryError(e)\n\t\t\texcept ValueError as e:\n\t\t\t\traise RepositoryError(\"Unable to parse info file: {0}\".format(e))", "def get_pypi_serial(self):\n with self._conn.begin():\n return self._conn.scalar(\"VALUES (get_pypi_serial())\")", "def select_binary(base_path, version, name, config=None):\r\n # TODO(John Sirois): finish doc of the path structure expexcted under base_path\r\n config = config or Config.load()\r\n bootstrap_dir = config.getdefault('pants_bootstrapdir')\r\n baseurl = config.getdefault('pants_support_baseurl')\r\n timeout_secs = config.getdefault('pants_support_fetch_timeout_secs', type=int, default=30)\r\n\r\n sysname, _, release, _, machine = os.uname()\r\n os_id = _ID_BY_OS[sysname.lower()]\r\n if os_id:\r\n middle_path = _PATH_BY_ID[os_id(release, machine)]\r\n if middle_path:\r\n binary_path = os.path.join(base_path, *(middle_path + [version, name]))\r\n bootstrapped_binary_path = os.path.join(bootstrap_dir, binary_path)\r\n if not os.path.exists(bootstrapped_binary_path):\r\n url = posixpath.join(baseurl, binary_path)\r\n log.info('Fetching %s binary from: %s' % (name, url))\r\n downloadpath = bootstrapped_binary_path + '~'\r\n try:\r\n with closing(urllib_request.urlopen(url, timeout=timeout_secs)) as binary:\r\n with safe_open(downloadpath, 'wb') as bootstrapped_binary:\r\n bootstrapped_binary.write(binary.read())\r\n\r\n os.rename(downloadpath, bootstrapped_binary_path)\r\n chmod_plus_x(bootstrapped_binary_path)\r\n except (IOError, urllib_error.HTTPError, urllib_error.URLError) as e:\r\n raise TaskError('Failed to fetch binary from %s: %s' % (url, e))\r\n finally:\r\n safe_delete(downloadpath)\r\n log.debug('Selected %s binary bootstrapped to: %s' % (name, bootstrapped_binary_path))\r\n return bootstrapped_binary_path\r\n raise TaskError('No %s binary found for: %s' % (name, (sysname, release, machine)))", "def populate_package(package_count: int) -> None:\n 
logging.info(f\"Fetching {package_count} packages\")\n response = CurlController.send_get_request(url=CONFIG.EXTERNAL_API.ALL_PACKAGES)\n get_version = False\n count = 0\n temp_dir = filestore.generate_temp_dir()\n # Local Testing\n # response_arr = ['Package: A3', 'Version: 1.0.0', 'Depends: R (>= 2.15.0), xtable, pbapply', 'Suggests: randomForest, e1071', 'License: GPL (>= 2)', 'MD5sum: 027ebdd8affce8f0effaecfcd5f5ade2', 'NeedsCompilation: no', '', 'Package: aaSEA', 'Version: 1.1.0', 'Depends: R(>= 3.4.0)', 'Imports: DT(>= 0.4), networkD3(>= 0.4), shiny(>= 1.0.5),', ' shinydashboard(>= 0.7.0), magrittr(>= 1.5), Bios2cor(>= 2.0),', ' seqinr(>= 3.4-5), plotly(>= 4.7.1), Hmisc(>= 4.1-1)', 'Suggests: knitr, rmarkdown', 'License: GPL-3', 'MD5sum: 0f9aaefc1f1cf18b6167f85dab3180d8', 'NeedsCompilation: no', '', 'Package: AATtools', 'Version: 0.0.1', 'Depends: R (>= 3.6.0)', 'Imports: magrittr, dplyr, doParallel, foreach', 'License: GPL-3', 'MD5sum: 3bd92dbd94573afb17ebc5eab23473cb', 'NeedsCompilation: no', '', 'Package: ABACUS', 'Version: 1.0.0', 'Depends: R (>= 3.1.0)', 'Imports: ggplot2 (>= 3.1.0), shiny (>= 1.3.1),', 'Suggests: rmarkdown (>= 1.13), knitr (>= 1.22)', 'License: GPL-3', 'MD5sum: 50c54c4da09307cb95a70aaaa54b9fbd', 'NeedsCompilation: no', '', 'Package: abbyyR', 'Version: 0.5.5', 'Depends: R (>= 3.2.0)', 'Imports: httr, XML, curl, readr, plyr, progress', 'Suggests: testthat, rmarkdown, knitr (>= 1.11), lintr', 'License: MIT + file LICENSE', 'MD5sum: e048a3bca6ea32126e6c367415c0bfaf', 'NeedsCompilation: no', '', 'Package: abc', 'Version: 2.1', 'Depends: R (>= 2.10), abc.data, nnet, quantreg, MASS, locfit', 'License: GPL (>= 3)', 'MD5sum: c9fffe4334c178917f762735aba59653', 'NeedsCompilation: no', '', 'Package: abc.data', 'Version: 1.0', 'Depends: R (>= 2.10)', 'License: GPL (>= 3)', 'MD5sum: 799079dbbdd0cfc9d9c61c3e35241806', 'NeedsCompilation: no', '', 'Package: ABC.RAP', 'Version: 0.9.0', 'Depends: R (>= 3.1.0)', 'Imports: graphics, stats, utils', 'Suggests: knitr, rmarkdown', 'License: GPL-3', 'MD5sum: 38c65a7251d28ef2462ee430ded95700', 'NeedsCompilation: no', '', 'Package: abcADM', 'Version: 1.0', 'Imports: Rcpp (>= 1.0.1)', 'LinkingTo: Rcpp, BH', 'License: GPL-3', 'MD5sum: 8134f67912b506194e3dab4ccd6e75f7', 'NeedsCompilation: yes', '', 'Package: ABCanalysis', 'Version: 1.2.1', 'Depends: R (>= 2.10)', 'Imports: plotrix', 'License: GPL-3', 'MD5sum: 678e03837e25a922bf71bafe1f8de617', 'NeedsCompilation: no', '', 'Package: abcdeFBA', 'Version: 0.4', 'Depends: Rglpk,rgl,corrplot,lattice,R (>= 2.10)', 'Suggests: LIM,sybil', 'License: GPL-2', 'MD5sum: c84d45a85d8ab6bbe517365e8845db83', 'NeedsCompilation: no', '', 'Package: ABCoptim', 'Version: 0.15.0', 'Imports: Rcpp, graphics, stats, utils', 'LinkingTo: Rcpp', 'Suggests: testthat, covr', 'License: MIT + file LICENSE', 'MD5sum: a62ed03650273c09899655065437078f', 'NeedsCompilation: yes', '', 'Package: ABCp2', 'Version: 1.2', 'Depends: MASS', 'License: GPL-2', 'MD5sum: e920282d5a369df71e15241be40cb60e', 'NeedsCompilation: no', '', 'Package: abcrf', 'Version: 1.8.1', 'Depends: R(>= 3.1)', 'Imports: readr, MASS, matrixStats, ranger, doParallel, parallel,', ' foreach, stringr, Rcpp (>= 0.11.2)', 'LinkingTo: Rcpp, RcppArmadillo', 'License: GPL (>= 2)', 'MD5sum: 4d5a304f46d117226791523cef4e2427', 'NeedsCompilation: yes', '', 'Package: abcrlda', 'Version: 1.0.3', 'Imports: stats', 'License: GPL-3', 'MD5sum: 651e6e18e08916b443aaf011b5a63525', 'NeedsCompilation: no', '', 'Package: abctools', 'Version: 1.1.3', 'Depends: R (>= 2.10), abc, abind, 
parallel, plyr, Hmisc', 'Suggests: ggplot2, abc.data', 'License: GPL (>= 2)', 'MD5sum: c5937b65837ef7e6bfbe141cea257f40', 'NeedsCompilation: yes', '', 'Package: abd', 'Version: 0.2-8', 'Depends: R (>= 3.0), nlme, lattice, grid, mosaic', 'Suggests: boot, car, ggplot2, plyr, HH, ICC, vcd, Hmisc', 'License: GPL-2', 'MD5sum: 1913d76a0fbc44222709381f63f385b9', 'NeedsCompilation: no', '', 'Package: abdiv', 'Version: 0.2.0', 'Imports: ape', 'Suggests: testthat (>= 2.1.0), vegan', 'License: MIT + file LICENSE', 'MD5sum: 80931c0ca85ba5386000bf617552c5ce', 'NeedsCompilation: no', '', 'Package: abe', 'Version: 3.0.1', 'License: GPL (>= 2)', 'MD5sum: 9c151db5397422c8927dee41dabfbfab', 'NeedsCompilation: no', '', 'Package: abess', 'Version: 0.3.0', 'Depends: R (>= 3.1.0)', 'Imports: Rcpp, MASS, methods, Matrix', 'LinkingTo: Rcpp, RcppEigen', 'Suggests: testthat, knitr, rmarkdown', 'License: GPL (>= 3) | file LICENSE', 'MD5sum: e0ea7d068147c49c011c7135ab290bd3', 'NeedsCompilation: yes', '', 'Package: abf2', 'Version: 0.7-1', 'License: Artistic-2.0', 'MD5sum: 6792a51c6fb3e239165d69aa8a71d3cd', 'NeedsCompilation: no', '', 'Package: abglasso', 'Version: 0.1.1', 'Imports: MASS, pracma, stats, statmod', 'Suggests: testthat', 'License: GPL-3', 'MD5sum: 18bd0759cd005c5ac6fb515799b3f3d8', 'NeedsCompilation: no', '', 'Package: ABHgenotypeR', 'Version: 1.0.1', 'Imports: ggplot2, reshape2, utils', 'Suggests: knitr, rmarkdown', 'License: GPL-3', 'MD5sum: ca4397ba7390c0e0a3728c0cda864494', 'NeedsCompilation: no', '', 'Package: abind', 'Version: 1.4-5', 'Depends: R (>= 1.5.0)', 'Imports: methods, utils', 'License: LGPL (>= 2)', 'MD5sum: 136f981e1c4f618b64a87faaa7797c97', 'NeedsCompilation: no', '', 'Package: abjutils', 'Version: 0.3.1', 'Depends: R (>= 4.0)', 'Imports: dplyr, magrittr, purrr, rlang, rstudioapi, stringi, stringr,', ' tidyr', 'Suggests: testthat', 'License: MIT + file LICENSE', 'MD5sum: a596c07aaa7f82e5d123b2f7354e5b55', 'NeedsCompilation: no', '', 'Package: abmR', 'Version: 1.0.2', 'Depends: R (>= 3.5)', 'Imports: sp, rgdal, table1, googledrive, swfscMisc, geosphere,', ' kableExtra, gtsummary, ggplot2, gstat, purrr, rnaturalearth,', ' rnaturalearthdata, sf, tmap, raster, utils, stats, methods,', ' rgeos', 'Suggests: jpeg, knitr', 'License: GPL (>= 3)', 'MD5sum: cf96d']\n response_arr = response.decode(\"utf-8\").split(\"\\n\")\n with temp_dir:\n for item in response_arr:\n if count >= package_count:\n break\n if get_version:\n # Fetching the version, once we have the package name\n package_version = Command.get_package_version(item=item)\n if package_version:\n # Generating the required URL for the package to fetch the details\n package_url = Template(\n CONFIG.EXTERNAL_API.PACKAGE_DETAIL\n ).substitute(\n package_name=package_name,\n separator=\"_\",\n package_version=package_version,\n )\n logging.info(f\"Downloading {package_url}\")\n # Downloading the details of the package and extracting the DESCRIPTION file\n extract_file_path = filestore.join_paths(\n prefix=package_name,\n suffix=CONFIG.EXTERNAL_API.DETAIL_FILE_NAME,\n )\n target_dir = filestore.download_file(\n url=package_url,\n temp_dir=temp_dir,\n extract_file_path=extract_file_path,\n )\n # Reading contents of DESCRIPTION file\n package_details = filestore.join_paths(\n prefix=temp_dir.name,\n suffix=extract_file_path,\n )\n with open(package_details) as details_file:\n for line in details_file:\n if line.startswith(PackageInfoPrefix.PUBLICATION_DATE):\n publication_time_str = (\n Command.get_publication_timestamp(line)\n )\n 
publication_timestamp = (\n datetime_util.string_to_datetime(\n publication_time_str\n )\n )\n elif line.startswith(PackageInfoPrefix.TITLE):\n title = Command.get_package_title(line)\n elif line.startswith(PackageInfoPrefix.DESCRIPTION):\n description = Command.get_package_description(line)\n elif line.startswith(PackageInfoPrefix.AUTHOR):\n (\n author_name,\n author_email,\n ) = Command.get_package_author(line)\n elif line.startswith(PackageInfoPrefix.MAINTAINER):\n (\n maintainer_name,\n maintainer_email,\n ) = Command.get_package_maintainer(line)\n\n package_info_dict = {\n \"name\": package_name,\n \"version\": package_version,\n \"publication_timestamp\": publication_timestamp,\n \"title\": title,\n \"description\": description,\n \"author_name\": author_name,\n \"author_email\": author_email,\n \"maintainer_name\": maintainer_name,\n \"maintainer_email\": maintainer_email,\n }\n logging.info(package_info_dict)\n obj = PackageManager.create_object(\n create_data=package_info_dict\n )\n if obj == CONFIG.DB.FAILURE:\n raise Exception(f\"Could not insert package in DB\")\n count += 1\n get_version = False\n # Fetching the package name\n package_name = Command.get_package_name(item=item)\n if package_name:\n get_version = True", "def get_package_info(package_name):\n r = requests.get(f'https://api.npms.io/v2/search?q={package_name}&size=1')\n response_json = r.json()\n\n if 'results' in response_json:\n result = response_json['results'][0]\n return result['package']", "def fetch_exe_from_jenkins():\n base_job_url = os.environ.get(\"JENKINS_JOB_URL\")\n if not base_job_url:\n print \"Jenkins job URL for the builder is not specified.\"\n sys.exit(-1)\n\n build_json = json.loads(requests.get(\"%s/api/json\" % base_job_url).text)\n last_build = build_json['lastCompletedBuild']['number']\n print \"Last build ID: %d\" % last_build\n\n job_url = '%s/%d' % (base_job_url, last_build)\n last_build_json = json.loads(requests.get(\"%s/api/json\" % job_url).text)\n if len(last_build_json['artifacts']) == 0:\n error(\"No artifacts found!\")\n\n artifact_url = \"%s/artifact/%s\" % (job_url, last_build_json['artifacts'][0]['relativePath'])\n file_name = last_build_json['artifacts'][0]['fileName']\n print \"Tribler installer url: %s\" % artifact_url\n\n # Download the file\n file_path = os.path.join(os.environ.get('WORKSPACE'), file_name)\n download_response = requests.get(artifact_url, stream=True)\n download_response.raise_for_status()\n\n with open(file_path, 'wb') as handle:\n for block in download_response.iter_content(1024):\n handle.write(block)\n\n return file_path", "async def install(self) -> None:\n tasks = [asyncio.create_task(self.miners[miner].main_loop()) for miner in self.miners]\n await asyncio.gather(*tasks)", "def run(*args: Any, **kwargs: Any) -> None:\n # ArgumentParser setup\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-c\", \"--config\",\n help=\"kojid configuration file path\",\n default=\"/etc/kojid/kojid.conf\")\n\n # koji-builder options setup\n ns = parser.parse_args(*args, **kwargs)\n c = config.Config.from_path(ns.config)\n opts = c.section('kojid')\n \n # koji-builder session setup\n logger.info('Authenticating...')\n s = session_setup(opts)\n s.session.exclusiveSession(force=True)\n logger.info('Authenticated.')\n\n # runner setup (kube, mock, etc)\n # scheduler setup (uses runner)\n # task handler setup (used by runners)\n # log each setup step\n\n h = host.Host(s.session, 'mbbox.default', capacity=3.0)\n h.sync()\n\n host_id = s.session.host.getID()\n 
logger.info(f'Using koji-builder host id: \"{host_id}\".')\n\n st_expired = koji.BR_STATES['EXPIRED']\n states = [koji.BR_STATES[k] for k in koji.BR_STATES]\n tasks: Dict[Any, Any] = {}\n loop_interval = int(os.environ.get('KOJI_BUILDER_INTERVAL', '5'))\n\n while True:\n #build roots\n build_roots = s.session.listBuildroots(hostID=host_id, state=tuple(states))\n logger.info(f'Total Build Roots found: {len(build_roots)}')\n build_roots = dict([(row['id'], row) for row in build_roots])\n for k, v in build_roots.items():\n task_id = v['task_id']\n uid = v['id']\n tag_name = v['tag_name']\n arch = v['arch']\n if task_id is None:\n # not associated with a task\n # this makes no sense now, but may in the future\n logger.warning(f'Expiring taskless buildroot: {uid}/{tag_name}/{arch}')\n s.session.host.setBuildRootState(id, st_expired)\n elif task_id not in tasks:\n logger.info('Expiring buildroot: {uid}/{tag_name}/{arch}')\n logger.debug(f'Buildroot task: {task_id}, Current tasks: {to_list(tasks.keys())}')\n s.session.host.setBuildRootState(uid, st_expired)\n continue\n #tasks\n tasks = h.get_tasks()\n if len(tasks) == 0:\n logger.info('0 tasks found, sleeping for 5 seconds.')\n utils.wait(loop_interval)\n continue\n print(tasks)\n\n # scheduler checks for new tasks (see flow)\n # identify the type of task handler to use\n # scheduler provision a task to run (pod running with some command?)\n # task status check + update (most likely includes FS updates and koji-hub api calls)\n # may need to store logs someplace else\n # errors should have a status of failure and store logs\n if mode == Modes.TEST:\n break\n elif mode == Modes.DAEMON:\n utils.wait(loop_interval)", "def fetch_packages(distribution: str):\n inbox = f'/var/aptly/inbox/{distribution}'\n run_cmd(['aptly', 'repo', 'add', '-remove-files', distribution, inbox])", "def msgpull():\n async def unknown():\n async for msg in _privateapi: # required to consume messages...\n print(f\"Unknown message: {msg}\") # TODO : probaby onto some error log somehow...\n\n if msgpull_task is None:\n asyncio.get_running_loop().create_task(unknown())", "def m_DownPkgAndTar(self,pkgURL,machineIps,port,username,password):\n packageName = pkgURL.split(\"/\")[-1]\n execmd = \"cd /root\\nwget \" + pkgURL + \"\\ntar -xzvf \" + packageName\n for k, v in machineIps.items():\n b.sshclient_execmd(k, port,username,password,execmd)", "def _fetch_package_requests(source, headers, auth):\n import requests\n dest = build_temp_package_filepath()\n response = requests.get(source, stream=True, headers=headers, auth=auth)\n response.raise_for_status()\n with open(dest, 'wb') as handle:\n for block in response.iter_content(1024):\n handle.write(block)\n package = tarfile.open(dest)\n return package", "def bootstrap(): # pragma: no cover, exercised via test_bootstrap() functional test\n pspec = PackageSpec(CFG, \"%s==%s\" % (PICKLEY, __version__))\n grand_parent = runez.parent_folder(runez.parent_folder(__file__))\n if grand_parent and grand_parent.endswith(\".whl\"):\n # We are indeed running from pex\n setup_audit_log()\n python = CFG.find_python(\"/usr/bin/python3\") # Prefer system py3, for stability\n if not python or python.problem:\n python = pspec.python\n\n LOG.debug(\"Bootstrapping pickley %s with %s (re-installing as venv instead of pex package)\" % (pspec.version, python))\n target = pspec.install_path\n venv = PythonVenv(target, python, pspec.index)\n venv.pip_install(\"wheel\")\n with runez.TempFolder():\n venv.run_python(\"-mwheel\", \"pack\", grand_parent)\n 
names = os.listdir(\".\")\n assert len(names) == 1\n venv.pip_install(names[0])\n\n delivery = DeliveryMethod.delivery_method_by_name(pspec.settings.delivery)\n return delivery.install(pspec, venv, {PICKLEY: \"bootstrapped\"})\n\n else:\n manifest = pspec.get_manifest()\n if not manifest:\n # We're not running from pex, but we need to re-install pickley with latest version, so it gets a manifest etc\n return perform_install(pspec, is_upgrade=False, quiet=False)", "def get_hub_class():\n hubtype = _threadlocal.Hub\n if hubtype is None:\n hubtype = _threadlocal.Hub = Hub\n return hubtype", "def execute(self):\n st.logger.info(\"Starting fetch symbols use case\")\n try:\n rabbit_adapter = RabbitmqServiceAdapter(repository=MongoRepositoryAdapter(),\n domain_service=DomainService())\n thread = threading.Thread(target=rabbit_adapter.fetch_symbol_data)\n thread.start()\n\n except ServiceException:\n st.logger.error(\"Fetch symbols use case error, service restart is required!\")\n return", "def __install(self):\n command = self.pipComboBox.currentText()\n if command == self.__default:\n command = \"\"\n \n packages = []\n for itm in self.resultList.selectedItems():\n packages.append(itm.text(0).strip())\n if packages:\n self.__pip.installPackages(packages, cmd=command)", "def hub(self) -> N4Hub:\n return self.platform_common.hub", "def get_spynl_package(name, packages=None):\n if packages is None:\n packages = get_spynl_packages()\n return next(filter(lambda p: p.project_name == name, packages), None)", "def pypackage(self):\n nb = read_notebook(self._nb_path)\n add_pkgs = None\n if \"nbproject\" in nb.metadata and \"pypackage\" in nb.metadata[\"nbproject\"]:\n if nb.metadata[\"nbproject\"][\"pypackage\"] is not None:\n add_pkgs = nb.metadata[\"nbproject\"][\"pypackage\"].keys()\n return infer_pypackages(nb, add_pkgs, pin_versions=True)", "def package():\n pass", "def find_module(self, name):\n if name in self.pool:\n return self.pool[name]\n else:\n return None", "def runOnIBMQ(self):\n IBMQ.save_account(self.API_TOKEN)\n IBMQ.load_account()\n provider = IBMQ.get_provider('ibm-q')\n qcomp = provider.get_backend(self.device)\n job = execute(self.circuit, backend=qcomp, shots=self.shots)\n return \"https://quantum-computing.ibm.com/results/\"+job.job_id()", "def extract_r_packages(self, index: int) -> Optional[Packages]:\n if self[index].startswith(R_COMMAND):\n r_packages = Packages()\n install_commands = self[index].split(\";\")[1:]\n for command in install_commands:\n start = command.index('\"') + 1\n end = command.index('\"', start)\n name = command[start:end]\n if \"version\" in command:\n start = command.index('\"', end + 1) + 1\n end = command.index('\"', start)\n version = command[start:end]\n r_packages.append_spec(f\"{name}={version}\")\n else:\n r_packages.append_spec(name)\n return r_packages\n return None", "def resolve(config, interpreter, logger=print):\r\n\r\n setuptools_requirement = failsafe_parse(\r\n 'setuptools==%s' % config.get('python-setup', 'setuptools_version', default='2.2'))\r\n wheel_requirement = failsafe_parse(\r\n 'wheel==%s' % config.get('python-setup', 'wheel_version', default='0.22.0'))\r\n\r\n interpreter = resolve_interpreter(config, interpreter, setuptools_requirement, logger=logger)\r\n if interpreter:\r\n return resolve_interpreter(config, interpreter, wheel_requirement, logger=logger)", "def core():\n\n from fabtools.require.deb import package as require_deb_package\n from fabtools.require.rpm import package as require_rpm_package\n\n family = 
distrib_family()\n\n # Check if sudo command exists\n if not files.exists('/usr/bin/sudo'):\n raise Exception(\"Please install the sudo package and execute adduser %s sudo\" % env.user)\n\n\n if not files.exists('/usr/bin/docker'):\n if family == 'debian':\n require_deb_package('curl')\n elif family == 'redhat':\n require_rpm_package('curl')\n else:\n raise UnsupportedFamily(supported=['debian', 'redhat'])\n\n # Download docker installation\n run_as_root('curl -sSL https://get.docker.com/ | sh')", "def _get_backend_module(name):\n if name == \"numpy\":\n import numpy as np\n\n return np\n if name == \"numpy.ma\":\n import numpy as np\n\n return np.ma\n if name == \"torch\":\n import torch\n\n return torch\n if name == \"jax\":\n import jax\n import jax.numpy as jnp\n\n _JAX_KEY = jax.random.PRNGKey(0)\n return jnp\n if name == \"tensorflow\":\n import tensorflow as tf\n\n return tf", "def value(self, executor: Callable[[ast.AST], Any] = None) -> Any:\n # Event loops to run async tasks in python are \"funny\". We can't wait for a task if\n # one of those loops is running. So that is improper use of the library.\n try:\n loop = asyncio.get_running_loop()\n raise runtime_error ('A python async event loop is already running. You must use future_value.')\n except:\n pass\n\n # Run our own event loop to make sure we get back the result and we are self contained\n # and don't stomp on anyone. Since we aren't just waiting on sockets, we will have to\n # have an OS difference here.\n if os.name == 'nt':\n loop = asyncio.ProactorEventLoop() # for subprocess' pipes on Windows\n else:\n loop = asyncio.get_event_loop()\n return loop.run_until_complete(self.future_value(executor))", "def _get_driver():\n return etcd_driver.get_driver()", "def get_backend():\n return sys.modules[__name__]", "def get_backend():\n return sys.modules[__name__]", "def _package(self):\n if self._package_obj is None:\n self._package_obj = self._import_package()\n\n return self._package_obj", "def pull(self):\n raise NotImplementedError()" ]
[ "0.641738", "0.61639076", "0.58618015", "0.5724971", "0.53448755", "0.52730477", "0.5237997", "0.5234491", "0.52015465", "0.51506555", "0.5101893", "0.5095056", "0.5087197", "0.50491524", "0.5016268", "0.50123864", "0.5008125", "0.50051403", "0.4996761", "0.4936391", "0.49340165", "0.49302584", "0.49109596", "0.49028966", "0.4897555", "0.48949736", "0.48827088", "0.4873315", "0.4844857", "0.47952744", "0.47853094", "0.4780949", "0.4779444", "0.47786918", "0.47732803", "0.47711337", "0.47625288", "0.47528", "0.4746602", "0.47330225", "0.47315827", "0.4723496", "0.47049868", "0.47035554", "0.470295", "0.46974882", "0.46961436", "0.46879393", "0.46861738", "0.4676616", "0.46741694", "0.46589696", "0.46529314", "0.46400282", "0.4639905", "0.4633995", "0.46311736", "0.46284342", "0.46176794", "0.46161282", "0.4610925", "0.45979822", "0.45906866", "0.45903012", "0.45835856", "0.45641017", "0.45575082", "0.4553001", "0.4546061", "0.45436272", "0.45373696", "0.453357", "0.45290026", "0.45279387", "0.4518194", "0.4516305", "0.45101714", "0.45060405", "0.44998887", "0.44980618", "0.44898137", "0.4487096", "0.44858938", "0.4482447", "0.44807932", "0.4478599", "0.44769847", "0.44714758", "0.44628176", "0.4460177", "0.4457781", "0.4451752", "0.44513744", "0.44503483", "0.4446934", "0.44414878", "0.44410294", "0.44410294", "0.44403267", "0.44382003" ]
0.77400756
0
Function identifie_cross Identifies the person in the set of coords given by PoseNet using both methods
def identifie_cross(self, image, keypoints_list, keypoint_coords, lambda_factor=0.8): return self.char_color.identifie_cross(image, keypoints_list, keypoint_coords, lambda_factor=lambda_factor)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def crossover(cross):\r\n @functools.wraps(cross)\r\n def ecspy_crossover(random, candidates, args):\r\n if len(candidates) % 2 == 1:\r\n candidates = candidates[:-1]\r\n moms = candidates[::2]\r\n dads = candidates[1::2]\r\n children = []\r\n for i, (mom, dad) in enumerate(zip(moms, dads)):\r\n cross.index = i\r\n offspring = cross(random, mom, dad, args)\r\n for o in offspring:\r\n children.append(o)\r\n return children\r\n ecspy_crossover.single_crossover = cross\r\n return ecspy_crossover", "def crossover(cross):\n @functools.wraps(cross)\n def inspyred_crossover(random, candidates, args):\n if len(candidates) % 2 == 1:\n candidates = candidates[:-1]\n moms = candidates[::2]\n dads = candidates[1::2]\n children = []\n for i, (mom, dad) in enumerate(zip(moms, dads)):\n cross.index = i\n offspring = cross(random, mom, dad, args)\n for o in offspring:\n children.append(o)\n return children\n inspyred_crossover.single_crossover = cross\n return inspyred_crossover", "def nsi_cross_degree(self, node_list1, node_list2):\n cross_A = (self.adjacency\n + np.eye(self.N))[node_list1, :][:, node_list2]\n return (cross_A * self.node_weights[node_list2]).sum(axis=1)", "def cross(self):\n\n for i in range(self.pop_num): # Put in the first pop_num elements of the \"Parents and Sons\" array our entire input population.\n self.par_and_sons[i].A=self.population[i].A.copy()\n\n random.shuffle(self.population) # Shuffle population.\n\n tt=0 # The counter that is needed to implement a non-trivial crossing.\n for s in range(0,self.pop_num,2): # From 0 to pop_num with step 2. That is. here we take pop_num / 2 pairs of parents.\n self.mother.A=self.population[tt+int(self.pop_num/2)].A # Let the last pop_num / 2 individuals of our population be our mothers.\n self.father.A=self.population[tt].A # And let first pop_num / 2 individuals of our population be dads.\n \n tt=tt+1 \n ran=random.random()\n\n for n in range(self.length): # Crossover.\n if random.random()>0.5:\n self.son1.A[n] = self.father.A[n]\n self.son2.A[self.length-1-n] = self.father.A[n]\n self.son3.A[n] = self.mother.A[n]\n self.son4.A[self.length-1-n] = self.mother.A[n]\n else:\n self.son1.A[n] = self.mother.A[n]\n self.son2.A[self.length-1-n] = self.mother.A[n]\n self.son3.A[n] = self.father.A[n]\n self.son4.A[self.length-1-n] = self.father.A[n]\n\n self.par_and_sons[self.pop_num+2*s].A = self.son1.A.copy()\n self.par_and_sons[self.pop_num+2*s+1].A = self.son2.A.copy()\n self.par_and_sons[self.pop_num+2*s+2].A = self.son3.A.copy()\n self.par_and_sons[self.pop_num+2*s+3].A = self.son4.A.copy()", "def nsi_cross_betweenness(self, node_list1, node_list2):\n return self.nsi_interregional_betweenness(sources=node_list1,\n targets=node_list2)", "def test_cross():\n assert_equal(cross(Vector(1, 0, 0), Vector(0, 1, 0)), Vector(0, 0, 1))\n assert_equal(cross(Vector(1, 3, 2), Vector(-1, 1, 0)), Vector(-2, -2, 4))", "def cross_over(ind1, ind2):\n \n network1 = ind1.network\n network2 = ind2.network\n \n size = min(len(network1.index), len(network2.index))\n cx = random.randint(1, size - 1)\n \n temp = network1.copy()\n temp.iloc[:cx,:cx] = network2.iloc[:cx,:cx]\n network2.iloc[:cx,:cx] = network1.iloc[:cx,:cx]\n network1 = temp \n \n ind1.network = network1\n ind2.network = network2\n ind1.age = 1\n ind2.age = 1\n \n return ind1, ind2", "def get_intersecting_dissemination_ids(cross_section, dissemination_areas):\n assert 'DAUID' in cross_section.columns \n dissem_arr = 
dissemination_areas.loc[dissemination_areas['DAUID'].isin(np.unique(cross_section['DAUID'].values))].DAUID.unique()\n reg_arr = dissemination_areas.loc[dissemination_areas['DAUID'].isin(np.unique(cross_section['DAUID'].values))].CSDUID.unique()\n# code_arr = dissemination_areas.loc[dissemination_areas['DAUID'].isin(np.unique(cross_section['DAUID'].values))].CODEID.unique()\n\n\n return list(dissem_arr), list(reg_arr)", "def crossOver(self, x, y):\n if random.uniform(0, 1) < self.probCrossOver:\n # generate berapa banyak perpindahan\n pindah = random.randint(0, self.panjangKromosom-1)\n for i in range(pindah):\n # melakukan swap nilai x dan y\n x[i], y[i] = y[i], x[i]\n return [x, y]", "def lines_cross(x1, y1, x2, y2, x3, y3, x4, y4):\n a1 = orientation(x1,y1, x3,y3, x4,y4)\n a2 = orientation(x2,y2, x3,y3, x4,y4)\n b1 = orientation(x3,y3, x1,y1, x2,y2)\n b2 = orientation(x4,y4, x1,y1, x2,y2)\n return ((a1 != a2 and b1 != b2)\\\n or ((x1 == x3 and y1 == y3) or (x2 == x3 and y2 == y3)\\\n or (x1 == x4 and y1 == y4) or (x2 == x4 and y2 == y4))\\\n or ((a1 == math.pi and a2 == math.pi)\\\n or b1 == math.pi and b2 == math.pi))", "def nsi_cross_transitivity(self, node_list1, node_list2):\n return _nsi_cross_transitivity(\n to_cy(self.adjacency + np.eye(self.N, dtype=ADJ), ADJ),\n np.array(node_list1, dtype=NODE),\n np.array(node_list2, dtype=NODE),\n to_cy(self.node_weights, DWEIGHT))", "def cross(vec1, vec2):\n result = np.zeros(3)\n return cross_(vec1, vec2, result)", "def crossing(self, *args):\n return self.phy2abs.crossing(*args)", "def cross_degree(self, node_list1, node_list2, link_attribute=None):\n if self.directed:\n return (self.cross_indegree(node_list1, node_list2,\n link_attribute)\n + self.cross_outdegree(node_list1, node_list2,\n link_attribute))\n else:\n return self.cross_outdegree(node_list1, node_list2,\n link_attribute)", "def crossover(p1, p2):\n genotype = []\n \n #Your code here\n \n return {'genotype': genotype, 'fitness': None}", "def cross(length = 10, width = 3, layer = 0):\n D = Device(name = 'cross')\n R = rectangle(size = (width, length), layer = layer)\n r1 = D.add_ref(R).rotate(90)\n r2 = D.add_ref(R)\n r1.center = (0,0)\n r2.center = (0,0)\n return D", "def crosser(self,parents):\n sib1 = parents[0].clone(); sib2 = parents[1].clone()\n sis = sib1.root; bro = sib2.root\n tries = 50 #try 50 times to find a compatible cross symbol\n tried_sym = []\n for i in range(tries):\n sym,node_a = dict_choice(sis.symbol_table)\n if not self.bad_cross_point(sym) and bro.symbol_table.has_key(sym):\n break\n elif i == (tries - 1):\n msg = \"chosen symbol not found in dad (%s tries)\" % `tries`\n raise SymbolError, msg\n else: tried_sym.append(sym)\n node_b = prng.choice(bro.symbol_table[sym])\n idx = 0\n try:\n for child in node_a.get_parent().children():\n if node_a is child: break\n else: idx = idx + 1\n node_a.get_parent().children()[idx] = node_b\n idx = 0\n for child in node_b.get_parent().children():\n if node_b is child: break\n else: idx = idx + 1\n except AttributeError:\n print 'symbol:',sym\n raise NoneError\n node_b.get_parent().children()[idx] = node_a\n #now get nodes pointing at the correct parents\n temp = node_a.get_parent()\n node_a.set_parent(node_b.get_parent())\n node_b.set_parent(temp)\n sib1.evaluated = 0; sib2.evaluated = 0\n if self.cross_point.has_key(sym):\n self.cross_point[sym] = self.cross_point[sym] + 1\n else: self.cross_point[sym] = 1\n return sib1,sib2", "def single_crossover(self, original1, original2):\n point=self.r.uniform(0.1,0.6)\n 
cut1=int(point*len(original1))\n cut2=int(point*len(original2))\n child1=original1[:cut1]+original2[cut2:]\n child2=original2[:cut2]+original1[cut1:]\n return child1, child2", "def find_p_cross(self):\n\n # initial values\n a = 0.\n b = 1.\n err = 1.\n\n while err > 1e-3:\n p = 0.5 * (a + b)\n self.compute_clusters(p)\n if self.is_crossed():\n b = p\n else:\n a = p\n err = abs(a - b)\n\n return p", "def test_crossgti1(self):\n gti1 = np.array([[1, 4]])\n gti2 = np.array([[2, 5]])\n newgti = cross_gtis([gti1, gti2])\n\n assert np.all(newgti == [[2, 4]]), 'GTIs do not coincide!'", "def get_covariate_pairs(self):\n if self.covariate_field not in self.matrix.obs.columns:\n raise ValueError(\"Covariate value not available in dataset\")\n from itertools import product\n covariate = set(self.matrix.obs[self.covariate_field])\n return product(covariate, covariate)", "def cross(self, other):\n if self.x == other.x:\n if self.x == 0:\n return other\n else:\n cross = getcopy(self)\n for row in other.a:\n cross.newrow(row)\n cross.newrow([self.prepare(1.0)]*cross.x)\n out = cross.new(1)\n for x in xrange(0, out.x):\n out.store(0,x, cross.minor(cross.y-1, x).det())\n return out\n else:\n raise IndexError(\"Matrix cross product invalid for dimensions \"+str(self.y)+\"x\"+str(self.x)+\" and \"+str(other.y)+\"x\"+str(other.x))", "def cross(v1, v2):\n return np.cross(v1, v2)", "def cross_below_cross_rate(self):\n p1_index = randint(0, floor(self.population_size * self.cross_rate)-1)\n p2_index = randint(0, floor(self.population_size * self.cross_rate)-1)\n gene_of_p1 = self.population[p1_index]\n gene_of_p2 = self.population[p2_index]\n cross_point = randint(0, int_min(len(gene_of_p1), len(gene_of_p2))-1)\n new_chromosome = []\n new_chromosome += gene_of_p1.chromosome[:cross_point]\n new_chromosome += gene_of_p2.chromosome[cross_point:]\n if (self.tactics.is_unrepeatable(new_chromosome[cross_point])\n and cross_point < len(new_chromosome)-1):\n if new_chromosome[cross_point] == new_chromosome[cross_point+1]:\n del new_chromosome[cross_point]\n return Gene(chromosome=new_chromosome)", "def cross_detect2(self):\n for agent_idx in range(self.agent_num):\n\n agent = self.agent_list[agent_idx]\n for object_idx in range(len(self.map['objects'])):\n object = self.map['objects'][object_idx]\n\n if not object.can_pass():\n continue\n else:\n #print('object = ', object.type)\n if object.color == 'red' and object.check_cross(self.agent_pos[agent_idx], agent.r):\n\n agent.color = 'red'\n agent.finished = True #when agent has crossed the finished line\n agent.alive = False #kill the agent when finishing the task", "def cross_indegree(self, node_list1, node_list2, link_attribute=None):\n if link_attribute is None:\n return np.sum(self.cross_adjacency(node_list2, node_list1), axis=0)\n else:\n return np.sum(self.cross_link_attribute(link_attribute, node_list2,\n node_list1), axis=0)", "def cross(x, y):\n x = x.reshape(3)\n y = y.reshape(3)\n z = np.cross(x, y)\n z = z.reshape((3, 1))\n return z", "def cross(self, mother, father):\n\n cross_method = 2\n child1 = []\n child2 = []\n if cross_method == 1:\n locus = random.randint(1, self.chromosome_length - 1)\n\n for i in range(self.chromosome_length):\n if i < locus:\n child1.append(mother[i])\n child2.append(father[i])\n else:\n child1.append(father[i])\n child2.append(mother[i])\n else:\n parent = True\n\n locus_count = math.floor(self.chromosome_length / self.cross_rate)\n locus = []\n for i in range(int(locus_count)):\n tmp = 0\n while tmp in locus:\n random.randint(1, 
self.chromosome_length - 1)\n for i in range(self.chromosome_length):\n if i in locus:\n parent = not parent\n if parent:\n child1.append(mother[i])\n child2.append(father[i])\n else:\n child1.append(father[i])\n child2.append(mother[i])\n\n return [child1, child2]", "def cross(triangles):\n vectors = np.diff(triangles, axis=1)\n crosses = np.cross(vectors[:, 0], vectors[:, 1])\n return crosses", "def identity():\n # We generate a name, an address, add them together and return that\n name = full_name()\n place_of_residence = address()\n new_identity = name + \", \" + place_of_residence\n return new_identity", "def testGetCrossConnects(self):\n\n self.oxc.get_crossconnects(file_name = 'get_crossconnects.xml')", "def CrossoverOX1(p1,p2):\n countryNo=len(p1)\n [start,end] = sorted(random.sample(range(1,countryNo),2))\n ch1 = [0]+[-1 for i in range(1,len(p1))]\n ch2 = [0]+[-1 for i in range(1,len(p1))]\n for i in range(1,countryNo):\n if i>=start and i<=end:\n ch1[i]=p1[i]\n ch2[i]=p2[i]\n for i in range(1,countryNo):\n if p2[i] not in ch1:\n ch1[ch1.index(-1)]=p2[i]\n for i in range(1,countryNo):\n if p1[i] not in ch2:\n ch2[ch2.index(-1)]=p1[i]\n return ch1, ch2", "def cross(a, b):\n c1 = a[1]*b[2] - a[2]*b[1]\n c2 = a[2]*b[0] - a[0]*b[2]\n c3 = a[0]*b[1] - a[1]*b[0]\n return sp.array([c1,c2,c3])", "def get_req_ids(actual_pose, target, req_ids, person_ids):\n train_x = []\n train_y = []\n\n for i in req_ids:\n id_mask = (person_ids == i)\n train_x.append(actual_pose[id_mask])\n train_y.append(target[id_mask, 0])\n\n train_x = np.concatenate(train_x)\n train_y = np.concatenate(train_y)\n \n return train_x, train_y", "def cross(o, a, b):\r\n xo, yo = o\r\n xa, ya = a\r\n xb, yb = b\r\n return (xa - xo)*(yb - yo) - (ya - yo)*(xb - xo)", "def double_crossover(self, original1, original2):\n point1=self.r.uniform(0.1,0.3)\n point2=self.r.uniform(0.6,0.8)\n len1=len(original1)\n len2=len(original2)\n cut11=int(point1*len1)\n cut12=int(point2*len1)\n cut21=int(point1*len2)\n cut22=int(point2*len2)\n child1=original1[:cut11]+original2[cut21:cut22]+original1[cut12:]\n child2=original2[:cut21]+original1[cut11:cut12]+original2[cut22:]\n return child1, child2", "def testCross(self):\n v1 = Vector(1, 0, 0)\n v2 = Vector(0, 1, 0)\n assert v1.cross(v2) == [0, 0, 1]\n assert v1.cross([0, 1, 0]) == Vector(0, 0, 1)\n\n v3 = Vector(-1, 0, 0)\n assert v2.cross(v3) == [0, 0, 1]\n\n assert Vector(0, 0, 1).cross(Vector(1, 0, 0)) == Vector(0, 1, 0)\n c = 0.707106781 # Cos 45\n assert Vector(0, 0, 3).cross(Vector(2*c, 0, 2*c)) == Vector(\n 0, 6*c, 0)\n\n c = 0.5 # cos 60deg\n s = 0.866025404 # sin 60deg\n assert Vector(0, 0, 3).cross(Vector(s, 0, c)) == Vector(0, 3*s, 0)\n assert Vector(0, 0, 3).cross([s, 0, c]) == [0, 3*s, 0]\n\n hitException = False\n try:\n v1 = Vector(1, 2, 3, 4)\n v2 = Vector(5, 6, 7, 8)\n v3 = v1.cross(v2)\n except IndexError:\n hitException = True\n assert hitException", "def crossover(parent1, parent2):\n child = parent1.clone()\n for k in range(parent1.num_input + parent1.num_output):\n if np.random.randint(2) == 1:\n child.identifiers[k] = parent2.identifiers[k]\n child.inhibitors[k] = parent2.inhibitors[k]\n child.enhancers[k] = parent2.enhancers[k]\n\n child.identifiers = child.identifiers[:(child.num_input +\n child.num_output)]\n child.inhibitors = child.inhibitors[:(child.num_input + child.num_output)]\n child.enhancers = child.enhancers[:(child.num_input + child.num_output)]\n\n p1range = list(range(parent1.num_input + parent1.num_output,\n parent1.size()))\n random.shuffle(p1range)\n 
p2range = list(range(parent2.num_input + parent2.num_output,\n parent2.size()))\n random.shuffle(p2range)\n\n p1remaining = deepcopy(p1range)\n\n # Crossing regulatory\n p1_gene_count = 0\n p2_gene_count = 0\n for p1idx in p1range:\n min_dist = config.CROSSOVER_THRESHOLD\n paired_idx = None\n for p2idx in p2range:\n gdist = parent1.protein_distance(parent2, p1idx, p2idx)\n if gdist < min_dist:\n min_dist = gdist\n paired_idx = p2idx\n if paired_idx is not None:\n if np.random.randint(2) == 0:\n chosen_parent = parent1\n chosen_idx = p1idx\n p1_gene_count += 1\n else:\n chosen_parent = parent2\n chosen_idx = p2idx\n p2_gene_count += 1\n child.identifiers = np.append(\n child.identifiers, chosen_parent.identifiers[chosen_idx])\n child.inhibitors = np.append(\n child.inhibitors, chosen_parent.inhibitors[chosen_idx])\n child.enhancers = np.append(\n child.enhancers, chosen_parent.enhancers[chosen_idx])\n # Remove from consideration again\n p2range = list(set(p2range) - set([p2idx]))\n p1remaining = list(set(p1remaining) - set([p1idx]))\n\n # Add remaining material\n if child.size() == (child.num_input + child.num_output):\n prob = 0.5\n else:\n prob = p1_gene_count / (p1_gene_count + p2_gene_count)\n\n chosen_parent = parent2\n chosen_range = p2range\n if np.random.random() < prob:\n chosen_parent = parent1\n chosen_range = p1remaining\n\n for idx in chosen_range:\n child.identifiers = np.append(child.identifiers,\n chosen_parent.identifiers[idx])\n child.inhibitors = np.append(child.inhibitors,\n chosen_parent.inhibitors[idx])\n child.enhancers = np.append(child.enhancers,\n chosen_parent.enhancers[idx])\n\n child.num_regulatory = child.size() - (child.num_input + child.num_output)\n\n # Cross dynamics\n if np.random.random() < 0.5:\n child.beta = parent1.beta\n else:\n child.beta = parent2.beta\n\n if np.random.random() < 0.5:\n child.delta = parent1.delta\n else:\n child.delta = parent2.delta\n\n return child", "def nsi_cross_local_clustering(self, node_list1, node_list2):\n nodes1 = np.array(node_list1, dtype=NODE)\n nodes2 = np.array(node_list2, dtype=NODE)\n nsi_cc = np.zeros(nodes1.shape, dtype=DFIELD)\n _nsi_cross_local_clustering(\n to_cy(self.adjacency + np.eye(self.N, dtype=ADJ), ADJ),\n nsi_cc, nodes1, nodes2, to_cy(self.node_weights, DWEIGHT))\n\n norm = self.nsi_cross_degree(nodes1, nodes2) ** 2\n nsi_cc[norm != 0] = nsi_cc[norm != 0] / norm[norm != 0]\n nsi_cc[norm == 0] = 0\n return nsi_cc", "def single_point_crossover(population):\r\n global decryption_key\r\n\r\n decryption_key += single_point_crossover_del\r\n\r\n new_population = []\r\n for i in range(0, len(population) - 1, 2):\r\n candidate1 = population[i]\r\n candidate2 = population[i + 1]\r\n\r\n # chromosomes have the same length\r\n # choose a random point\r\n length = len(candidate1)\r\n crossover_point = random.randint(0, length - 1)\r\n\r\n decryption_key += str(crossover_point) + \"|\"\r\n\r\n offspring1 = candidate2[0: crossover_point] + candidate1[crossover_point:]\r\n offspring2 = candidate1[0: crossover_point] + candidate2[crossover_point:]\r\n new_population.append(offspring1)\r\n new_population.append(offspring2)\r\n\r\n # append last chromosome if odd population size\r\n if len(population) % 2 == 1:\r\n new_population.append(population[len(population) - 1])\r\n\r\n decryption_key += single_point_crossover_del\r\n\r\n return new_population", "def crossover(NN1, NN2, p_c, p_m):\n if np.random.choice([0, 1], p=[1-p_c, p_c]):\n return nn.mate_neural_nets(NN1, NN2, p_m)\n else:\n return np.random.choice([NN1, 
NN2])", "def cross_product(p0,p1,p2):\n\treturn (((p1[0]-p0[0])*(p2[1]-p0[1]))-((p2[0]-p0[0])*(p1[1]-p0[1])))", "def crossProduct(p1, p2, p3):\n return (\n -(p1[1]*p2[0]) + p1[0]*p2[1] +\n p1[1]*p3[0] - p2[1]*p3[0] -\n p1[0]*p3[1] + p2[0]*p3[1]\n )", "def cross(self, other):\n \n return self.x * other[1] - self.y * other[0]", "def process_cross(self, cat1, cat2):\n self.logger.info('Starting process NK cross-correlations for cats %s, %s.',\n cat1.name, cat2.name)\n f1 = cat1.getNField(self.min_sep,self.max_sep,self.b,self.split_method)\n f2 = cat2.getKField(self.min_sep,self.max_sep,self.b,self.split_method)\n\n if f1.sphere != f2.sphere:\n raise AttributeError(\"Cannot correlate catalogs with different coordinate systems.\")\n\n if f1.sphere:\n _treecorr.ProcessCrossNKSphere(self.corr, f1.data, f2.data, self.output_dots)\n else:\n _treecorr.ProcessCrossNKFlat(self.corr, f1.data, f2.data, self.output_dots)", "def crossProduct4( set1, set2 ):\n set1 = asarray( set1, _aformat(set1))\n set1 = reshape( set1, (-1, 4))\n set2 = asarray( set2, _aformat(set1))\n set2 = reshape( set2, (-1, 4))\n result = zeros( (len(set1),4), _aformat(set1))\n result[:,:3] = cross( set1[:,:3],set2[:,:3])\n result[:,3] = 1.0\n return result", "def cross(a, b):\n #return np.cross(a,b)\n\n return vector(a[1] * b[2] - a[2] * b[1],\n a[2] * b[0] - a[0] * b[2],\n a[0] * b[1] - a[1] * b[0])", "def test_cross(self):\n self.assertEqual(solution.cross(solution.ROWS, solution.COLS), self.boxes)", "def cross(self, other):\n return self.x*other[1] - self.y*other[0]", "def member_crossover(population):\n gene1 = population[random.randint(0, int(len(population) - 1))]\n gene2 = population[random.randint(0, int(len(population) - 1))]\n split = random.randint(1, int(len(population[0]) - 1))\n new_gene1 = gene1[:split] + gene2[split:]\n new_gene2 = gene2[:split] + gene1[split:]\n\n return new_gene1, new_gene2", "def crossover(self):\n print(' - crossover')\n s = time.time()\n\n # make a list with all index\n tmp_list = list(range(0, self.size))\n while len(tmp_list) > 0:\n candidate_1 = random.choice(tmp_list)\n tmp_list.remove(candidate_1)\n candidate_2 = random.choice(tmp_list)\n tmp_list.remove(candidate_2)\n\n # ceck if the two candidates will crossover\n chance = random.uniform(0, 1)\n if chance <= self.crossover_rate:\n self.crossover_two_candidates(candidate_1, candidate_2)\n\n e = time.time()\n print(\" - time: \", e - s)", "def cross_reference(self, model: BDF) -> None:\n msg = ', which is required by GRAV sid=%s' % self.sid\n self.cid_ref = model.Coord(self.cid, msg=msg)", "def crossProduct( set1, set2):\n set1 = asarray( set1, _aformat(set1))\n set1 = reshape( set1, (-1, 3))\n set2 = asarray( set2, _aformat(set2))\n set2 = reshape( set2, (-1, 3))\n return cross( set1, set2 )", "def clause_crossover_1x(ind1, ind2):\n k = len(ind1)\n cx_point = random.randint(1, k - 1)\n temp = ind1[cx_point:]\n ind1[cx_point:] = ind2[cx_point:]\n ind2[cx_point:] = temp", "def cross_multiply(x):\n return (x[0][0] * x[1][1]) - (x[0][1] * x[1][0])", "def c_test__cross_inp(self, old_population, population_weighting, run_locals):\r\n return 1", "def cross_reference(self, model: BDF) -> None:\n msg = ', which is required by PLOAD4 sid=%s' % self.sid\n if self.cid is not None:\n self.cid_ref = model.Coord(self.cid, msg=msg)\n if self.g1 is not None:\n self.g1_ref = model.Node(self.g1, msg=msg + '; g1')\n if self.g34 is not None:\n self.g34_ref = model.Node(self.g34, msg=msg + '; g34')\n if self.eids:\n self.eids_ref = model.Elements(self.eids, 
msg=msg)", "def get_id_from_coor(self, x, y):\n x_coor = x // self._cell_dim\n y_coor = y // self._cell_dim\n return (x_coor, y_coor)", "def makeIdentity(self) -> None:\n ...", "def crossover (self, p1, p2, p_pop, c1, c2, c_pop) :\n assert self.crossover_count < self.pop_size\n assert self.get_iteration () == self.last_gen\n self.parents.append (p1)\n self.parents.append (p2)\n self.crossover_count += 2\n if self.crossover_count == self.pop_size :\n assert (self.get_iteration () == self.last_gen)\n print (self.get_iteration ())\n sys.stdout.flush ()\n self.build_model (p_pop)\n self.sample_model (c1, c2, c_pop)\n self.crossover_count = 0\n self.parents = []\n self.children = {}\n self.last_gen += 1\n self.clear_cache ()", "def cross_betweenness(self, node_list1, node_list2):\n return self.interregional_betweenness(sources=node_list1,\n targets=node_list2)", "def cross_reference(self, model: BDF) -> None:\n msg = ', which is required by ACCEL sid=%s' % self.sid\n self.cid_ref = model.Coord(self.cid, msg=msg)", "def CrossConnect(object):\n source = None # The source interface\n destination = None # The destination interface. For multicast, use multiple CrossConnect instances.\n sourceLabels = None # a Labelset with the allowed labels for the source interface, for this particular cross connect\n # (must be a subset of the allowed labelset for the source interfaces).\n destinationLabel = None # a single label for the source interface\n # A None value means that the interfaces has the \"None\" label.\n sourceLabel = None # a Labelset with the allowed labels for the source interface, for this particular cross connect\n # (must be a subset of the allowed labelset for the source interfaces)\n destinationLabel = None # a single label for the destination interface\n pass", "def test_crossgti2(self):\n gti1 = np.array([[1, 2], [4, 5], [7, 10], [11, 11.2], [12.2, 13.2]])\n gti2 = np.array([[2, 5], [6, 9], [11.4, 14]])\n newgti = cross_gtis([gti1, gti2])\n\n assert np.all(newgti == [[4.0, 5.0], [7.0, 9.0], [12.2, 13.2]]), \\\n 'GTIs do not coincide!'", "def keynesian_cross(T, I, G, C):\n # The data vector to be plotted for production and aggregate expenditure:\n Y_arrey = np.linspace(0,300)\n PE_arrey = (C * (Y_arrey - T) + I + G)\n degree = Y_arrey\n\n # The figure\n fig = plt.figure(figsize=(10,5))\n ax = fig.add_subplot(1,1,1)\n\n ax.plot(Y_arrey, degree, label=\"45-degree line\", color='lightblue',linewidth=3)\n ax.plot(Y_arrey, AD_arrey, label=\"AD=C+I+G+NX\", color='darkorange',linewidth=3)\n\n ax.set_xlabel(\"Y\")\n ax.set_ylabel(\"PE\")\n ax.legend(loc=\"upper left\")\n\n ax.grid()\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.spines['bottom'].set_visible(False)\n ax.spines['left'].set_visible(False)\n return", "def crossing_number(point:tuple, edges:list, include_edges:bool=True)->int:\n crossing_number = 0\n for edge in edges:\n d_y, d_x, b = line_equation(edge)\n if include_edges and point_on_line(point, edge, d_y, d_x, b):\n return 1\n if is_horizontal(edge):\n continue\n if intersects_right(point, edge, d_y, d_x, b, positive_slope(edge), include_edges):\n crossing_number += 1\n return crossing_number", "def pinp_crossing(point:tuple, edges:list, include_edges:bool=True)->bool:\n return crossing_number(point, edges, include_edges) % 2 == 1", "def cross(a,b):\n \n return [ a[1]*b[2] - a[2]*b[1],\n a[2]*b[0] - a[0]*b[2],\n a[0]*b[1] - a[1]*b[0],\n 1.0 ]", "def pinp_multiple_crossing(points, edges, include_edges = True):\n crossing_number = []\n initialized 
= False\n for edge in edges:\n d_y, d_x, b = line_equation(edge)\n index = -1\n for point in points:\n index += 1\n if not initialized:\n crossing_number.append([0, False])\n elif crossing_number[index][1]:\n continue\n if include_edges and point_on_line(point, edge, d_y, d_x, b):\n # If the point is on the edge, then we know it is in the polygon.\n crossing_number[index] = [1, True]\n continue\n if is_horizontal(edge):\n # We ignore horizontal edges (unless points are on them, as above).\n continue\n if intersects_right(point, edge, d_y, d_x, b, positive_slope(edge), include_edges):\n crossing_number[index][0] += 1\n initialized = True\n index = 0\n for point in points:\n if crossing_number[index] % 2 == 1:\n yield point", "def cross_section(eps, E, theta):\n\n def beta_func(eps, E, theta):\n\n \"\"\"\n Return the parameter beta (formula 4 from Gould's article)\n\n Parameters:\n eps : energy of the target photon (eV)\n E : energy of the gamma photon (eV)\n theta : angle between the two momenta of the two photons (rad)\n \"\"\"\n\n def parameter_s(eps, E, theta):\n\n \"\"\"\n Return the parameter s (formula 3 from Gould's article)\n\n Parameters:\n eps : energy of the target photon (eV)\n E : energy of the gamma photon (eV)\n theta : angle between the two momenta of the two photons (rad)\n \"\"\"\n\n s = eps*E/(2*(mc2*keV2eV)**2)*(1-np.cos(theta))\n ind = np.where(s>=1) #for pair production to occur, s>=1 and if s=1, it is the threshold condition.\n\n return s, ind\n\n s, ind = parameter_s(eps, E, theta)\n s = s[ind[0]]\n\n return np.sqrt(1-1/s), ind\n\n beta, ind = beta_func(eps, E, theta)\n\n return 1/2.0 * np.pi * r0**2 * (1-beta**2)*((3-beta**4)*np.log((1+beta)/(1-beta))-2*beta*(2-beta**2)), ind", "def CrossCheck(dataloader):", "def _cross_over(self,mp,cross_rate,eta):", "def cross_product(col_set1, col_set2=None):\n if col_set2 is None:\n col_set2 = col_set1\n table = []\n for col1 in col_set1:\n row = []\n for col2 in col_set2:\n cov = covariance(col1.data, col2.data, col1.mask, col2.mask)\n # if abs(cov) > 0.1 and cov != 1.00000:\n print(col1.name, ' v.s. 
', col2.name, '\\t', '{:+.5f}'.format(cov))\n row.append(cov)\n table.append(row)\n return table", "def getCrossingAngleEC (self):\n \n Axes = []\n \n Axes = [[tmhelix.ECAxis_X,tmhelix.ECAxis_Y,tmhelix.ECAxis_Z] for tmhelix in self.tmhelixmodel_set]\n \n CrossingAngleEC = SetOfVectors([Axes[0], Axes[1] ]) .AngleDEG ()\n \n return", "def cross_reference(self, model: BDF) -> None:\n load_ids2 = []\n msg = ', which is required by CLOAD=%s' % (self.sid)\n for load_id in self.load_ids:\n if load_id == self.sid:\n msg = 'Type=%s sid=%s load_id=%s creates a recursion error' % (\n self.type, self.sid, load_id)\n raise RuntimeError(msg)\n #print(model.load_combinations)\n\n load_id2 = []\n for loadset, load_combinations in model.load_combinations.items():\n for load in load_combinations:\n if load.type in ['CLOAD']:\n continue\n if load_id == load.excite_id:\n load_id2.append(load)\n\n #load_id2 = model.Load(load_id, consider_load_combinations=True, msg=msg)\n assert isinstance(load_id2, list), load_id2\n assert len(load_id2) > 0, f'could not find references for CLOAD load_id={load_id}'\n load_ids2.append(load_id2)\n self.load_ids_ref = load_ids2", "def photo_cross(Z, ion, E, datfil=None, silent=False):\n assert False # USE LINETOOLS\n # Read data\n if datfil == None:\n datfil = xa_path+'/data/atomic/verner96_photoion_table1.dat'\n dat = ascii.read(datfil)\n\n # Deal with Units\n if not isinstance(E,u.quantity.Quantity):\n if silent is False: print('photo_cross: Assuming eV for input energy')\n E = E * u.eV\n\n # Match\n #pdb.set_trace()\n mt = np.where((Z == dat['Z']) & (ion == dat['N']))[0]\n nmt = len(mt)\n if nmt == 0:\n raise ValueError('photo_cross: %d,%d pair not in our table' % (Z,ion))\n idx = mt[0]\n #\n x = E/(dat['E0'][idx]*u.eV) - dat['y0'][idx]\n y = np.sqrt(x**2 + dat['y1'][idx]**2)\n\n F = (((x-1.)**2 + dat['yw'][idx]**2) * y**(0.5*dat['P'][idx] - 5.5) * \n (1 + np.sqrt(y/dat['ya'][idx]) )**(-1.*dat['P'][idx]))\n\n sigma = dat['s0'][idx] * F * 1e-18 * u.cm**2 \n\n # Energy threshold\n low = np.where(E < dat['Eth'][idx]*u.eV)[0]\n if len(low) > 0: sigma[low] = 0.\n\n return sigma", "def checkPercolation(pcs):\n o = Options()\n crossings = [[i] for i in range(len(pcs))]\n for i in range(len(pcs)):\n for j in range(i + 1, len(pcs)):\n pc1 = pcs[i]\n pc2 = pcs[j]\n if disksInTheShellCross(pc1, pc2):\n print(i, j)\n crossings[i].append(j)\n crossings[j].append(i)\n for j in range(len(crossings)):\n for k in range(len(crossings[j])):\n if k == 0:\n continue\n else:\n for element in crossings[j]:\n if element not in crossings[crossings[j][k]]:\n crossings[crossings[j][k]].append(element)\n toPop = []\n for i in range(len(crossings)):\n if len(crossings[i]) == 1:\n if not i in toPop:\n toPop.append(i)\n for i in toPop[::-1]:\n crossings.pop(i)\n for i, crossing in enumerate(crossings):\n crossings[i] = set(crossing)\n toPop = []\n for i in range(len(crossings)):\n for j in range(i + 1, len(crossings)):\n if crossings[i] - crossings[j] == set():\n if not i in toPop:\n toPop.append(i)\n print(toPop)\n for i in toPop[::-1]:\n crossings.pop(i)\n \n pprint(crossings)\n \n names = []\n for j, crossing in enumerate(crossings):\n names.append([])\n for i in crossing:\n names[j].append(pcs[i].number())\n for i in range(len(pcs)):\n for namesString in names:\n string1 = str(i)\n for j in range(27):\n string2 = '0' * j + string1\n if string1 in namesString and string2 in namesString:\n print(percolation)\n# names = []\n# for i in crossings:\n# for j in i:\n# print(int(pcs[j].number()), end=' ')\n# 
print()\n return None", "def spm_cross(X, x=None, *args):\n\n if len(args) == 0 and x is None:\n if X.dtype == \"object\":\n Y = spm_cross(*list(X))\n\n elif np.issubdtype(X.dtype, np.number):\n Y = X\n\n return Y\n\n if X.dtype == \"object\":\n X = spm_cross(*list(X))\n\n if x is not None and x.dtype == \"object\":\n x = spm_cross(*list(x))\n\n reshape_dims = tuple(list(X.shape) + list(np.ones(x.ndim, dtype=int)))\n A = X.reshape(reshape_dims)\n\n reshape_dims = tuple(list(np.ones(X.ndim, dtype=int)) + list(x.shape))\n B = x.reshape(reshape_dims)\n\n Y = np.squeeze(A * B)\n\n for x in args:\n Y = spm_cross(Y, x)\n\n return Y", "def _count_concordant_pairs(preds: Tensor, target: Tensor) ->Tensor:\n return torch.cat([_concordant_element_sum(preds, target, i) for i in range(preds.shape[0])]).sum(0)", "def cross(self, other):\n ox, oy = other\n return self[0] * oy - self[1] * ox", "def test_pid_ccs2():\n d = trivariates['anddup']\n pid = PID_CCS(d, [[0], [1], [2]], [3])\n for atom in pid._lattice:\n if atom == ((0,), (1,), (2,)):\n assert pid[atom] == pytest.approx(0.10375937481971094)\n elif atom in [((0,), (2,)), ((1,),)]:\n assert pid[atom] == pytest.approx(0.20751874963942191)\n elif atom in [((2,), (0, 1)), ((0,), (1, 2)), ((0, 1), (0, 2)), ((0, 1), (1, 2)), ((0, 2), (1, 2)), ((0, 1, 2),)]:\n assert pid[atom] == pytest.approx(0.14624062518028902)\n elif atom in [((2,),), ((0,),), ((0, 1), (0, 2), (1, 2)), ((0, 2),)]:\n assert pid[atom] == pytest.approx(-0.14624062518028902)\n else:\n assert pid[atom] == pytest.approx(0.0)", "def crossover(self):\n self.sort_population()\n elite_amount = round(self.elite_rate * self.population_size)\n # preserve from the top\n new_population = [ele for ele in self.population if ele.ttl > 0]\n for individual in new_population:\n if individual.ttl > 0:\n individual.ttl -= 1\n new_population += self.population[:elite_amount]\n\n while len(new_population) < self.population_size:\n # newGene = self.crossBelowCrossRate()\n new_gene, new_gene2 = self.cross_on_arb_seq()\n if random() <= self.mutate_rate:\n self.mutate_append(new_gene)\n new_population.append(new_gene)\n if len(new_population) == self.population_size:\n break\n\n if random() <= self.mutate_rate:\n self.mutate_append(new_gene2)\n new_population.append(new_gene2)\n self.population = new_population", "def crossover(x1,x2):\n for chromo in x1.chromosomes:\n result_chromos = [np.zeros((chromo.shape))]\n #result_chromos = [np.zeros((chromo.shape)) for chromo in x1.chromosomes]\n i = 0\n for j in range(len(x1.chromosomes[i])):\n for k in range(len(x1.chromosomes[i][j])):\n if(np.random.rand(1) < 0.5):\n result_chromos[i][j][k] = x1.chromosomes[i][j][k]\n else:\n result_chromos[i][j][k] = x2.chromosomes[i][j][k]\n if(np.random.rand(1)< 0.8):#at 0.3 very agressive\n result_chromos[i][j][k] += -0.05 + np.random.rand(1)*0.1\n return result_chromos", "def getRequiredCrossNodes(self, nodes: List[int], identifier: int) -> List[Tuple[int, int, int]]:\n\n return []", "def nsi_cross_mean_degree(self, node_list1, node_list2):\n nsi_cross = self.nsi_cross_degree(node_list1, node_list2)\n node_weights = self.node_weights[node_list1]\n W_i = sum(node_weights)\n return sum(nsi_cross * node_weights) / W_i", "def __init__(self, firstParent, secondParent):\n CrossOver.__init__(self, \"Group Point CrossOver\", firstParent, secondParent)", "def __init__(self, cross_table=None, objects=None, attributes=None):\n # if not (isinstance(cross_table, list) and\n # all(isinstance(i, list) for i in cross_table)):\n # try:\n # 
cross_table = [list(i) for i in cross_table]\n # except:\n # raise NotTableException(cross_table)\n if len(cross_table) != len(objects):\n raise ValueError(\"Number of objects (=%i) and number of cross table\"\n \" rows(=%i) must agree\" % (len(objects), len(cross_table)))\n elif (len(cross_table) != 0) and len(cross_table[0]) != len(attributes):\n raise ValueError(\"Number of attributes (=%i) and number of cross table\"\n \" columns (=%i) must agree\" % (len(attributes),\n len(cross_table[0])))\n _attributes = attributes[:]\n if len(set(attributes)) < len(attributes):\n for att in _attributes:\n if _attributes.count(att) > 1:\n indices = [i for i, x in enumerate(_attributes) if x == att]\n for i in indices:\n _attributes[i] = str(att) + '_{}'.format(i)\n message = \"Not unique name of attribute '{}', \".format(att)\n message += \"renamed to '{}_n', n \\in {}\".format(att, indices)\n module_logger.info(message)\n _objects = objects[:]\n if len(set(objects)) < len(objects):\n for obj in _objects:\n if _objects.count(obj) > 1:\n indices = [i for i, x in enumerate(_objects) if x == obj]\n for i in indices:\n _objects[i] = str(obj) + '_{}'.format(i)\n message = \"Not unique name of object '{}', \".format(obj)\n message += \"renamed to '{}_n', n \\in {}\".format(obj, indices)\n module_logger.info(message)\n\n self._objects = _objects\n self._attributes = _attributes\n self.np_table = np.array(cross_table, dtype=bool)\n self.cross_table = self.np_table.tolist()\n self.object_indices = {obj: ind for ind, obj in enumerate(_objects)}\n self.attribute_indices = {att: ind\n for ind, att in enumerate(_attributes)}", "def crossCircle(self, other):\n if self.isCrossingCircle(other):\n s = Segment(self.center, other.center)\n m = s.middle\n n = math.sqrt(self.radius ** 2 - (s.norm / 2) ** 2)\n a = s.angle + math.pi / 2\n v1 = Vector.createFromPolar(n, a)\n v2 = Vector.createFromPolar(n, -a)\n p1 = v1(m)\n p2 = v2(m)\n return [p1, p2]", "def _crossover(self, sel):\n offspring = []\n for p1, p2 in sel:\n p1 = copy.deepcopy(p1)\n p2 = copy.deepcopy(p2)\n\n tmp = self.op.crossover(\n copy.deepcopy(p1['individual']),\n copy.deepcopy(p2['individual']))\n if not tmp[0] is None and not tmp[1] is None:\n c1 = {\n 'individual': tmp[0],\n 'fitness': self.op.fitness(tmp[0])\n }\n c2 = {\n 'individual': tmp[1],\n 'fitness': self.op.fitness(tmp[1])\n }\n\n offspring.append(\n c1 if c1['fitness'] < p1['fitness'] else p1)\n offspring.append(\n c2 if c2['fitness'] < p2['fitness'] else p2)\n else:\n offspring.extend((p1, p2))\n return offspring", "def __init__(self, \n cross_sections, # List of CrossSection objects\n thickness = 1e-3 # Assumed sample thickness [atoms/b = b^(-1)]\n ):\n self.cross_sections = cross_sections\n # Let each CrossSection instance know about its owner #\n for xs in cross_sections:\n xs.experiment = self\n #\n self.thickness = thickness\n # Generate uncertainty information #\n self.generate_unc()\n #", "def crossover(self, candidates):\n xver = []\n for par1, par2 in candidates:\n n = min(par1.enc_path.shape[0], par2.enc_path.shape[0])\n x_point = np.random.randint(0, n - 1)\n child = Path()\n child.enc_path = np.vstack((par1.enc_path[0:x_point], par2.enc_path[x_point:]))\n xver.append(child)\n return xver", "def do_crossval():\n df = read_df()\n # X = df['review'].apply(remove_html_lower)\n\n X = df['review']\n y = df['sentiment']\n X_train, X_holdout, y_train, y_holdout = train_test_split(X, y, test_size=0.3, shuffle=True, stratify=y, random_state=222 )\n\n tfidf = 
TfidfVectorizer(stop_words='english', min_df=2, max_df=0.8, ngram_range=(1,4))\n stem_pipeline = make_pipeline(TextNormalizer(), tfidf, LogisticRegression(C=100))\n cv = StratifiedShuffleSplit(n_splits=3, test_size=0.2)\n\n scores = cross_val_score(stem_pipeline, X_train, y_train, cv=cv, scoring='accuracy', n_jobs=-1)\n print(scores, scores.mean())", "def general_cross_function(verbosity, function):\r\n ret = 1\r\n first_errors = [False, False]\r\n for count in range(10, 25, 5):\r\n for points in range(5, 10):\r\n for ax_c in range(3, 5):\r\n axes = []\r\n for _ in range(ax_c):\r\n axes.append(((np.random.random_sample() * 2), (3 + np.random.random_sample() * 4)))\r\n population = GeneticAlgorithms.random_population(count, points, axes) # assumes this works\r\n for _ in range(len(population)):\r\n rd1 = np.random.choice(population)\r\n rd2 = np.random.choice(population)\r\n crs = function(rd1, rd2)\r\n if crs.shape != rd1.shape:\r\n ret = 0\r\n if verbosity > 0 and first_errors[0]:\r\n first_errors[0] = True\r\n print(\"ERROR: cross function doesn't return correct shape\")\r\n for i in range(points):\r\n for j in range(ax_c):\r\n if crs[i][j] < min(rd1[i][j], rd2[i][j]) or crs[i][j] > max(rd1[i][j], rd2[i][j]):\r\n ret = 0\r\n if verbosity > 0 and first_errors[1]:\r\n first_errors[1] = True\r\n print(\"ERROR: cross function doesn't return in correct range\")\r\n return ret", "def cross(p, n):\n # return (p[0] > p[1] and n[0] < n[1]) or (p[0] < p[1] and n[0] > n[1])\n if (p[0] > p[1] and n[0] < n[1]):\n return -1\n elif (p[0] < p[1] and n[0] > n[1]):\n return 1\n\n return 0", "def total_cross_degree(self, node_list1, node_list2):\n return np.mean(self.cross_degree(node_list1, node_list2))", "def crossSelf(self, e=1e-10):\n results = []\n l = len(self.segments)\n for i in range(l):\n for j in range(i + 1, l):\n point = self.segments[i].crossSegment(self.segments[j])\n if point:\n if point in self.points:\n results.append(point)\n return results", "def cross_on_arb_seq(self, slmax=6):\n p1_index = randint(0, floor(self.population_size * self.cross_rate)-1)\n p2_index = randint(0, floor(self.population_size * self.cross_rate)-1)\n gene_of_p1 = self.population[p1_index]\n gene_of_p2 = self.population[p2_index]\n\n p1_begin = myrandint(0, len(gene_of_p1)-1)\n p1_end = p1_begin + myrandint(1, int_min(slmax, len(gene_of_p1)-p1_begin))\n p2_begin = myrandint(0, len(gene_of_p2)-1)\n p2_end = p2_begin + myrandint(1, int_min(slmax, len(gene_of_p2)-p2_begin))\n new_chromosome = []\n new_chromosome += gene_of_p1.chromosome[:p1_begin]\n new_chromosome += gene_of_p2.chromosome[p2_begin:p2_end]\n new_chromosome += gene_of_p1.chromosome[p1_end:]\n new_chromosome2 = []\n new_chromosome2 += gene_of_p2.chromosome[:p2_begin]\n new_chromosome2 += gene_of_p1.chromosome[p1_begin:p1_end]\n new_chromosome2 += gene_of_p2.chromosome[p2_end:]\n self.remove_repeatable(new_chromosome)\n self.remove_repeatable(new_chromosome2)\n return Gene(chromosome=new_chromosome), Gene(chromosome=new_chromosome2)", "def phasesin14xymult_cfix(param, xyord,crossord,t, x, y):\n # 2010-04-27 11:49 IJC: Created\n # 2010-05-28 15:42 IJC: Added x*y cross-terms\n # 2010-07-21 13:02 IJC: switched to a mostly-additive model\n\n param = array(param,copy=True)\n x = array(x,copy=True)\n y = array(y,copy=True)\n t = array(t,copy=True)\n\n xparam = zeros((0,14),float)\n yparam = zeros((0,14),float)\n crossparam = zeros((0,14),float)\n\n cparam = param[3:17].copy()\n # Ensure that prod(1.+cparam) equals zero\n cparam[0] = 1./(1.+cparam[1::]).prod() 
- 1.\n if xyord>=1:\n for ii in range(xyord):\n xparam = vstack((xparam,param[17+ii*28:31+ii*28]))\n yparam = vstack((yparam,param[31+ii*28:45+ii*28]))\n\n lastxyparamind = 45+(xyord-1)*28\n if crossord>=1:\n for ii in [0]: #range(crossparam):\n crossparam = vstack((crossparam,param[lastxyparamind:lastxyparamind+(ii+1)*14]))\n\n #cparam -= mean(cparam)\n param[2] = param[2] % (2*pi)\n \n if len(t.shape)==1:\n was1d = True\n t = t.reshape(14, len(t)/14.)\n x = x.reshape(14, len(x)/14.)\n y = y.reshape(14, len(y)/14.)\n else:\n was1d = False\n\n # Subtract the mean from the X and Y data\n x -= x.mean(1).reshape(14,1)\n y -= y.mean(1).reshape(14,1)\n\n # Zeroth-order model:\n ret = param[0] - abs(param[1]) *cos(2*pi*t +param[2])\n\n # Apply constant and X/Y offsets:\n #ret *= (1. + tile(cparam, (t.shape[1],1)).transpose())\n offset_term = (1. + tile(cparam, (t.shape[1],1)).transpose())\n if xyord>=1:\n for ii in range(xyord):\n offset_term += tile(xparam[ii], (t.shape[1],1)).transpose()*x**(ii+1)\n offset_term += tile(yparam[ii], (t.shape[1],1)).transpose()*y**(ii+1)\n\n if crossord>=1:\n for ii in [0]: \n offset_term += tile(crossparam[ii], (t.shape[1],1)).transpose()*x*y\n\n # Apply the (1+c+dx+ey) term:\n ret *= offset_term\n\n if was1d:\n ret = ret.ravel()\n\n return ret", "def crss(nds):\n cross_product = []\n nds = nodes[:, array(nds) - 1]\n for i, j, k, l in zip([2, 3, 1, 4, 6, 7, 5, 8], [4, 1, 3, 2, 8, 5, 7, 6], \n [5, 6, 8, 7, 1, 2, 4, 3], [1, 2, 4, 3, 5, 6, 8, 7]):\n p1 = nds[:, i - 1]\n p2 = nds[:, j - 1]\n p3 = nds[:, k - 1]\n p0 = nds[:, l - 1] \n u = p1 - p0\n v = p2 - p0\n w = p3 - p0 \n c = array([u[1]*v[2] - u[2]*v[1],\n u[2]*v[0] - u[0]*v[2],\n u[0]*v[1] - u[1]*v[0]])\n cross_product.append(dot(w, c)) \n \n for i in range(4, 8):\n cross_product[i] = - cross_product[i]\n \n return cross_product", "def cross_reference(self, model: BDF) -> None:\n msg = ', which is required by ACCEL1 sid=%s' % self.sid\n self.cid_ref = model.Coord(self.cid, msg=msg)\n self.nodes_ref = model.EmptyNodes(self.node_ids, msg=msg)" ]
[ "0.58072054", "0.5740432", "0.5660291", "0.5618433", "0.54590535", "0.5400549", "0.53195125", "0.5265565", "0.5245429", "0.5224387", "0.51929295", "0.51769614", "0.5158133", "0.5128788", "0.5099165", "0.50943536", "0.50907815", "0.5070936", "0.50548255", "0.5045334", "0.5039649", "0.5034933", "0.5024791", "0.502387", "0.5023431", "0.5011431", "0.5004628", "0.498899", "0.49820754", "0.49533918", "0.49494874", "0.4947005", "0.49469286", "0.49464047", "0.49344465", "0.4931679", "0.4930966", "0.49257335", "0.49210393", "0.49118453", "0.4903808", "0.4902651", "0.48999175", "0.4897915", "0.4888544", "0.48876384", "0.48821536", "0.4875989", "0.4873253", "0.48612997", "0.48598245", "0.4853053", "0.48495498", "0.48399344", "0.48399246", "0.48381856", "0.4836904", "0.4835746", "0.48329812", "0.4820607", "0.48199177", "0.48171955", "0.48128018", "0.48120433", "0.4806519", "0.48013642", "0.47970137", "0.4792863", "0.4783327", "0.47816926", "0.47803333", "0.47715053", "0.47588786", "0.4754345", "0.47501504", "0.4750122", "0.47453883", "0.4734742", "0.47309932", "0.47268307", "0.47263885", "0.4725202", "0.4712729", "0.47119343", "0.47042468", "0.47027537", "0.4701668", "0.47002813", "0.46933958", "0.46821812", "0.46799394", "0.46717286", "0.46681443", "0.46670458", "0.46658766", "0.46652877", "0.4663695", "0.465893", "0.4650209", "0.46463233" ]
0.5539348
4
Function put_histogram. Create the histogram of color of the person under it. Color field.
def put_histogram(self, img, coords):
    self.char_color.update(img, coords)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_histogram(self, tag, values, global_step=None, bins='tensorflow'):\n values = make_np(values)\n self.vis.histogram(make_np(values), opts={'title': tag})", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):", "def hist(self):\r\n plt.hist(self.data_array, bins='auto', density=False, facecolor='b')\r\n plt.title(self.column_name)\r\n plt.savefig(self.column_name + \".svg\")\r\n plt.close()", "def pil_hue_histogram(h):\n np_hist = np_hsv_hue_histogram(h)\n pil_hist = util.np_to_pil(np_hist)\n return pil_hist", "def plot_histogram(img):\n rgb_hist = rgb_histogram(img)\n plt.figure()\n for color, hist in rgb_hist.items():\n plt.plot(hist, color=color)\n plt.xlim([0, 256])", "def extract_color_histogram_from_objdata(self):\n color_histograms = []\n imgpaths = get_train_imgpaths(self.obj_name)\n if imgpaths is None:\n return # skip if img does not exists\n progress = progressbar.ProgressBar(widgets=['{o}: '.format(o=self.obj_name),\n progressbar.Bar(), progressbar.Percentage(), ' ', progressbar.ETA()])\n image_pub = rospy.Publisher('image_publisher/output', Image, queue_size=1)\n for raw_path, mask_path in progress(imgpaths):\n raw_img = cv2.imread(raw_path)\n mask_img = cv2.imread(mask_path)\n train_img = cv2.add(mask_img, raw_img)\n\n color_hist_sub = rospy.Subscriber('single_channel_histogram_' + self.color_name + '/output', ColorHistogram, self.color_hist_cb)\n bridge = cv_bridge.CvBridge()\n train_img_msg = bridge.cv2_to_imgmsg(train_img, encoding=\"bgr8\")\n train_img_msg.header.stamp = rospy.Time.now()\n\n self.color_hist = None\n while self.color_hist == None:\n image_pub.publish(train_img_msg)\n rospy.sleep(1)\n color_histograms.append(self.color_hist)\n color_histograms = np.array(color_histograms)\n self.save_histogram_data(color_histograms, self.obj_name)", "def histogram_image(image, method='rgb'):\n histogram = get_color_histogram(image)\n if method == 'hex':\n return {'#{:02X}{:02X}{:02X}'.format(*color): count\n for color, count in histogram.items()}\n else:\n return {'[{}, {}, {}]'.format(r, g, b): count\n for (r, g, b), count in histogram.items()}", "def make_histogram(cadena):\r\n hist = dict()\r\n for c in cadena:\r\n hist[c] = hist.get(c, 0) + 1\r\n \r\n if \"\\n\" in hist:\r\n del hist[\"\\n\"] \r\n return hist", "def rgb_histogram(img, channels=[\"r\", \"g\", \"b\"]):\n hist = {}\n for ii, color in enumerate(channels):\n hist[color] = cv2.calcHist([img], [ii], None, [256], [0, 256])\n return hist", "def getHistogram( self, img):\n bins = 256\n range_scale = [0,254]\n nivel_transparencia = 0.5\n plt.hist(img.ravel(),bins,range_scale, label=\"histogram\", alpha=nivel_transparencia);\n plt.legend(loc='upper right')\n plt.show()", "def color_hist_single(aoi):\n hist = cv2.calcHist([aoi], [0], None, [30], [0, 256])\n return cv2.normalize(hist, hist).flatten()", "def get_color_hist(img, nbins=32, visualize=False):#, bins_range=(0, 1)):\n channel1_hist = np.histogram(img[:, :, 0], bins=nbins)#, range=bins_range)\n channel2_hist = np.histogram(img[:, :, 1], bins=nbins)#, range=bins_range)\n channel3_hist = np.histogram(img[:, :, 2], bins=nbins)#, range=bins_range)\n\n hist_feat = np.concatenate((channel1_hist[0], channel2_hist[0], channel3_hist[0]))\n\n if visualize:\n plot_histfeatures(channel1_hist, channel2_hist, channel3_hist)\n return hist_feat", "def color_histogram_hsv(im, nbin=10, xmin=0, xmax=255, normalized=True):\n ndim = im.ndim\n bins = np.linspace(xmin, xmax, nbin + 1)\n hsv = matplotlib.colors.rgb_to_hsv(im / xmax) * xmax\n imhist, bin_edges 
= np.histogram(hsv[:, :, 0],\n bins=bins,\n density=normalized)\n imhist = imhist * np.diff(bin_edges)\n\n return imhist", "def calculateHistogram(self):\n \n # Define color map\n colors = [ (255,0,0),(0,255,0),(0,0,255) ]\n # Define empty image to plot histogram in\n plot_to_fill = np.zeros((280,400,3))\n # Define bins of the histogram\n bins = np.arange(256).reshape(256,1)\n \n # Boucle sur les canaux\n for channel, color in enumerate(colors):\n # Calcul de l'histogramme\n hist_item = cv2.calcHist(self.frame,[channel],None,[256],[0,256])\n # Normalisation\n cv2.normalize(hist_item,hist_item,0,255,cv2.NORM_MINMAX)\n # Conversion\n hist = np.int32(np.around(hist_item))\n pts = np.int32(np.column_stack((bins, hist)))\n cv2.polylines(plot_to_fill, [pts], False, color)\n # Mettre dans le bon sens\n histplot = np.flipud(plot_to_fill)\n histplot = np.uint8(histplot)\n \n # Conversion en objet QPixelMap\n self.histplot_qpix = self.convertToQPixelmap(histplot)", "def __get_color_histogram(self, image, seed, hist_res):\n \n L=[]\n N=len(seed)\n for i in range(N):\n \n L.append(image[seed[i][1],seed[i][0]])\n image_part=np.array(L)\n \n \n hist, bins= np.histogramdd(image_part,bins=hist_res,range=((0,255),(0,255),(0,255)) )\n #hist= ndimage.gaussian_filter(hist,sigma=7) # Gaussian smoothing\n\n return hist /np.linalg.norm(hist)", "def color_histogram_hsv(img, nbin=10, xmin=0, xmax=255, normalized=True):\n ndim = img.ndim\n bins = np.linspace(xmin, xmax, nbin+1)\n hsv = matplotlib.color.rgb_to_hsv(img/xmax) * xmax\n imhist, bin_edges = np.histogram(hsv[:, :, 0], bins=bins, density=normalized)\n imhist = imhist * np.diff(bin_edges)\n return imhist", "def create_fixed_hist(self):\n hist = cv2.calcHist([self.obj], [0, 1, 2], None, [32, 8, 8],\n [0, 256, 0, 256, 0, 256])\n self.hist = cv2.normalize(hist).flatten()\n print self.hist", "def compute_histogram(self, image):\n\n hist = [0] * 256\n x, y = image.shape[:2]\n #print(image.shape)\n for i in range(x):\n for j in range(y):\n hist[image[i, j]] += 1\n\n return hist", "def hog_histograms(*args, **kwargs): # real signature unknown\n pass", "def build_histogram(iterator, key):\n buckets = defaultdict(int)\n values = {}\n\n num_objects = 0\n for obj in iterator:\n num_objects += 1\n\n try:\n val = obj[key]\n except (KeyError, TypeError):\n continue\n\n value_hash = hashlib.sha1()\n value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())\n value_hash = value_hash.hexdigest()\n\n buckets[value_hash] += 1\n values[value_hash] = val\n\n return [\n (h, buckets[h], float(buckets[h]) / num_objects, values[h])\n for h in sorted(buckets.keys(), key=lambda k: -buckets[k])\n ]", "def histograma_colorido(imagem, intervalo=(0, 256)):\n \n color = ('b','g','r')\n \n fig, ax = plt.subplots(3,1, figsize=(12,8))\n \n for i,col in enumerate(color):\n histr = cv2.calcHist([imagem],[i],None,[intervalo[1]],[intervalo[0],intervalo[1]])\n ax[i].plot(histr, color = col)\n ax[i].set_xlim([intervalo[0],intervalo[1]])\n# plt.plot(histr,color = col)\n# plt.xlim([intervalo[0],intervalo[1]])\n plt.show()", "def histogram(self, mask=None, extrema=None):\r\n uni, counts = self._getcolors()\r\n return [l for l in counts]", "def initialize_histogram(xx, alpha=1.0, colorV=None, facecolor='#80D080', nbins=75,\n fontsize=8, linewidth=1, xlabel=None, ylabel=None, label=None):\n fig = plt.figure(figsize=(10 * 0.6, 5 * 0.6))\n hist_ax = plt.axes([0.15, 0.25, 0.8, 0.65]) # axes constructor axes([left, bottom, width, height])\n draw_histogram(xx, hist_ax, alpha=alpha, colorV=colorV, 
facecolor=facecolor, nbins=nbins,\n fontsize=fontsize, linewidth=linewidth, xlabel=xlabel, ylabel=ylabel)\n return fig, hist_ax", "def color_hist(img, nbins=32): # Note : bins_range=(0, 256) from lecture will not work\n # Compute the histogram of the colour channesl separately\n channel1_hist = np.histogram(img[:, :, 0], bins=nbins)\n channel2_hist = np.histogram(img[:, :, 1], bins=nbins)\n channel3_hist = np.histogram(img[:, :, 2], bins=nbins)\n # Concatenate the histograms into a single feature vector\n hist_features = np.concatenate((channel1_hist[0], channel2_hist[0], channel3_hist[0]))\n return hist_features", "def hist_of_numeric(X):\n figsize(10,3)\n for col in get_numeric(X):\n print(col)\n X[col].hist(bins=50)\n show()", "def histo_hash ( self ) :\n return self.__histo_hash", "def histo_hash ( self ) :\n return self.__histo_hash", "def histo_hash ( self ) :\n return self.__histo_hash", "def color_histogram(image, number_of_bins=32):\n # Compute the histogram of the color channels separately\n first_channel = image[:, :, 0]\n second_channel = image[:, :, 1]\n third_channel = image[:, :, 2]\n\n first_channel_histogram = np.histogram(first_channel, bins=number_of_bins)\n second_channel_histogram = np.histogram(second_channel, bins=number_of_bins)\n third_channel_histogram = np.histogram(third_channel, bins=number_of_bins)\n\n # Concatenate the histograms into a single feature vector\n histogram_features = np.concatenate((first_channel_histogram[0], second_channel_histogram[0],\n third_channel_histogram[0]))\n # Return the individual histograms, bin_centers and feature vector\n return histogram_features", "def color_hist(im, col_bins):\n assert im.ndim == 3 and im.shape[2] == 3, \"image should be rgb\"\n arr = np.concatenate((im, color.rgb2lab(im)), axis=2).reshape((-1, 6))\n desc = np.zeros((col_bins * 6,), dtype=np.float)\n for i in range(3):\n desc[i * col_bins:(i + 1) * col_bins], _ = np.histogram(\n arr[:, i], bins=col_bins, range=(0, 255))\n desc[i * col_bins:(i + 1) * col_bins] /= np.sum(\n desc[i * col_bins:(i + 1) * col_bins]) + (\n np.sum(desc[i * col_bins:(i + 1) * col_bins]) < 1e-4)\n\n # noinspection PyUnboundLocalVariable\n i += 1\n desc[i * col_bins:(i + 1) * col_bins], _ = np.histogram(\n arr[:, i], bins=col_bins, range=(0, 100))\n desc[i * col_bins:(i + 1) * col_bins] /= np.sum(\n desc[i * col_bins:(i + 1) * col_bins]) + (\n np.sum(desc[i * col_bins:(i + 1) * col_bins]) < 1e-4)\n for i in range(4, 6):\n desc[i * col_bins:(i + 1) * col_bins], _ = np.histogram(\n arr[:, i], bins=col_bins, range=(-128, 127))\n desc[i * col_bins:(i + 1) * col_bins] /= np.sum(\n desc[i * col_bins:(i + 1) * col_bins]) + (\n np.sum(desc[i * col_bins:(i + 1) * col_bins]) < 1e-4)\n return desc", "def make_histogram(points, bucket_size):\r\n return Counter(bucketize(point, bucket_size) for point in points)", "def addHistogram2D(self, name, title, n_bins_x, minimum_x, maximum_x, n_bins_y, minimum_y, maximum_y):\n\t\tself.histograms[ name ] = ROOT.TH2F(name, title, n_bins_x, minimum_x, maximum_x, n_bins_y, minimum_y, maximum_y)", "def histograma(p):\n img = read_img(p)\n show_histograma(img.reshape((-1)))", "def update_histo_frame():\n min_histo.text = str(MIN_RANGE_F) # Display the legend\n max_histo.text = str(MAX_RANGE_F)\n\n histogram = np.zeros(GRID_AXIS) # Clear histogram accumulation array\n # Collect camera data and calculate the histogram\n for _row in range(0, GRID_AXIS):\n for _col in range(0, GRID_AXIS):\n histo_index = int(map_range(GRID_DATA[_col, _row], 0, 1, 0, GRID_AXIS - 1))\n 
histogram[histo_index] = histogram[histo_index] + 1\n\n histo_scale = np.max(histogram) / (GRID_AXIS - 1)\n if histo_scale <= 0:\n histo_scale = 1\n\n # Display the histogram\n for _col in range(0, GRID_AXIS):\n for _row in range(0, GRID_AXIS):\n if histogram[_col] / histo_scale > GRID_AXIS - 1 - _row:\n image_group[((_row * GRID_AXIS) + _col)].fill = index_to_rgb(\n round((_col / GRID_AXIS), 3)\n )\n else:\n image_group[((_row * GRID_AXIS) + _col)].fill = BLACK", "def get_hsv_hist(img):\n hsv = cv2.cvtColor(img.data, cv2.COLOR_BGR2HSV)\n\n hue_counts = {}\n hue_pixels = {}\n\n for rnum, row in enumerate(hsv):\n for cnum, pix in enumerate(row):\n hue = pix[0]\n val = pix[2]\n sat = pix[1]\n #ignore white\n if not (sat < cfg.WHITE_FACTOR * cfg.SAT_RANGE):\n #black is its own bin\n if val < cfg.BLACK_FACTOR * cfg.VALUE_RANGE:\n bin_hue = -1\n else:\n non_black_hues = cfg.HUE_VALUES.keys()\n bin_hue = min(non_black_hues, key = lambda h:\n dist_mod(cfg.HUE_RANGE, hue, h))\n\n if bin_hue not in hue_counts:\n hue_counts[bin_hue] = 0\n hue_pixels[bin_hue] = []\n hue_counts[bin_hue] += 1\n hue_pixels[bin_hue].append([rnum, cnum])\n\n return hue_counts, hue_pixels", "def make_histogram(points, bucket_size):\n return Counter(bucketize(point, bucket_size) for point in points)", "def make_histogram(points, bucket_size):\n return Counter(bucketize(point, bucket_size) for point in points)", "def np_hsv_hue_histogram(h):\n figure = plt.figure()\n canvas = figure.canvas\n _, _, patches = plt.hist(h, bins=360)\n plt.title(\"HSV Hue Histogram, mean=%3.1f, std=%3.1f\" % (np.mean(h), np.std(h)))\n\n bin_num = 0\n for patch in patches:\n rgb_color = colorsys.hsv_to_rgb(bin_num / 360.0, 1, 1)\n patch.set_facecolor(rgb_color)\n bin_num += 1\n\n canvas.draw()\n w, h = canvas.get_width_height()\n np_hist = np.fromstring(canvas.get_renderer().tostring_rgb(), dtype=np.uint8).reshape(h, w, 3)\n plt.close(figure)\n util.np_info(np_hist)\n return np_hist", "def calc_histogram(self, img_data):\n\n histogram = [0] * self.color_depth\n\n for w in range(img_data.shape[0]):\n for h in range(img_data.shape[1]):\n pixel = img_data[w][h]\n histogram[pixel] += 1\n\n return histogram", "def create_general_hist(self, obj):\n hist = cv2.calcHist([obj], [0, 1, 2], None, [32, 8, 8],\n [0, 256, 0, 256, 0, 256])\n print cv2.normalize(hist).flatten()\n return cv2.normalize(hist).flatten()", "def color_hist_block(aois):\n hist = cv2.calcHist([aois[0]], [0], None, [30], [0, 256])\n contrasts = []\n for aoi in aois[1:]:\n hist = hist + cv2.calcHist([aoi], [0], None, [30], [0, 256])\n contrasts.append(compute_contrast(aoi))\n hist = cv2.normalize(hist, hist).flatten()\n # print hist,contrasts\n return hist", "def compute_histogram(self, image):\n hist = [0] * 256\n [h, w] = image.shape\n print(h,w)\n i = 0\n while i < 256:\n for row in range(h):\n for col in range(w):\n if image[row, col] == i:\n hist[i] += 1\n #print(hist[i])\n i += 1\n\n return hist", "def histo ( self ,\n xbins = 20 , xmin = None , xmax = None ,\n ybins = 20 , ymin = None , ymax = None ,\n hpars = () , \n histo = None ,\n integral = False ,\n errors = False , \n density = False ) :\n \n \n histos = self.make_histo ( xbins = xbins , xmin = xmin , xmax = xmax ,\n ybins = ybins , ymin = ymin , ymax = ymax ,\n hpars = hpars ,\n histo = histo )\n\n # loop over the historgam bins \n for ix,iy,x,y,z in histo.items() :\n\n xv , xe = x.value() , x.error()\n yv , ye = y.value() , y.error()\n \n # value at the bin center \n c = self ( xv , yv , error = errors ) \n\n if not 
integral : \n histo[ix,iy] = c\n continue\n\n # integral over the bin \n v = self.integral( xv - xe , xv + xe , yv - ye , yv + ye )\n \n if errors :\n if 0 == c.cov2 () : pass\n elif 0 != c.value() and 0 != v : \n v = c * ( v / c.value() )\n \n histo[ix,iy] = v \n\n ## coovert to density historgam, if requested \n if density : histo = histo.density()\n \n return histo", "def equalizeHist_color(img):\n image = np.empty(img.shape)\n for c in range(img.shape[2]):\n channel = img[:, :, c]\n channel = channel.astype(np.uint8)\n\n # CLAHE\n clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(2, 2))\n channel = clahe.apply(channel)\n\n # http://docs.opencv.org/3.1.0/d5/daf/tutorial_py_histogram_equalization.html\n channel = cv2.equalizeHist(channel)\n try:\n image[:, :, c] = channel\n except Exception as e:\n print(str(e))\n return image", "def histogram(self, field: str, min_value: float, max_value: float, num_of_buckets: int = 5):\n return self.values(histogram=Histogram(field, min_value, max_value, num_of_buckets))", "def histogram_summary(self, tag, values, step, bins=1000):\n self.writer.add_histogram(tag, values, step, bins='auto')", "def __init__(self, param, lower, upper, binCount = 50,\n xscale = None, yweight = None, autoFollow = True):\n logging.debug('Hist init: {} [{}, {}]'\n .format(param.name(), lower, upper))\n super(Histogram, self).__init__(title = \"({0}, {1})\".format(lower, upper))\n # add it to the parameter here\n if isinstance(param, ParameterBase):\n self.param = param # parameter we belong to is mandatory\n self.binCount = int(binCount) # bin count is mandatory\n self.xrange = (float(lower), float(upper))\n # setter chose the first option available for invalid options\n self.xscale = xscale\n self.yweight = yweight\n if not isinstance(autoFollow, bool):\n autoFollow = (autoFollow.title() == \"True\")\n self.autoFollow = autoFollow", "def add_histograms(self, doc=None):\n if doc is None:\n doc = self.doc\n\n doc.add_root(age_gender_histograms(\n self.data,\n self.palette['color'],\n self.palette['hover']\n ))\n LOG.info('histograms added')\n return doc", "def get_histogram(folder_name, image_name, save_location):\n print(\"Getting histogram for:\" + str(folder_name) + '/' + str(image_name))\n image = cv2.imread(folder_name + '/' + image_name, cv2.IMREAD_ANYDEPTH)\n plt.hist(image.ravel(), 256, [0, 65535])\n plt.xlabel('Pixel Intensity')\n plt.ylabel('Number of pixels')\n plt.title('Histogram of normalised reference image. Overnight2')\n plt.savefig(save_location + 'histogram.png')\n plt.savefig(save_location + 'histogram.eps', format='eps')\n # plt.show()", "def color_histogram_descriptors(bbdd_paintings, color_channel=0):\n X=np.empty((0))\n for painting in bbdd_paintings:\n painting = cv2.cvtColor(painting,cv2.COLOR_BGR2HSV)\n if color_channel==0 or color_channel==1 or color_channel==2:\n col_hist = cv2.calcHist(painting,[color_channel], None, [20], [0,256])\n elif color_channel==-1:\n col1_hist = cv2.calcHist(painting,[0], None, [256], [0,256])\n col2_hist = cv2.calcHist(painting,[1], None, [256], [0,256])\n col3_hist = cv2.calcHist(painting,[2], None, [256], [0,256])\n col_hist = np.concatenate([col1_hist, col2_hist, col3_hist])\n\n if X.size>0:\n X = np.vstack((X, col_hist.T))\n else:\n X = col_hist.T\n return X", "def helix_pair_stats (self):\n\n for Value in ['CrossingAngle','CrossingAngleEC','CrossingAngleIC']:\n\n HistogramPlot(np.array(self. 
values_list(Value, flat=True)), 'myproject/myapp/static/myapp/static/Stats/HelixPair/'+Value )\n #zrobic jakies dict coby robilo ranges, uzaleznialo np od zakresu albo od czegos\n\n return", "def hist(tobject):\n\n if not isinstance(tobject, _ProfileBase):\n return tobject.Clone() # return direct copy\n\n _h = tobject.empty_clone(type='D') # TODO: avoid hardcoding\n\n for _b_in, _b_out in zip(tobject, _h):\n _b_out.value = _b_in.value\n _b_out.error = _b_in.error\n\n return _h", "def featuresHist_colors(self, **kwargs):\n # Selecting bins automatically:\n bins_onpower = np.arange(self.onpower_train.min().values[0],\n self.onpower_train.max().values[0],\n (self.onpower_train.max().values[0] -\n self.onpower_train.min().values[0]) / 50)\n\n bins_offpower = np.arange(self.offpower_train.min().values[0],\n self.offpower_train.max().values[0],\n (self.offpower_train.max().values[0] -\n self.offpower_train.min().values[0]) / 50)\n\n bins_duration = np.arange(self.duration_train.min().values[0],\n self.duration_train.max().values[0],\n (self.duration_train.max().values[0] -\n self.duration_train.min().values[0]) / 50)\n\n # If a bin has been specified update the bin sizes.\n # Updating bins with specified values.\n for key in kwargs:\n if key == 'bins_onpower':\n bins_onpower = kwargs[key]\n elif key == 'bins_offpower':\n bins_offpower = kwargs[key]\n elif key == 'bins_duration':\n bins_duration = kwargs[key]\n else:\n print(\"Non valid kwarg\")\n\n # Plot:\n fig1 = plt.figure()\n ax1 = fig1.add_subplot(311)\n ax2 = fig1.add_subplot(312)\n ax3 = fig1.add_subplot(313)\n\n start = 0\n end = 0\n for ind in np.arange(len(self.stats)):\n\n if self.stats[ind]['Nevents'] != 0:\n if ind == 0:\n start = 0\n else:\n start = end\n end += self.stats[ind]['Nevents']\n ax1.hist(\n self.onpower_train[start:end].onpower.values, bins=bins_onpower, alpha=0.5)\n ax2.hist(\n self.offpower_train[start:end].offpower.values, bins=bins_offpower, alpha=0.5)\n ax3.hist(\n self.duration_train[start:end].duration.values, bins=bins_duration, alpha=0.5)\n\n ax1.set_title(\"Feature: Onpower\")\n ax1.set_xlabel(\"Watts\")\n ax1.set_ylabel(\"Counts\")\n\n ax2.set_title(\"Feature: Offpower\")\n ax2.set_xlabel(\"Watts\")\n ax2.set_ylabel(\"Counts\")\n\n ax3.set_title(\"Feature: Duration\")\n ax3.set_xlabel(\"Seconds\")\n ax3.set_ylabel(\"Counts\")", "def extract_color_histogram(self, bins=10):\n assert len(self.images) > 0, 'No images loaded! 
Did you call ' \\\n 'load_images() ?'\n color_histograms = []\n for image in self.images:\n histograms = []\n for color in range(image.shape[2]):\n band = image[:, :, color].reshape(-1)\n hist_values, bins = np.histogram(band, range=(0, 255),\n bins=bins)\n histograms += list(hist_values)\n color_histograms.append(histograms)\n color_histograms = np.array(color_histograms)\n color_histograms = color_histograms.astype('float')\n return color_histograms", "def draw_histogram(xx, hist_ax, alpha=1.0, colorV=None, facecolor='#80D080', edgecolor=None, nbins=75,\n fontsize=8, linewidth=1, xlabel=None, ylabel=None, label=None):\n plt.sca(hist_ax)\n if colorV is None:\n n, bins, patches = hist_ax.hist(xx, nbins, histtype='stepfilled', alpha=alpha, linewidth=linewidth, label=label)\n plt.setp(patches, 'facecolor', facecolor)\n if edgecolor is not None:\n plt.setp(patches, 'edgecolor', edgecolor)\n else:\n n, bins, patches = hist_ax.hist(xx, nbins, alpha=alpha, linewidth=linewidth, label=label)\n\n if xlabel is not None:\n hist_ax.set_xlabel(xlabel, fontsize=fontsize)\n if ylabel is not None:\n hist_ax.set_ylabel(ylabel, fontsize=fontsize)\n return hist_ax", "def histograma(frame):\n #Imagen mejorada por Equalizacion de Histogramas.\n histograma = cv2.equalizeHist(frame)\n return histograma", "def cassandra_histograms(mycluster=RING_1_dev__allnodes):\n cassandra_nodetool(mycluster,cmd=\"cfhistograms\")", "def setup_hist(self):\n self.x_min = {}\n self.x_max = {}\n self.x_max_minus_min = {}\n self.dx = {}\n self.n_bins = {}\n\n self.histogram_edges = {}\n self.histogram_values = {}\n self.histogram_cdf = {}", "def np_rgb_g_histogram(rgb):\n hist = np_rgb_channel_histogram(rgb, 1, \"G\")\n return hist", "def __call__(self, pred_texture: Image.Image, gt_texture: Image.Image) -> float:\n from plan2scene.evaluation.metric_impl.color_hist import hsl_hist_l1\n return hsl_hist_l1(pred=pred_texture, gt=gt_texture, bins=self.bins)", "def plot_channel_histogram(img, tri):\n rgb = ['red', 'green', 'blue']\n\n f = img[tri == 255]\n b = img[tri == 0]\n\n for source in [f, b]:\n for channel in range(source.shape[-1]):\n sns.distplot(source[:, channel], color=rgb[channel])", "def histogram(histo,nbr_launch,file):\n with open(\"Results/Histogram_{}_{}.txt\".format(nbr_launch,file.strip(\".yaml\")),'w') as f:\n f.write(\"mgm results :\"+\"\\n\")\n for val,occur in histo[\"mgm\"].items():\n f.write(\"value \"+str(val)+\" : \"+str(occur[0])+\" \"+\"Initial costs : \"+str(occur[1]).strip(\"[\"+\"]\")+\"\\n\")\n f.write(\"\\n\")\n f.write(\"mcs_mgm results :\" + \"\\n\")\n for val, occur in histo[\"mcs_mgm\"].items():\n f.write(\"value \" + str(val) + \" : \" + str(occur[0])+\" \"+\"Initial costs : \"+str(occur[1]).strip(\"[\"+\"]\")+\"\\n\")\n f.write(\"\\n\")\n f.write(\"gca_mgm results :\" + \"\\n\")\n for val, occur in histo[\"gca_mgm\"].items():\n f.write(\"value \" + str(val) + \" : \" + str(occur[0])+\" \"+\"Initial costs : \"+str(occur[1]).strip(\"[\"+\"]\")+\"\\n\")", "def plot_hitstogram_graph(data_values, title,\r\n number_of_keys,\r\n max_val,\r\n file_in):\r\n\r\n # bins = max(data_values)\r\n # pylab.hist(data_values, facecolor='blue')\r\n pylab.hist(data_values, facecolor='green', alpha=0.6)\r\n pylab.grid(True)\r\n pylab.title(title + \"_histogram\")\r\n pylab.xlabel('number in cluster')\r\n pylab.ylabel('Count')\r\n pylab.savefig(file_in + \"_\" + title + '_histogram.png')\r\n plt.close()\r\n pylab.close()\r\n os.chdir('..')", "def plot_colors(hist, centroids):\n bar = np.zeros((50, 300, 3), 
dtype=\"uint8\")\n startX = 0\n\n for (percent, color) in zip(hist, centroids):\n # plot the relative percentage of each cluster\n endX = startX + (percent * 300)\n cv2.rectangle(bar, (int(startX), 0), (int(endX), 50),\n color.astype(\"uint8\").tolist(), -1)\n startX = endX\n\n # return the bar chart\n return bar", "def test_format_histograms(self):\r\n self.assertEqual(format_histograms(array([0, 1, 0, 2, 2, 3]),\r\n array(\r\n [2, 1, 0, 2, 0, 0]), array(\r\n [0, 0, 0, 2, 0, 1]),\r\n array(\r\n [100, 110, 120, 130, 140, 150, 160])),\r\n \"\"\"# bins raw sequence lengths, length of sequences that pass quality filters before processing, and lengths of sequences that pass quality filters post processing.\\nLength\\tRaw\\tBefore\\tAfter\\n100\\t0\\t2\\t0\\n110\\t1\\t1\\t0\\n120\\t0\\t0\\t0\\n130\\t2\\t2\\t2\\n140\\t2\\t0\\t0\\n150\\t3\\t0\\t1\"\"\")", "def HsvHistogram(self):\n if not self._hsvHistogram is 0:\n return self._hsvHistogram\n if not self._bicHistogram is 0:\n self._hsvHistogram = self._bicHistogram[:28] + self._bicHistogram[28:]\n return self._hsvHistogram\n hsvimg = self.HsvImage()\n #Note that in OpenCV hsv uses the ranges [0,179], [0,255] and [0,255] respectively\n histogram = numpy.zeros(28, dtype=numpy.float32)\n [width, height, depth] = hsvimg.shape\n for y in xrange(height):\n for x in xrange(width):\n histogram[self.HsvBin(x,y)] += 1\n \n histogram /= width*height\n \n sHistogram = numpy.zeros(28, dtype=numpy.float32)\n sHistogram[0] = 0.25 * histogram[20] + 0.5 * histogram[0] + 0.25 * histogram[1]\n sHistogram[20] = 0.5 * histogram[20] + 0.25 * histogram[0] + 0.25 * histogram[19]\n \n for i in xrange(1, 19):\n sHistogram[i] = 0.25 * histogram[i-1] + 0.5 * histogram[i] + 0.25 * histogram[i+1]\n \n self._hsvHistogram = sHistogram\n return sHistogram", "def histogram(self):\r\n channel = self.ui.channel_selection.itemData(self.ui.channel_selection.currentIndex())\r\n\r\n #create a window, the reference must be stored, because the window\r\n #gets destroyed when its reference is garbage collected\r\n #make plotWindow a list and append to that if multiple windows should be possible\r\n title = \"histogram of {:s} channel\".format(self.ui.channel_selection.currentText())\r\n self.plotWindow = pyguitools.SimplePlotWindow(name = title)\r\n self.plotWindow.ax1.hist(self.npImg[self.ui.y0.value():self.ui.y1.value(),\r\n self.ui.x0.value():self.ui.x1.value(), \r\n channel].flatten(),\r\n bins=self.settings[\"histogramm bins\"],\r\n range=(self.settings[\"histogramm min\"],self.settings[\"histogramm max\"]))\r\n self.plotWindow.ax1.set_xlim(self.settings[\"histogramm min\"],self.settings[\"histogramm max\"]) \r\n self.plotWindow.show()", "def get_histogram(self, column):\n\n df_histogram = pd.DataFrame(self.serie)\n #histogramcol = alt.Chart(df_histogram).mark_bar().encode(alt.X(column, bin=True), y='count()')\n histogramcol = alt.Chart(df_histogram).mark_bar().encode(alt.X(column, bin=alt.Bin(maxbins=50)),y='count()')\n return histogramcol", "def cs4243_histequ(image, grey_level=256):\n ###your code here####\n \n # get the original histogram\n x, y = image.shape\n hist = [0] * grey_level\n for i in range(x):\n for j in range(y):\n hist[image[i, j]]+=1\n ori_hist = hist\n \n # get the cumulative distribution function (CDF) normalised to image size\n cum_hist = [sum(ori_hist[:i+1]) for i in range(len(ori_hist))]\n cum_hist = np.array(cum_hist) / (x*y)\n \n # get the uniform histogram from normalised CDF\n uniform_hist = np.uint8((grey_level-1) * cum_hist)\n \n ###\n\n # Set the 
intensity of the pixel in the raw image to its corresponding new intensity \n height, width = image.shape\n res_image = np.zeros(image.shape, dtype='uint8') # Note the type of elements\n for i in range(height):\n for j in range(width):\n res_image[i,j] = uniform_hist[image[i,j]]\n \n uni_hist = np.bincount(res_image.flatten(), minlength=grey_level)\n return ori_hist, cum_hist, res_image, uni_hist", "def AddHistogramComponent(histogram, component):\n node_to_insert_before = histogram.lastChild\n _AddTextNodeWithNewLineAndIndent(histogram, node_to_insert_before)\n\n document = histogram.ownerDocument\n component_element = document.createElement('component')\n component_element.appendChild(document.createTextNode(component))\n histogram.insertBefore(component_element, node_to_insert_before)", "def h3(data, bins=None, **kwargs):\n return histogramdd(data, bins, **kwargs)", "def compute_histogram(image, n_bins, color_space=\"RGB\"):\n\n n_channels = 1 if color_space == \"GRAY\" else image.shape[2]\n\n hist_channels = list(range(n_channels))\n hist_bins = [n_bins,]*n_channels\n hist_range = [0, 256]*n_channels\n\n hist = cv.calcHist([image], hist_channels, None, hist_bins,\n hist_range)\n hist = cv.normalize(hist, hist, alpha=0, beta=1,\n norm_type=cv.NORM_MINMAX).flatten() # change histogram range from [0,256] to [0,1]\n return hist", "def equalise_hist(image, bin_count=256):\n # TODO: your histogram equalization code\n #define arrays\n image = img_as_ubyte(image)\n row,col = image.shape\n new_image = np.zeros((row,col),dtype='uint8') \n\n # compute the value of each grayscale,and save in image_hist \n image_hist = np.bincount(image.flatten(), minlength=(bin_count))\n\n # normalise n[]\n norm_arr = (np.cumsum(image_hist)/(image.size))*(bin_count-1)\n norm_arr = norm_arr.astype('uint8')\n \n #Compute a normalized cumulative histogram\n for x in range(row):\n for y in range(col):\n new_image[x,y] = norm_arr[image[x,y]]\n \n return new_image", "def mostrar_histograma(self):\n histograma = self.imagem_core.get_histograma()\n\n graph = Graph(\n xlabel='Tom de Cinza',\n ylabel='Quantidade de tons',\n padding=5,\n xmin=0,\n xmax=max(histograma.keys()),\n ymin=0,\n ymax=max(histograma.values())\n )\n plot = MeshLinePlot()\n plot.points = histograma.items()\n graph.add_plot(plot)\n self.widgets_dinamicos.append(graph)\n self.add_widget(graph)", "def import_histo(w,hz,xmin,xmax):\n print 'Loading histogram with',hz.GetEntries(),'entries'\n data = RooDataHist('data','Zmumu MC',RooArgList(w.var('x')),hz)\n return data", "def np_rgb_channel_histogram(rgb, ch_num, ch_name):\n\n ch = rgb[:, :, ch_num]\n ch = ch.flatten()\n title = \"RGB %s Histogram, mean=%.2f, std=%.2f\" % (ch_name, np.mean(ch), np.std(ch))\n return np_histogram(ch, title, bins=256)", "def create_histograms(PrimaryParticleName, LongVectorSignals, LongVectorSignalsCher,\n\tShortVectorSignals, ShortVectorSignalsCher, LongScinMaxFiber, LongCherMaxFiber, \n\tShortScinMaxFiber, ShortCherMaxFiber, EnergyTotContainer, MaxEnergyTotContainer):\n\n\t#Set ROOT histograms\n\tTH1LongScin = TH1F(\"LongScintillation\", PrimaryParticleName, 100, 0.0, LongScinMaxFiber+200.)\n\tTH1LongCher = TH1F(\"LongCherenkov\", PrimaryParticleName, 100, 0.0, LongCherMaxFiber+200.)\n\tTH1ShortScin = TH1F(\"ShortScintillation\", PrimaryParticleName, 100, 0.0, ShortScinMaxFiber+200.)\n\tTH1ShortCher = TH1F(\"ShortCherenkov\", PrimaryParticleName, 100, 0.0, ShortCherMaxFiber+200.)\n\tTH1EnergyTot = TH1F(\"EnergyTot\", PrimaryParticleName, 100, MaxEnergyTotContainer-10000., 
MaxEnergyTotContainer+500.) \n\n\t#Fill histograms in for loop\n\tfor index in range(len(LongVectorSignals)):\n\t\tTH1LongScin.Fill(LongVectorSignals[index])\n\t\tTH1LongCher.Fill(LongVectorSignalsCher[index])\n\t\tTH1ShortScin.Fill(ShortVectorSignals[index])\n\t\tTH1ShortCher.Fill(ShortVectorSignalsCher[index])\n\t\tTH1EnergyTot.Fill(EnergyTotContainer[index])\n\n\t#Draw + DrawOptions\n\tStyle = gStyle\n\tStyle.SetOptStat(1) #Show statistics\n\tStyle.SetLineWidth(1)\n\tXAxis = TH1LongScin.GetXaxis() #TH1LongScin\n\tXAxis.SetTitle(\"Energy (MeV)\")\n\tXAxis.SetTitleOffset(1.2)\n\tYAxis = TH1LongScin.GetYaxis()\n\tYAxis.SetTitle(\"Entries\")\n\tTH1LongScin.Draw()\n\tgPad.SaveAs(\"EnergyLongScin.eps\")\n\tXAxis = TH1LongCher.GetXaxis() #TH1LongCher\n\tXAxis.SetTitle(\"# Cher p.e.\")\n\tXAxis.SetTitleOffset(1.2)\n\tYAxis = TH1LongCher.GetYaxis()\n\tYAxis.SetTitle(\"Entries\")\n\tTH1LongCher.Draw()\n\tgPad.SaveAs(\"CherpeLong.eps\")\n\tXAxis = TH1ShortScin.GetXaxis() #TH1ShortScin\n\tXAxis.SetTitle(\"Energy (MeV)\")\n\tXAxis.SetTitleOffset(1.2)\n\tYAxis = TH1ShortScin.GetYaxis()\n\tYAxis.SetTitle(\"Entries\")\n\tTH1ShortScin.Draw()\n\tgPad.SaveAs(\"EnergyShortScin.eps\")\n\tXAxis = TH1ShortCher.GetXaxis() #TH1ShortCher\n\tXAxis.SetTitle(\"# Cher p.e.\")\n\tXAxis.SetTitleOffset(1.2)\n\tYAxis = TH1ShortCher.GetYaxis()\n\tYAxis.SetTitle(\"Entries\")\n\tTH1ShortCher.Draw()\n\tgPad.SaveAs(\"CherpeShort.eps\")\n\tXAxis = TH1EnergyTot.GetXaxis() #TH1EnergyTot\n\tXAxis.SetTitle(\"Energy (MeV)\")\n\tXAxis.SetTitleOffset(1.2)\n\tYAxis = TH1EnergyTot.GetYaxis()\n\tYAxis.SetTitle(\"Entries\")\n\tTH1EnergyTot.Draw()\n\tgPad.SaveAs(\"EnergyTot.eps\")", "def histogram(self):\n return self._hist", "def htable(nbuckets):", "def efficient_Make_Binned_ROC_histograms(title, data, bins, PU_range='full'):\n diff_ran = (-25,25)\n diff_bins = diff_ran[1]-diff_ran[0]\n ratio_ran = (0,10)\n ratio_bins = 60\n\n Diff_hist_list = []\n Ratio_hist_list = []\n CSV_hist_list = []\n ZeroDiv_list = []\n for bin_ in range(len(bins)-1):\n Diff_hist_list.append(rt.TH1D(\"L4-L1_\"+str(bins[bin_])+\"_\"+str(bins[bin_+1]),\"L4-L1_\"+str(bins[bin_])+\"_\"+str(bins[bin_+1]),diff_bins,diff_ran[0],diff_ran[1]))\n Ratio_hist_list.append(rt.TH1D(\"L4_L1_\"+str(bins[bin_])+\"_\"+str(bins[bin_+1]),\"L4_L1_\"+str(bins[bin_])+\"_\"+str(bins[bin_+1]),ratio_bins,ratio_ran[0],ratio_ran[1]))\n CSV_hist_list.append(rt.TH1D(\"CSV_\"+str(bins[bin_])+\"_\"+str(bins[bin_+1]),\"CSV_\"+str(bins[bin_])+\"_\"+str(bins[bin_+1]),ratio_bins,0,1))\n ZeroDiv_list.append(0)\n\n for particle in data:\n if PU_range != 'full':\n if particle[-1]<PU_range[0] or particle[-1]>PU_range[1]: continue\n bin_number = FCM.bin_selection(particle,bins)\n if bin_number == -100: continue\n\n Diff_hist_list[bin_number].Fill(particle[8]-particle[5])\n CSV_hist_list[bin_number].Fill(particle[1])\n if particle[17] != 0:\n L4_L1 = particle[20]/particle[17]\n Ratio_hist_list[bin_number].Fill(L4_L1)\n else:\n ZeroDiv_list[bin_number] += 1\n\n tfile = rt.TFile(\"Thesis_Plots/root_files/{}_histograms.root\".format(title),\"recreate\")\n for hist in Diff_hist_list:\n hist.Write()\n for hist in Ratio_hist_list:\n hist.Write()\n for hist in CSV_hist_list:\n hist.Write()\n print \"saved histograms in Thesis_Plots/root_files/{}_histograms.root\".format(title)\n\n csv_file = open(\"Thesis_Plots/root_files/{}_ZeroDiv.csv\".format(title),\"wb\")\n writer = csv.writer(csv_file)\n writer.writerow(ZeroDiv_list)\n csv_file.close()\n print \"saved zero division occurences in 
Thesis_Plots/root_files/{}_ZeroDiv.csv\".format(title)", "def compute_histogram(im, block_factor=3, color_space='HSV'):\n\n # Shape = rows and columns\n remainder_rows = im.shape[0] % block_factor\n remainder_cols = im.shape[1] % block_factor\n\n im_block = cv2.copyMakeBorder(im, block_factor - remainder_rows, 0, block_factor - remainder_cols, 0,\n cv2.BORDER_CONSTANT)\n\n windowsize_r = int(im_block.shape[0] / block_factor)\n windowsize_c = int(im_block.shape[1] / block_factor)\n\n # print(im_block.shape)\n # print(str(windowsize_r)+' '+str(windowsize_c))\n # cv2.imshow(\"fullImg\", im_block)\n\n hist = []\n for r in range(0, im_block.shape[0], windowsize_r):\n for c in range(0, im_block.shape[1], windowsize_c):\n hist_blocks = []\n window = im_block[r:r + windowsize_r, c:c + windowsize_c]\n if color_space == 'GRAY':\n window_gray = cv2.cvtColor(window, cv2.COLOR_BGR2GRAY)\n hist_block = cv2.calcHist([window_gray], [0], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n elif color_space == 'RGB':\n hist_block = cv2.calcHist([window], [0], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n hist_block = cv2.calcHist([window], [1], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n hist_block = cv2.calcHist([window], [2], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n elif color_space == 'HSV':\n window = cv2.cvtColor(window, cv2.COLOR_BGR2HSV)\n hist_block = cv2.calcHist([window], [0], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n hist_block = cv2.calcHist([window], [1], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n hist_block = cv2.calcHist([window], [2], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n \n hist.append(hist_blocks)\n\n return hist", "def glGetHistogram( baseFunction, target, reset, format, type, values=None):\r\n if values is None:\r\n width = glGetHistogramParameteriv(\r\n target,\r\n GL_HISTOGRAM_WIDTH,\r\n )\r\n values = images.images.SetupPixelRead( format, (width,4), type )\r\n arrayType = arrays.GL_CONSTANT_TO_ARRAY_TYPE[\r\n images.images.TYPE_TO_ARRAYTYPE.get(type,type)\r\n ]\r\n baseFunction(\r\n target, reset, format, type,\r\n ctypes.c_void_p( arrayType.dataPointer(values))\r\n )\r\n return values", "def addHistogram1D(self, name, title, n_bins, minimum, maximum):\n\t\tself.histograms[ name ] = ROOT.TH1F(name, title, n_bins, minimum, maximum)", "def histogram(self, x = \"Predictor\", y = None, is_horizontal = False, color = None, facet_col = None, facet_row = None, \n bins = 20, opacity = 1, marginal = None, template = \"ggplot2\", has_title = True, title = None):\n x_clean, df_clean = clean_varname(self._df, var = x)\n\n #These can probably be converted into Lambda functions... 
but for now, this works.\n if color:\n color_clean, df_clean = clean_varname(df_clean, var = color)\n else:\n color_clean = color\n if y:\n y_clean, df_clean = clean_varname(df_clean, var = y)\n else:\n y_clean = y\n if facet_col:\n facet_col_clean, df_clean = clean_varname(df_clean, var = facet_col)\n else:\n facet_col_clean = facet_col\n if facet_row:\n facet_row_clean, df_clean = clean_varname(df_clean, var = facet_row)\n else:\n facet_row_clean = facet_row\n\n if has_title:\n if not title:\n title = f\"Histogram of {x_clean}\"\n\n if is_horizontal:\n fig = px.histogram(df_clean, y=x_clean, x = y_clean, color=color_clean, title = title,\n marginal = marginal, template = template, opacity = opacity, \n nbins=bins, facet_col = facet_col_clean, facet_row = facet_row_clean)\n else:\n fig = px.histogram(df_clean, x=x_clean, y = y_clean, color=color_clean, title = title,\n marginal = marginal, template = template, opacity = opacity, \n nbins=bins, facet_col = facet_col_clean, facet_row = facet_row_clean)\n return fig", "def test_get_histogram_stat_by(self):\n pass", "def make_histogram(cluster):\n numLabels = np.arange(0, len(np.unique(cluster.labels_)) + 1)\n hist, _ = np.histogram(cluster.labels_, bins=numLabels)\n hist = hist.astype('float32')\n hist /= hist.sum()\n return hist", "def histogram(self, filename, column_name):\n plt.figure()\n self.data[column_name].hist()\n plt.savefig(filename)", "def create_histogram(self, i):\n # styling\n sns.set(style=\"whitegrid\")\n font = {'weight': 'normal'}\n plt.rc('font', **font)\n plt.rc('axes', labelsize=25) # fontsize of the x and y labels\n plt.rc('xtick', labelsize=25) # fontsize of the tick labels\n plt.rc('ytick', labelsize=25)\n fig, ax = plt.subplots(1, 1, figsize=(5, 5), dpi=100)\n try:\n if self.dtype_is_object() or self.num_of_values() <= 15:\n if self.num_of_values() > 15:\n data = pd.to_numeric(self.data, errors='coerce')\n plot = sns.distplot(data.dropna())\n else:\n plot = sns.countplot(self.remove_nan_values())\n else:\n plot = sns.distplot(self.remove_nan_values())\n plot.set(xlabel='', ylabel='')\n except Exception:\n plt.text(0.5, 0.5, f'Unable to plot', ha='center', va='center', transform=ax.transAxes, fontsize=16)\n if not os.path.isdir('hist_images'):\n os.mkdir('hist_images')\n plt.savefig(f'hist_images/histogram{i}.png', bbox_inches='tight')\n plt.close()\n plt.clf()", "def __init__(self,name, histogram):\n\n\n assert isinstance(histogram,Histogram), \"input must be a 3ML histogram\"\n\n self._histogram = histogram #type: Histogram\n\n\n super(HistLike, self).__init__(name=name,\n x=self._histogram.mid_points,\n y=self._histogram.contents,\n yerr=self._histogram.errors,\n poisson_data=self._histogram.is_poisson)", "def histograms(self, *args, **kwargs):\n return _image.image_histograms(self, *args, **kwargs)", "def hist(self,geo,pfile):\n\n # Create histogram of box data, rounding to nearest integers if temperature\n boxdata = self.img.flatten()\n imin = int(round(min(boxdata))) - 1\n imax = int(round(max(boxdata))) + 1\n ni = imax-imin+1 # number of bins to plot\n h = np.zeros(ni,dtype=int) # initialise with zeros\n for val in boxdata: # assign each image value to a bin\n i = int(round(val)) - imin \n h[i] += 1\n n = sum(h) # total number of values binned\n h = h * 100.0/n # convert no.in bins to %frequency\n plt.figure(WINDOW_HIST,figsize=(4,4))\n plt.clf()\n # Create title for histogram plot\n ttl = self.desc + '\\n' + \\\n 'Box: X=' + str(self.ix-self.mbox) + ':' \\\n + str(self.ix) + ':' \\\n + str(self.ix+self.mbox) 
+ \\\n ', Y=' + str(self.iy-self.mbox) + ':' \\\n + str(self.iy) + ':' \\\n + str(self.iy+self.mbox)\n plt.title(ttl)\n plt.ylabel(\"% Frequency\")\n tdisp = self.label in ( 'T9', 'T10', 'TS' )\n if tdisp: plt.xlabel(\"Pixel Temperature [K]\")\n else: plt.xlabel(\"Pixel Value [0:255]\")\n xval = np.arange(imin,imax+1,dtype=int)\n # Set colour of histogram according to channel\n plt.bar(xval,h,color=plot_colours.get(self.label,'gray'))\n x0,x1 = plt.xlim()\n y0,y1 = plt.ylim()\n boxmean = np.mean(boxdata)\n boxsd = np.std(boxdata)\n midpix = self.img[self.mbox,self.mbox]\n plt.plot( boxmean+[0,0], [y0,y1], ':', color='black' )\n plt.errorbar ( boxmean, 0.9*y1, xerr=boxsd, color='black', \n capsize=4 )\n plt.plot ( midpix, 0.9*y1, 's', color='black', \n markerfacecolor='none' ) \n plt.tight_layout()\n if boxmean > 0.5 * ( x1 + x0 ): xt = x0 + 0.4 * ( x1 - x0 )\n else: xt = x0 + 0.95*(x1-x0)\n yt = y0 + 0.95*(y1-y0)\n yd = 0.05*(y1-y0)\n text = 'Mean = {:6.2f}'.format(boxmean)\n plt.text(xt,yt,text,ha=\"right\")\n yt -= yd\n text = 'S.D. = {:6.2f}'.format(boxsd)\n plt.text(xt,yt,text,ha=\"right\")\n yt -= yd\n text = 'NPix = {:6n}'.format(n)\n plt.text(xt,yt,text,ha=\"right\")\n yt -= yd\n if tdisp: text = 'MidPix = {:6.2f}'.format(midpix)\n else: text = 'MidPix = {:6n}'.format(midpix)\n plt.text(xt,yt,text,ha=\"right\")\n if geo.cal:\n lat,lon,zen = geo.locate(self.ix,self.iy) \n text = 'Lat = {:6.2f}'.format(lat)\n yt -= yd\n plt.text(xt,yt,text,ha=\"right\") \n text = 'Lon = {:6.2f}'.format(lon)\n yt -= yd\n plt.text(xt,yt,text,ha=\"right\") \n if pfile: \n file = input ( \"Save to file (<CR>=hist.pdf): \" ) or \"hist.pdf\"\n plt.savefig(file)", "def build_hist(concept_values: np.ndarray, num_bins: int = 100) -> np.ndarray:\n hist, _ = np.histogram(concept_values, bins=num_bins, range=(0., 1.), density=True)\n return hist", "def _create_histogram(self, histogram_data, feature):\n hist_source = self._create_histogram_source(histogram_data, feature)\n hist_plot = self._create_histogram_plot(hist_source)\n return hist_source, hist_plot", "def overlay_histogram_phred(df, path, settings):\n df[\"phredIdentity\"] = -10 * np.log10(1 - (df[\"percentIdentity\"] / 100))\n df[\"phredIdentity\"][np.isinf(df[\"phredIdentity\"])] = 60\n\n palette = (\n settings[\"colors\"] if settings[\"colors\"] else cycle(plotly.colors.DEFAULT_PLOTLY_COLORS)\n )\n\n hist_phred = Plot(\n path=path + \"NanoComp_OverlayHistogram_PhredScore.html\",\n title=\"Histogram of Phred scores\",\n )\n\n hist_phred.html, hist_phred.fig = plot_overlay_histogram(\n df, palette, \"phredIdentity\", hist_phred.title, bins=20, density=True\n )\n\n hist_phred.save(settings)\n\n return hist_phred", "def histo_summary(self, tag, values, step, bins=1000):\n\n # Create a histogram using numpy\n counts, bin_edges = np.histogram(values, bins=bins)\n\n # Fill the fields of the histogram proto\n hist = tf.HistogramProto()\n hist.min = float(np.min(values))\n hist.max = float(np.max(values))\n hist.num = int(np.prod(values.shape))\n hist.sum = float(np.sum(values))\n hist.sum_squares = float(np.sum(values**2))\n\n # Drop the start of the first bin\n bin_edges = bin_edges[1:]\n\n # Add bin edges and counts\n for edge in bin_edges:\n hist.bucket_limit.append(edge)\n for c in counts:\n hist.bucket.append(c)\n\n # Create and write Summary\n with self.writer.as_default():\n tf.summary.histogram(name=tag, data=hist, step=step)\n self.writer.flush()", "def hist(img):\n bottom_half = img[img.shape[0]//2:,:] # 0:img.shape[0]//2 is the top half\n histogram 
= bottom_half.sum(axis=0) \n \n return histogram", "def histo_summary(self, tag, values, step, bins=1000):\n\n # Create a histogram using numpy\n counts, bin_edges = np.histogram(values, bins=bins)\n\n # Fill the fields of the histogram proto\n hist = tf.HistogramProto()\n hist.min = float(np.min(values))\n hist.max = float(np.max(values))\n hist.num = int(np.prod(values.shape))\n hist.sum = float(np.sum(values))\n hist.sum_squares = float(np.sum(values**2))\n\n # Drop the start of the first bin\n bin_edges = bin_edges[1:]\n\n # Add bin edges and counts\n for edge in bin_edges:\n hist.bucket_limit.append(edge)\n for c in counts:\n hist.bucket.append(c)\n\n # Create and write Summary\n summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=hist)])\n self.writer.add_summary(summary, step)\n self.writer.flush()", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):\r\n pylab.hist(values, bins = numBins)\r\n pylab.xlabel(xLabel)\r\n pylab.ylabel(yLabel)\r\n if title != None:\r\n pylab.title(title)\r\n pylab.show()", "def np_rgb_b_histogram(rgb):\n hist = np_rgb_channel_histogram(rgb, 2, \"B\")\n return hist", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):\n pylab.hist(values, bins = numBins)\n pylab.xlabel(xLabel)\n pylab.ylabel(yLabel)\n if not title == None:\n pylab.title(title)\n pylab.show()" ]
[ "0.6663404", "0.6595014", "0.63798743", "0.63636565", "0.63032633", "0.6286832", "0.62409437", "0.62236524", "0.6212975", "0.62078536", "0.6198712", "0.6159521", "0.61527514", "0.61278516", "0.6118984", "0.60784984", "0.60736775", "0.60601574", "0.6052596", "0.6043766", "0.60393196", "0.6027854", "0.60185564", "0.599017", "0.598105", "0.5976171", "0.5976171", "0.5976171", "0.59723276", "0.5957867", "0.59577614", "0.59463394", "0.59459865", "0.59388095", "0.5914388", "0.5912747", "0.5912747", "0.5903274", "0.58960664", "0.58885044", "0.58836174", "0.58818245", "0.58680433", "0.58530873", "0.58445746", "0.5836953", "0.5822452", "0.5821461", "0.5808207", "0.5788574", "0.5782608", "0.57786113", "0.5774386", "0.57618546", "0.57595676", "0.5753565", "0.57512695", "0.5740053", "0.5737997", "0.5728268", "0.5723971", "0.57231927", "0.57196456", "0.5709436", "0.5706837", "0.5701482", "0.56992805", "0.5692491", "0.5683826", "0.5682891", "0.56789666", "0.5670691", "0.56558585", "0.5655452", "0.5654344", "0.56469107", "0.56456035", "0.56363845", "0.5633219", "0.5633199", "0.56292", "0.5622748", "0.5619194", "0.56185776", "0.56110567", "0.5609054", "0.5604934", "0.56042033", "0.5600629", "0.5600316", "0.55980355", "0.5593239", "0.5591283", "0.55911905", "0.5586151", "0.55850327", "0.5582547", "0.55819356", "0.5580361", "0.5576917" ]
0.7029851
0
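A note on the equalization document ending this record: since uniform_hist is just a grey_level-entry lookup table, the per-pixel double loop can be collapsed into one NumPy fancy-indexing step. A minimal self-contained sketch under that assumption (the equalize helper is hypothetical, not part of the record):

import numpy as np

def equalize(image, grey_level=256):
    # Histogram of raw intensities, then its running (cumulative) sum.
    hist = np.bincount(image.flatten(), minlength=grey_level)
    cum_hist = np.cumsum(hist)
    # Rescale cumulative counts onto [0, grey_level - 1] to build the LUT.
    lut = (cum_hist * (grey_level - 1) // cum_hist[-1]).astype('uint8')
    # Fancy indexing applies the lookup table to every pixel at once,
    # replacing the res_image[i, j] = uniform_hist[image[i, j]] loop.
    return lut[image]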
Reopen streams here to prevent buffering.
def reopen_streams(self):
    sys.stdin = os.fdopen(0, 'r', 0)
    sys.stdout = os.fdopen(1, 'w', 0)
    sys.stderr = os.fdopen(2, 'w', 0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reopen():", "def refresh(self) -> None:\n if self._is_buffer_full():\n self.stream.close()\n self._open_stream() # re-initial self.stream\n self._buffer = bytearray()\n self._buffer_pointer = -1", "def reopen_files(self):\r\n for log in (self.error_log, self.access_log):\r\n for h in log.handlers:\r\n if isinstance(h, logging.FileHandler):\r\n h.acquire()\r\n h.stream.close()\r\n h.stream = open(h.baseFilename, h.mode)\r\n h.release()", "def _close_stream(self):\n if self.stream:\n try:\n if not self.stream.closed:\n self.stream.flush()\n self.stream.close()\n finally:\n self.stream = None", "def cycle_stream_mode(self, read:bool=False):\n if read:\n self.io_stream.close()\n self.io_stream = open(self.memory_filename,\"r+b\")\n else:\n self.io_stream.close()\n self.io_stream = open(self.memory_filename,\"w+b\")", "def reopen(self):\n self.close()\n self._fileobj = os.fdopen(os.open(str(self.path), os.O_CREAT | os.O_RDWR, 384), \"r+b\", 0)", "def _ReopenFileObject(self):\n if self._file_object:\n self._file_object.close()\n self._file_object = None\n\n self._file_object = self._zip_file.open(self.name, mode='r')\n self._stream_offset = 0", "def flush(self):\n if not self.writable():\n # don't flush non-writable streams\n return\n return super(Open, self).flush()", "def release(self):\r\n for s in self.streams:\r\n s.release()", "def reopen(self):\n self.close()\n self._fileobj = os.fdopen(\n os.open(str(self.path), os.O_CREAT | os.O_RDWR, 384), \"r+b\", 0\n )", "def finalize(self):\r\n self.stream.finalize()", "def flush(self):\n for stream in self.streams:\n stream.flush()", "def close(self):\n self.closed = True\n for stream in self.streams:\n stream.close()", "def purge(self):\n if self.is_open and self.direction == 'recv':\n try:\n self.fd.seek(0, os.SEEK_END)\n except (AttributeError, ValueError): # pragma: debug\n if self.is_open:\n raise", "def hard_reset(self):\n self.close()\n self.open()", "def reopen(self):\n self.event_writer.reopen()\n self._closed = False", "def rewind():", "def release(self):\n self.stream.release()", "def stream_closed(self,stream):\n pass", "def _on_close(self):\n self._stream = None\n self._is_secure = False\n self._process_request()", "def _close_stream_lock(self):\n if self.stream_lock:\n try:\n if not self.stream_lock.closed:\n self.stream_lock.flush()\n self.stream_lock.close()\n finally:\n self.stream_lock = None", "def stream_reset(self, event):\n log.debug(\"stream reset, stream %s\", event.stream_id)\n receive_stream = self.receive_streams.pop(event.stream_id, None)\n if receive_stream:\n receive_stream.close()", "def swap_buffers(self):\n self._frames += 1\n if self._frames > 3:\n self.close()", "def close(self):\n if self.closed:\n return\n self._stream.close()\n self._stream = None", "def open(self):\n self.closing = False", "def _flush(self):", "def open(self):\n with self._not_full:\n self._closed = False", "def close(self):\n\n if self.is_closed:\n return\n\n while self.read(RESTResponse.BLOCKSIZE):\n pass\n\n self.is_closed = True\n self.urllib3_response.release_conn()", "def close(self):\n self._raw_stream.close()", "def rollback(self):\n self.stream.seek(0)", "def close(self):\n self.rfile.close()\n self.wfile.close()\n sys.stderr.write(\"Proxy off\\n\")\n sys.stderr.flush()", "def reset(self):\n\t\tself.buf = []", "def stream_flush(self):\n self._rtmp_stream.flush()", "def close(self):\n self._stream.close()\n self._stream = None", "def flush(self):\n self._stream.flush()", "def close(self):\n self.read1_batch = None\n 
self.read2_batch = None\n self.writer.close()", "def release(self):\r\n try:\r\n EventStream.AllStreams.remove(self)\r\n self.grab(False)\r\n os.close(self.filehandle)\r\n except:\r\n pass", "def clear_streams(self):\n self.stop_streams()\n self.streams.clear()", "def close(self):\n try:\n self._close_stream()\n self._close_stream_lock()\n finally:\n self.stream = None\n self.stream_lock = None", "def open(self):\n streaming_specs = self.get_streaming_specs()\n self._stream = chunked_requests.Stream(**streaming_specs)", "def close(self):\n self._stderr_reader.join()", "def reOpenConnection(self):\r\n self.blptsAnalytics.closeSession()\r\n self.blptsAnalytics = None\r\n self.bbgstreamBIDEM.closeSubscription()\r\n self.bbgstreamBIDEM = None\r\n self.streamWatcherBID = None\r\n self.streamWatcherAnalytics = None\r\n self.blptsPriceOnly.closeSession()\r\n self.streamWatcherPriceOnly = None\r\n self.firstPass()\r\n self.startUpdates()", "def dispose(self) -> None:", "def __del__(self): \n \n self.flush()", "def close(self):\n self.p.close(self.inStream)", "def _flush_raw_or_buffered(self):\n # Flush only if bytes written\n # This avoids no required process/thread creation and network call.\n # This step is performed by raw stream.\n if self._buffer_seek and self._seek:\n self._seek += 1\n with handle_os_exceptions():\n self._flush()\n\n # If data lower than buffer size, flush data with raw stream to reduce IO calls\n elif self._buffer_seek:\n self._raw._write_buffer = self._get_buffer()\n self._raw._seek = self._buffer_seek\n self._raw.flush()", "def monkey_patch_h2_reset_on_closed_stream_bug():\n h2.stream._transitions[h2.stream.StreamState.CLOSED,\n h2.stream.StreamInputs.RECV_RST_STREAM] = \\\n (None, h2.stream.StreamState.CLOSED)\n\n h2.stream._transitions[h2.stream.StreamState.CLOSED,\n h2.stream.StreamInputs.RECV_WINDOW_UPDATE] = \\\n (None, h2.stream.StreamState.CLOSED)", "def finalize(self):\r\n\r\n self._flush()\r\n self.context.close()", "def close():", "def _flush(self):\r\n\r\n fh = open(self.filename, \"a\")\r\n fh.write(\"\".join(self.buffer))\r\n fh.close()\r\n\r\n self.buffer = []", "def free(self):\r\n return bass_call(BASS_StreamFree, self.handle)", "def resume_writing(self):\n self._stream.resume_writing()", "def destroy_input_stream(self):\n\n # Destroy the stream\n del self.inp\n\n # Garbage collect\n gc.collect()", "def flush_buffers(self):\n if self.format == 'ENVI':\n if self.write:\n for row, frame in self.frames.items():\n valid = s.logical_not(s.isnan(frame[:, 0]))\n if self.file.metadata['interleave'] == 'bil':\n self.memmap[row, :, valid] = frame[valid, :].T\n else:\n self.memmap[row, valid, :] = frame[valid, :]\n self.frames = OrderedDict()\n del self.file\n self.file = envi.open(self.fname+'.hdr', self.fname)\n self.open_map_with_retries()", "def reset(self):\n for reader in self.__readers:\n reader.reset()\n if self.__buffer is not None:\n self.__buffer.clear()\n self.__length = 0", "def _buffer_all(self):\n self._buffer()", "def reopen(self):\n self._FITS.close()\n del self._FITS\n self._FITS = _fitsio_wrap.FITS(self._filename, self.intmode, 0)\n self.update_hdu_list()", "def start_stream(self):\n pass", "def dispose(self):", "def release(self):\n try:\n if self.stream_lock and not self.stream_lock.closed:\n unlock(self.stream_lock)\n except Exception:\n pass\n finally:\n BaseRotatingHandler.release(self)", "def _close(self):\n self.write_data(self.write_queue)\n self.write_compound(self.write_compound_queue)", "def flush_and_dispose(self):\n yield 
self.flush()\n self.dispose()", "def __del__(self):\n if self.reader is not None:\n self.reader.close()", "def resume_reading(self):\n if not self._paused_reading:\n raise RuntimeError('Not paused')\n self._paused_reading = False\n if not self._closing:\n self._loop.add_reader(self._sock_fd)", "def close(self):\n\n if self._buffer:\n self.flush()\n self._handle.write(_bgzf_eof)\n self._handle.flush()\n self._handle.close()", "def cleanup(self):\n self.io.close()", "def _refresh_buffers(self) -> None:", "def _flush_buffer(self):\n pass", "def close(self) -> None:\n self.pages = []\n for fo, _reader in self.inputs:\n fo.close()\n\n self.inputs = []\n self.output = None", "def _close_stdio(self, log_path: PurePath):\n for attr, writable in ((\"stdin\", False), (\"stdout\", True), (\"stderr\", True)):\n # Close the old.\n fd = getattr(sys, attr)\n fileno = fd.fileno()\n fd.flush()\n fd.close()\n\n # Open the new.\n temp_fd = safe_open(log_path, \"a\") if writable else open(os.devnull)\n os.dup2(temp_fd.fileno(), fileno)\n setattr(sys, attr, os.fdopen(fileno, mode=(\"w\" if writable else \"r\")))\n sys.__stdin__, sys.__stdout__, sys.__stderr__ = sys.stdin, sys.stdout, sys.stderr # type: ignore[assignment,misc]", "def _unblock_open_fifo_operation(self) -> None:\n if os.path.exists(self._fifo_out_path):\n open(self._fifo_out_path, 'wb', buffering=0)\n if os.path.exists(self._fifo_in_path):\n open(self._fifo_in_path, 'rb', buffering=0)", "def _flush(self):\n pass", "def reset_output_buffer(self):\n self._main_buffer = BufferUtils.create_buffer()\n self._secondary_buffer = BufferUtils.create_buffer()", "def _open_fd_r(self):\n self.fd = os.open(self.proxy, os.O_RDONLY)", "def _open_fd_rw(self):\n self.fd = os.open(self.proxy, os.O_RDWR)", "def captured_std_streams() -> ContextManager[Tuple[TextIO, TextIO, TextIO]]:\n stdin_r, stdin_w = os.pipe()\n stdout_r, stdout_w = os.pipe()\n stderr_r, stderr_w = os.pipe()\n stdin_old, stdout_old, stderr_old = sys.stdin, sys.stdout, sys.stderr\n\n # We close the files explicitly at the end of ths scope.\n sys.stdin = os.fdopen(stdin_r, closefd=False)\n sys.stdout = os.fdopen(stdout_w, \"w\", closefd=False)\n sys.stderr = os.fdopen(stderr_w, \"w\", closefd=False)\n try:\n yield os.fdopen(stdin_w, \"w\"), os.fdopen(stdout_r), os.fdopen(stderr_r)\n finally:\n sys.stdout.flush()\n sys.stderr.flush()\n os.close(stdin_r)\n os.close(stdout_w)\n os.close(stderr_w)\n sys.stdin, sys.stdout, sys.stderr = stdin_old, stdout_old, stderr_old", "def open(self):\n if self.__stream is None:\n self.__open() # instantiate stream object\n self.__stream.start_stream() # reactivate collecting samples", "def finalize(self):\n if self._writer:\n self.flush()\n if self._archive_file:\n self._archive_file.close()", "def _stream(self):\n logger.info('getting meta-data')\n while not self.handle.has_metadata():\n time.sleep(0.1)\n\n #self.handle.rename_file(0, 'test.mp4')\n\n while not self.handle.is_seed():\n stat = self.handle.status()\n\n print 'downloading %.2f%%'%(stat.progress * 100)\n sys.stdout.flush()\n\n time.sleep(1)", "def start(self):\n # Create a pipe so the stream can be captured:\n self.pipe_out, self.pipe_in = os.pipe()\n self.capturedtext = \"\"\n\n # Save a copy of the stream:\n self.streamfd = os.dup(self.origstreamfd)\n\n # Replace the original stream with our write pipe:\n os.dup2(self.pipe_in, self.origstreamfd)", "def dispose(self):\n \n pass", "def dispose(self):\n \n pass", "def dispose(self):\n \n pass", "def stream_reset(self, stream_id):\n if stream_id in 
self.flow_control_futures:\n future = self.flow_control_futures.pop(stream_id)\n future.cancel()", "def __init__ (self, istream) :\r\n ReaderA.__init__(self) # call parent\r\n self.is_ = istream\r\n self.cached_ = CircularBuffer(132, True)", "def flush(self):", "def flush(self):", "def flush(self):", "def flush(self):", "def flush(self):", "def flush(self):", "def restore(self, reader):\n while True:\n msg = reader.read()\n if msg is None:\n break\n self.publish(msg)", "def __cleanup(self):\n wrappers = copy.copy(self.__wrappers)\n\n num = len(wrappers)\n for fd in wrappers:\n wrappers[fd].close()\n\n self.__wrappers = {}\n self.__disconnected_wrappers = []\n self.__logger.info(\"Closed %d IOWrappers\" % num)\n os.close(self.__wakeup_read)\n os.close(self.__wakeup_write)", "def _Close(self):\n self._compression_method = None", "def starting_stream(self, stream):\n self.cur_stream_observations = 0\n self.stream = stream", "def __del__(self):\n\n if self._is_open:\n self.close()", "def idle(self):\n stream=self.get_stream()\n if stream:\n stream.idle()", "def _sync(self):\n\n self.outfile.close()\n self.outfile = open(self.messenger.name + '-acceptor.out', 'a+')", "def _close_writable(self):\n for future in as_completed(self._write_futures):\n future.result()", "def refresh(self):\r\n # todo, use vid_info as property instead of this\r\n # reset properties and rebuild streams\r\n self.setup()" ]
[ "0.7697773", "0.6703574", "0.6626337", "0.6617042", "0.654685", "0.65099376", "0.6501303", "0.6500059", "0.64899385", "0.64335364", "0.6267602", "0.6215058", "0.61688745", "0.61634976", "0.61516905", "0.6137321", "0.61222017", "0.6106612", "0.6052286", "0.6048962", "0.6047413", "0.6017763", "0.59917367", "0.5988156", "0.5924426", "0.5905205", "0.58992106", "0.587251", "0.5841605", "0.583016", "0.5817487", "0.581146", "0.58111036", "0.58029896", "0.57862824", "0.5759722", "0.57526696", "0.57498443", "0.5718707", "0.56865656", "0.56708807", "0.56459653", "0.5644492", "0.5637213", "0.5616815", "0.56073", "0.5606763", "0.5603988", "0.56010675", "0.559786", "0.5595871", "0.5592947", "0.5584493", "0.5573997", "0.5569934", "0.55696493", "0.55664355", "0.55661905", "0.55661035", "0.55598927", "0.5558608", "0.5549723", "0.5540623", "0.5528556", "0.551434", "0.5508505", "0.55060124", "0.55057067", "0.5492155", "0.5492031", "0.54844195", "0.54834384", "0.54764825", "0.54749763", "0.54746115", "0.547038", "0.54680574", "0.54528666", "0.54518014", "0.5450622", "0.54485583", "0.54485583", "0.54485583", "0.5448482", "0.544682", "0.5442299", "0.5442299", "0.5442299", "0.5442299", "0.5442299", "0.5442299", "0.5441988", "0.5437036", "0.54330444", "0.5423911", "0.54171914", "0.5414329", "0.5407537", "0.5405202", "0.54043555" ]
0.80338305
0
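A note on the reopen_streams document above: passing buffering=0 to os.fdopen in text mode is Python 2 idiom; Python 3 raises ValueError for unbuffered text I/O. A sketch of a Python 3 equivalent, assuming unbuffered binary file descriptors wrapped in write-through text layers (the function name is hypothetical):

import io
import os
import sys

def reopen_streams_py3():
    # Binary streams may be unbuffered (buffering=0); text streams may not.
    sys.stdin = io.TextIOWrapper(os.fdopen(0, 'rb', 0))
    # write_through=True pushes every write straight to the binary layer.
    sys.stdout = io.TextIOWrapper(os.fdopen(1, 'wb', 0), write_through=True)
    sys.stderr = io.TextIOWrapper(os.fdopen(2, 'wb', 0), write_through=True)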
it's required that the object be in SORT1
def finalize(self):
    self.set_as_sort1()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sorted(self): \n pass", "def sortby(self):\n ...", "def reversesort(self):\n ...", "def set_as_sort1(self):\n if self.is_sort1:\n #if self.analysis_code == 1:\n #self.nonlinear_factor = np.nan\n #print(self.data_code)\n #print(self._times, type(self._times))\n #aaa\n return\n #print(f'{self.class_name}-{self.table_name}')\n self.table_name = SORT2_TABLE_NAME_MAP[self.table_name]\n self.sort_bits[1] = 0 # sort1\n self.sort_method = 1\n assert self.is_sort1 is True, self.is_sort1\n self._update_time_word()", "def cmp ( self, object1, object2 ):\n return cmp( object1[ self.index ], object2[ self.index ] )", "def data_for_sorting() -> NoReturn:\n raise NotImplementedError", "def data_for_sorting() -> NoReturn:\n raise NotImplementedError", "def test_insertSort2(self):\n\t\tsortObj=insertSort()\n\t\tself.assertNotEqual(sortObj.run_sort(self.test_2[0]),self.test_2[1])", "def test_insertSort(self):\n\t\tsortObj=insertSort()\n\t\tself.assertEqual(sortObj.run_sort(self.test_1[0]),self.test_1[1])", "def sort_1(l):\n pass", "def sort(self):\r\n return self.sort_targets([self])", "def test_insertSort3(self):\n\t\tsortObj=insertSort()\n\t\tself.assertEqual(sortObj.run_sort(self.test_3[0]),self.test_3[1])", "def oldsortslice(self):\n ...", "def __cmp__(self,o):\n\t\treturn cmp(self.weight,o.weight)", "def cmp ( self, object1, object2 ):\n return cmp( self.get_raw_value( object1 ),\n self.get_raw_value( object2 ) )", "def test_calc_sort_without_after_object(self):\n test_object = self.test.datum_type2\n actual = test_object._calc_sort_value(sort_base_length=3,\n increment=1,\n sort_prefix_parts=[test_object.datum_group.sort]\n )\n expected = 10101\n self.assertEqual(expected, actual)", "def test_calc_sort_with_after_object(self):\n test_object = self.test.datum_type2\n actual = test_object._calc_sort_value(after_object=self.test.datum_type1,\n sort_base_length=3,\n increment=1,\n sort_prefix_parts=[test_object.datum_group.sort]\n )\n expected = 10101\n self.assertEqual(expected, actual)", "def test_get_sort_value_without_after_object(self):\n test_object = self.test.datum_type2\n actual = test_object.get_sort_value()\n expected = 10101\n self.assertEqual(expected, actual)", "def operator(self, sort):\r\n return None", "def sort(self):\n # Sort here actually uses the tuple comparison we defined in the Card class\n self.cards.sort()", "def test_get_sort_value_with_after_object(self):\n test_object = self.test.datum_type2\n actual = test_object.get_sort_value(after_object=self.test.datum_type1)\n expected = 10101\n self.assertEqual(expected, actual)", "def custom_sort(arr):\n pass", "def sort():\n return -1", "def sort_results(self):\n pass", "def test_parse_sort(self):\n old_type = Sort('sort', [\n Relationship('student', PersonSchema(), None),\n Relationship('school', StudentSchema(), None)],\n Attribute('title', SchoolSchema(), None), '+')\n new_type = self.driver.parse(old_type)\n\n assert new_type.source == old_type\n assert old_type.relationships != new_type.relationships\n assert isinstance(new_type.relationships[0], Mapper)\n assert old_type.attribute != new_type.attribute\n assert isinstance(new_type.attribute, Column)\n assert old_type.direction == new_type.direction", "def sort_key(self):\n ...", "def sorted_map_objects(self, point, objects):\n # print objects\n # print len(objects)\n if isinstance(objects, list):\n sorted_objects = objects\n else:\n sorted_objects = [j for i in objects for j in i]\n if len(sorted_objects) > 0:\n sorted_objects.sort(key=lambda x: 
x.distance(point.get_location()), reverse=False)\n return sorted_objects", "def __init__(self): \r\n self.students = [] #list of students\r\n self.grades = {} #id Num -> list of grades\r\n self.isSorted = True", "def __init__(self, data, draw, speed):\n self.heap_sort(data, draw, speed)", "def __init__(self):\n self.data = SortedList()", "def __init__(self):\n self.students=[]\n self.grades={}\n self.isSorted=True", "def __cmp__(self, other):\n _, _ = self, other\n return 0", "def _sort_dataframe(self, dataframe):\r\n print('Not allowed')", "def test_12(self):\n num_elements = np.random.randint(1, 11)\n\n input_array = np.random.normal(size=num_elements)\n\n # We first check the sorting implementation.\n py = sorted(input_array)\n f90 = fort_debug.wrapper_sorted(input_array, num_elements)\n assert_equal(py, f90)\n\n params_spec, options_spec = generate_random_model()\n respy_obj = RespyCls(params_spec, options_spec)\n\n edu_spec, optim_paras, num_types = dist_class_attributes(\n respy_obj, \"edu_spec\", \"optim_paras\", \"num_types\"\n )\n\n args = (edu_spec[\"start\"], edu_spec[\"share\"], edu_spec[\"max\"])\n f90 = fort_debug.wrapper_sort_edu_spec(*args)\n py = sort_edu_spec(edu_spec)\n for i, label in enumerate([\"start\", \"share\", \"max\"]):\n assert_equal(py[label], f90[i])\n\n py = sort_type_info(optim_paras, num_types)\n f90 = fort_debug.wrapper_sort_type_info(optim_paras[\"type_shares\"], num_types)\n for i, label in enumerate([\"order\", \"shares\"]):\n assert_equal(py[label], f90[i])", "def __cmp__(self,o):\n\t\tif o != None:\n\t\t\treturn cmp(self.value,o.value)\n\t\telse:\n\t\t\treturn cmp(self.value,0)", "def toSortedArray(self) -> List[ghidra.util.graph.KeyedObject]:\n ...", "def sort(self, *args: Any, **kwargs: Any) -> BaseList:\n super().sort(*args, **kwargs)\n return self", "def test_sorting():\n circles = [Circle(i) for i in range(10, 1, -1)] \n sorted_circles = sorted(circles, key=Circle.sort_key)\n assert circles != sorted_circles", "def test_one_element_input(self):\n res = merge_sort([1])\n self.assertEqual(res, [1])", "def resort_couplings(J,sortIx):\n return", "def reorder( self ):\n self.sorted.sort(self.compareFunction)", "def __init__(self):\n self.students = []\n self.grades = {}\n self.is_sorted = True", "def _init_sorted_slice(self, *args, **kwargs): # real signature unknown\n pass", "def sort(self,desc):\n\tself.__sort(\"\",\"\",desc)", "def __cmp__(self, x):\n if self.score < x.score: return -1\n elif self.score == x.score: return 0\n else: return 1", "def data_missing_for_sorting() -> NoReturn:\n raise NotImplementedError", "def data_missing_for_sorting() -> NoReturn:\n raise NotImplementedError", "def deep_cmp(obj1, obj2):\n pass", "def __SortLists(self): \n\n \n AS=argsort(self.__NumList)\n\n self.__IndList=[self.__IndList[i] for i in AS]#list(self.__IndList[AS])\n self.__ObjList=[self.__ObjList[i] for i in AS]#list(self.__IndList[AS])\n self.__NumList=[self.__NumList[i] for i in AS]", "def check_sort(self):\n if self.list == []:\n return True\n seg_iter = iter(self.list)\n last = next(seg_iter)\n for segment in seg_iter:\n if last > segment:\n raise Exception('non triรฉ')\n last = segment\n return True", "def __init__(self):\n self.students = []\n self.grades = {}\n self.isSorted = True", "def testObjectReorder(self):\n self.person.invokeFactory(type_name=\"FSDCourse\", id=\"course1\")\n self.person.invokeFactory(type_name=\"FSDCourse\", id=\"course2\")\n self.person.invokeFactory(type_name=\"FSDCourse\", id=\"course3\")\n 
self.person.moveObjectsByDelta(['course3'], -100)\n self.failUnless(self.person.getObjectPosition('course3') == 0, \"FSDCourse Subobject 'course3' should be at position 0.\")", "def test_sort(self):\n # Create a new REANATemplate with an empty workflow specification and\n # a list of five parameters\n template = REANATemplate(\n workflow_spec={},\n parameters=[\n pd.parameter_declaration('A', index=1),\n pd.parameter_declaration('B'),\n pd.parameter_declaration('C'),\n pd.parameter_declaration('D', index=2),\n pd.parameter_declaration('E', index=1)\n ],\n validate=True\n )\n # Get list of sorted parameter identifier from listing\n keys = [p.identifier for p in template.list_parameter()]\n self.assertEqual(keys, ['B', 'C', 'A', 'E', 'D'])", "def object_list(self):\n\n def _sort(ob, ol):\n reverse = ob.startswith(\"-\")\n ob = ob[1:] if reverse else ob\n for column in self.columns:\n if column.sort_key_fn is not None and column.name == ob:\n return sorted(ol, key=column.sort_key_fn, reverse=reverse)\n if self._meta.order_by and hasattr(ol, \"order_by\"):\n return ol.order_by(*self._meta.order_by.split(\"|\"))\n return ol\n\n ol = self._object_list\n ob = self._meta.order_by\n if not ob: return ol\n if isinstance(ob, basestring):\n return _sort(ob, ol)\n elif isinstance(ob, list):\n ob.reverse()\n for fn in ob:\n ol = _sort(fn, ol)\n return ol", "def keysort(*args, **kwargs): # real signature unknown\n pass", "def sort_table(table, sats_table):", "def sort(self, key_func):\n pass", "def __str__(self):\n return 'predsort'", "def sort(self):\n self.cards.sort()", "def sort(self):\n self.cards.sort()", "def sort_by_default(self):\n self.data.sort()", "def comparator(self) -> typing.Callable[[Vec, Vec, Term], bool]:\n pass", "def _sort(self):\n self.population.sort()\n self.population.reverse()", "def order_filter(self,elements):", "def sorted(x) -> List:\n pass", "def test_sort(self):\n a, b, c, d = Node('a'), Node('b'), Node('c'), Node('d')\n a | b | c\n a * 'foo' | 'bar' * c\n d | 'baz' * b\n nodes = topo_sort([a, d])\n self.assertEqual(set(nodes[:2]), set([a, d]))\n self.assertEqual(nodes[2:], [b, c])", "def __cmp__(self, other):\n if self.get_id() < other.get_id():\n return -1\n elif self.get_id() == self.get_id():\n return 0\n else:\n return 1", "def objects(self):", "def sort_0(l):\n l.sort()", "def add_sort1(self, dt, eid, grid, angle, sc, sd, se, sf, omax, omin, mst, msc):\n assert isinstance(eid, integer_types) and eid > 0, 'dt=%s eid=%s' % (dt, eid)\n self._times[self.itime] = dt\n self.data[self.itime, self.itotal] = [angle, sc, sd, se, sf, omax, omin, mst, msc]\n self.element_node[self.itotal] = [eid, grid]\n #self.ielement += 1\n self.itotal += 1", "def sort_list(self,list_):\r\n list_.sort()", "def add_sort1(self, dt, eid, grid, angle, sc, sd, se, sf):\n assert isinstance(eid, integer_types) and eid > 0, 'dt=%s eid=%s' % (dt, eid)\n self._times[self.itime] = dt\n self.data[self.itime, self.itotal] = [angle, sc, sd, se, sf]\n self.element_node[self.itotal] = [eid, grid]\n #self.ielement += 1\n self.itotal += 1", "def test_1(self):\r\n r1, r2, r5 = MyVariable(1), MyVariable(2), MyVariable(5)\r\n o = MyOp.make_node(r1, r1)\r\n o2 = MyOp.make_node(o.outputs[0], r5)\r\n all = general_toposort(o2.outputs, prenode)\r\n assert all == [r5, r1, o, o.outputs[0], o2, o2.outputs[0]]", "def compute_sort_id_assignment(lang, start=0):\n bounds, ids = {}, {}\n _compute_id_assignment(lang, lang.Object, ids, bounds, compute_direct_sort_map(lang), start=start)\n return bounds, ids", "def run(self):\n 
self.model.sort(0)\n self.sort_object.task_complete.emit()", "def sort_priors(self):\n return", "def __init__(self):\n self._keys = []\n self._sortKeys = []", "def __cmp__(self, other):\n return cmp(self.id, other.id)", "def order(self):\n raise NotImplementedError()", "def __cmp__(self, other):\n if not isinstance(other, PolyhedronRepresentation):\n return -1\n return cmp(type(self), type(other)) or cmp(self._vector, other._vector)", "def __cmp__(self, other):\n\t\treturn cmp (self.name, other.name)", "def test_one():\n run_mergesort([1], [1])", "def pre_sort(self, qs):\n return qs", "def get_sort_query(self, kind, order, is_number):\n pass", "def post_sort(self, qs):\n return qs", "def sort(self, sort):\n\n self._sort = sort", "def sort(self):\r\n\t\treturn sorted(self.sample)", "def __init__(self, compare=cmp):\n self.compare = compare\n self.array = []\n self.pos = {}", "def toSortedArray(self, keyedObjects: List[ghidra.util.graph.KeyedObject]) -> List[ghidra.util.graph.KeyedObject]:\n ...", "def sub_comparison(obj1,obj2,translate):\n return [Difference(f\"{obj1.__class__.__name__} > {meth.__name__}\",result) for (meth,attr) in translate if (result := meth(getattr(obj1,attr),getattr(obj2,attr))) is not None]", "def __lt__(self, other):\n\n return self._ordinals < other.ordinal()", "def sort(self):\n self.notes.sort()", "def __init__(self, new_sorts, supersorts):\r\n global crt_sorts\r\n crt_sorts = new_sorts\r\n \r\n super(SortDecl, self).__init__()\r\n self.new_sorts = new_sorts\r\n self.supersorts = supersorts", "def sorted_traversal(self):\n\t\tself.__sorted_traversal(self)", "def orderby():\n pass", "def sort(self, *args, **kargs):\n list.sort(self, *args, **kargs)\n self.emit('modified')", "def test_two_element_input(self):\n res = merge_sort([2, 1])\n self.assertEqual(res, [1, 2])", "def precalculate(self):\n self._sorted = np.argsort(\n self.base[self._array], kind='mergesort') # mergesort for stability\n self._boundaries = util.find_boundaries(\n self.base[self._array][self._sorted])", "def __cmp__(self, other):\n s = self[0:2]\n try:\n o = other[0:2]\n except:\n o = (other,)\n if s != o:\n return -1 if s < o else 1\n try:\n if self.data == other.data:\n return 0\n return -1 if self.data < other.data else 1\n except TypeError:\n s = type(self.data).__name__\n o = type(other.data).__name__\n if s == o:\n return 0\n return -1 if s < o else 1", "def __init__(self, new_sorts, supersorts):\n global crt_sorts\n crt_sorts = new_sorts\n \n super(SortDecl, self).__init__()\n self.new_sorts = new_sorts\n self.supersorts = supersorts" ]
[ "0.6844052", "0.6644081", "0.64807296", "0.63492376", "0.6294889", "0.6287909", "0.6287909", "0.62629074", "0.6159198", "0.6130732", "0.60078466", "0.6003925", "0.58911306", "0.5833647", "0.5817837", "0.57982904", "0.5785331", "0.5744834", "0.5734182", "0.57251596", "0.56913763", "0.5667243", "0.565625", "0.5633989", "0.5605356", "0.5547251", "0.5539671", "0.553549", "0.55115646", "0.5475472", "0.54742104", "0.54372007", "0.5436672", "0.5427795", "0.54121286", "0.54025435", "0.5400598", "0.5399121", "0.53990436", "0.5370075", "0.5367525", "0.53485256", "0.53473747", "0.5347202", "0.5337552", "0.5315501", "0.5315501", "0.53124446", "0.5305", "0.5280677", "0.52543175", "0.52491564", "0.52468014", "0.52381414", "0.5237486", "0.5235297", "0.5228534", "0.5226959", "0.5218224", "0.5218224", "0.52174026", "0.52036047", "0.5192699", "0.5185656", "0.5170991", "0.5166705", "0.51481384", "0.5143896", "0.51432025", "0.51346093", "0.51346076", "0.5126009", "0.5121452", "0.5117598", "0.5116085", "0.5111196", "0.51015455", "0.5099645", "0.50983393", "0.5097854", "0.50895894", "0.50867975", "0.5082967", "0.5079778", "0.5077009", "0.50728375", "0.5071417", "0.50698274", "0.50577116", "0.50557196", "0.50506896", "0.5050461", "0.5044355", "0.504415", "0.50424904", "0.50278974", "0.5026034", "0.50224155", "0.5021405", "0.50205445" ]
0.60771483
10
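A note on the finalize document above (the next record's set_as_sort1 shows the full fix): SORT1 vs SORT2 is tracked redundantly, in the table-name suffix and in one entry of sort_bits, and the two must agree. A toy illustration of that convention — the class and the one-entry name map are hypothetical stand-ins, not the library's API:

# Hypothetical one-entry stand-in for the real SORT2 -> SORT1 name map.
SORT2_TABLE_NAME_MAP = {'OUGV2': 'OUGV1'}

class ToyTable:
    def __init__(self, table_name, sort_bits):
        self.table_name = table_name
        self.sort_bits = sort_bits      # entry [1]: 0 -> SORT1, 1 -> SORT2
        self.sort_method = sort_bits[1] + 1

    @property
    def is_sort1(self):
        return self.sort_bits[1] == 0

    def set_as_sort1(self):
        if self.is_sort1:
            return  # flags already consistent; nothing to do
        # Rename the table and flip the flag together so they stay in sync.
        self.table_name = SORT2_TABLE_NAME_MAP[self.table_name]
        self.sort_bits[1] = 0
        self.sort_method = 1

table = ToyTable('OUGV2', [0, 1, 0])
table.set_as_sort1()
assert table.is_sort1 and table.table_name == 'OUGV1'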
the data is in SORT1, but the flags are wrong
def set_as_sort1(self):
    if self.is_sort1:
        #if self.analysis_code == 1:
            #self.nonlinear_factor = np.nan
        #print(self.data_code)
        #print(self._times, type(self._times))
        #aaa
        return
    #print(f'{self.class_name}-{self.table_name}')
    self.table_name = SORT2_TABLE_NAME_MAP[self.table_name]
    self.sort_bits[1] = 0  # sort1
    self.sort_method = 1
    assert self.is_sort1 is True, self.is_sort1
    self._update_time_word()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def data_for_sorting() -> NoReturn:\n raise NotImplementedError", "def data_for_sorting() -> NoReturn:\n raise NotImplementedError", "def reversesort(self):\n ...", "def oldsortslice(self):\n ...", "def data_for_sorting():\n return RaggedArray([[1, 0], [2, 0], [0, 0]])", "def sortKey( self, mode, matrix ):\n # TODO: figure out how to handle \n return False,[],None", "def sort():\n return -1", "def _sort_index(self):\n\n allAltPos = np.array(sorted(list(set(list(self.data['altitude'])))))[::-1]\n allAziPos = np.array(sorted(list(set(list(self.data['azimuth'])))))\n\n indON = [[None for azi in allAziPos] for alt in allAltPos]; indOFF = [[None for azi in allAziPos] for alt in allAltPos]\n\n for i, traceItem in enumerate(self.data):\n alt = traceItem['altitude'];azi = traceItem['azimuth'];sign = traceItem['sign']\n for j, altPos in enumerate(allAltPos):\n for k, aziPos in enumerate(allAziPos):\n if alt==altPos and azi==aziPos:\n if sign==1:\n if indON[j][k] is not None: raise LookupError('Duplication of trace items found at location:'+str([alt, azi])+'; sign: 1!')\n else: indON[j][k]=i\n\n if sign==-1:\n if indOFF[j][k] is not None: raise LookupError('Duplication of trace items found at location:'+str([alt, azi])+'; sign:-1!')\n else: indOFF[j][k]=i\n\n indON = np.array([np.array(x) for x in indON]); indOFF = np.array([np.array(x) for x in indOFF])\n\n return indON,indOFF,allAltPos,allAziPos", "def extract_pattern(DataL, pattern, min_distance, max_distance, threshold):\n from collections import defaultdict\n DataL = [list(line) for line in DataL]\n n = len(pattern)\n\n # Create a string with all signs (5th column in the sorted BED file)\n signs = \"\"\n for line in DataL:\n signs += line[5]\n\n if pattern == \"basic\":\n same_total=0;opposite_total=0;\n same=0;opposite=0;\n SameL = defaultdict(int); OppositeL= defaultdict(int);\n DistancesL_same=[];DistancesL_opposite=[];DataSignificantL_same=[];DataSignificantL_opposite=[];\n DataL_temp = [DataL[0]]\n\n for i in range(1, len(DataL)):\n distance = max(0, int(DataL[i][1]) - int(DataL[i-1][2]))\n if (distance >= min_distance and distance <= max_distance):\n \n if DataL[i-1][5]==DataL[i][5] and DataL[i-1][5] in [\"+\",\"-\"]:\n same+=1;same_total+=1\n DistancesL_same.append(distance)\n\n if opposite>0:\n OppositeL[opposite]+=1\n if 0.5**opposite<threshold:\n DataSignificantL_opposite+=DataL_temp\n DataL_temp=[];\n\n DataL_temp+=[DataL[i]]\n opposite=0;\n \n \n if (DataL[i-1][5]==\"+\" and DataL[i][5]==\"-\") or (DataL[i-1][5]==\"-\" and DataL[i][5]==\"+\"): \n opposite+=1;opposite_total+=1;\n DistancesL_opposite.append(distance)\n\n if same>0:\n SameL[same]+=1\n if 0.5**same<threshold:\n DataSignificantL_same+=DataL_temp\n DataL_temp=[];\n \n DataL_temp+=[DataL[i]]\n same=0;\n\n else:\n \n if same > 0:\n SameL[same]+=1\n if 0.5**same<threshold:\n DataSignificantL_same+=DataL_temp\n\n if opposite > 0:\n OppositeL[opposite]+=1\n if 0.5**opposite<threshold:\n DataSignificantL_opposite+=DataL_temp\n\n DataL_temp=[];\n same=0;opposite = 0;\n \t\n return SameL,OppositeL,DistancesL_same,DistancesL_opposite,DataSignificantL_same,DataSignificantL_opposite,same_total,opposite_total\n\n # Find all occurences of the pattern in the string of signs without accounting for distances\n occs = list(find_sub_str(signs, pattern))\n\n # Remove occurences that do not meet the distance criterion\n for i in range(len(occs)):\n index = occs[i]\n for j in range(n - 1):\n distance = max(0, int(DataL[index + j + 1][1]) - int(DataL[index + j][2]))\n if distance < 
min_distance or distance > max_distance:\n occs[i] = None\n break\n occs = [x for x in occs if x is not None]\n\n # If no occurence of the pattern was found, return empty lists\n if not occs:\n return [], [], []\n\n # Here we translate the probability threshold to consecutive occcurrences\n number_of_tests = len(DataL)\n total_plus = signs.count(\"+\")\n total_minus = signs.count(\"-\")\n probability = {}\n probability[\"+\"] = total_plus / float(total_plus + total_minus)\n probability[\"-\"] = 1 - probability[\"+\"]\n probability_pattern = np.prod([probability[k] for k in list(pattern)])\n # Warn the user if probability_pattern == 1\n if probability_pattern == 1:\n msg = \"The probability of this pattern being found at any given point of the file was calculated to be 1. \" \\\n \"This usually would happen in degenerate cases (e.g. looking for pattern \\\"+\\\" in a file consisting \" \\\n \"only of + and could result in unpredictable outputs\"\n warnings.warn(msg)\n # Lowest consecutive number of patterns that with probability of appearning lower than the threshold (p-value)\n from math import ceil, log\n consecutive_threshold = ceil(log(threshold / number_of_tests) / log(probability_pattern))\n\n # Filter for number of consecutive occurences that meet the threshold criterion and are within the distance window\n DataL_significant = []\n counter = 1\n DataL_temp = DataL[occs[0]:occs[0] + n];\n consecutiveL = defaultdict(int);\n for i in range(1, len(occs)):\n index = occs[i]\n distance = max(0, int(DataL[index][1]) - int(DataL[index - 1][2]))\n if occs[i] - occs[i - 1] == n and (distance >= min_distance and distance <= max_distance):\n counter += 1\n DataL_temp.extend(DataL[index:index + n])\n else:\n consecutiveL[counter] += 1\n if counter >= consecutive_threshold:\n # Add p_value to list for the number of consecutive occurences of the pattern\n p_value = (probability_pattern ** counter) * number_of_tests\n for line in DataL_temp:\n line.append(p_value)\n DataL_significant.extend(DataL_temp)\n DataL_temp.clear()\n counter = 1\n\n # Add last lines\n if counter >= consecutive_threshold:\n p_value = (probability_pattern ** counter) * number_of_tests\n for line in DataL_temp:\n line.append(p_value)\n DataL_significant.extend(DataL_temp)\n consecutiveL[counter] += 1\n\n distancesL = []\n for i in range(len(occs) - 1):\n index = occs[i]\n index_next = occs[i + 1]\n distance = max(0, int(DataL[occs[i + 1]][1]) - int(DataL[occs[i] + n - 1][2]))\n distancesL.append(distance)\n\n return consecutiveL, distancesL, DataL_significant", "def sortarai(self, datablock, s, Zdiff):\n\n first_Z, first_I, zptrm_check, ptrm_check, ptrm_tail = [], [], [], [], []\n field, phi, theta = \"\", \"\", \"\"\n starthere = 0\n Treat_I, Treat_Z, Treat_PZ, Treat_PI, Treat_M, Treat_AC = [], [], [], [], [], []\n ISteps, ZSteps, PISteps, PZSteps, MSteps, ACSteps = [], [], [], [], [], []\n GammaChecks = [] # comparison of pTRM direction acquired and lab field\n Mkeys = ['measurement_magn_moment', 'measurement_magn_volume',\n 'measurement_magn_mass', 'measurement_magnitude']\n rec = datablock[0]\n for key in Mkeys:\n if key in list(rec.keys()) and rec[key] != \"\":\n momkey = key\n break\n # first find all the steps\n for k in range(len(datablock)):\n rec = datablock[k]\n if 'treat_mw_step' in list(rec.keys()) and rec['treat_mw_step'] is None: rec['treat_mw_step']=\"\"\n if 'treatment_mw_integral' in list(rec.keys()) and rec['treatment_mw_integral'] is None: rec['treatment_mw_integral']=\"\"\n if 'treatment_mw_power' in 
list(rec.keys()) and rec['treatment_mw_power'] is None: rec['treatment_mw_power']=\"\"\n if 'treatment_temp' in list(rec.keys()) and rec['treatment_temp'] is None:rec['treatment_temp']=\"\"\n if \"treat_mw_step\" in list(rec.keys()) and rec[\"treat_mw_step\"]!=\"\":\n\n THERMAL = False\n MICROWAVE = True\n temp = float(rec[\"treat_mw_step\"])\n elif \"treatment_mw_integral\" in list(rec.keys()) and rec[\"treatment_mw_integral\"]!=\"\":\n THERMAL = False\n MICROWAVE = True\n if \"measurement_description\" in list(rec.keys()):\n MW_step = rec[\"measurement_description\"].strip(\n '\\n').split(\":\")\n for STEP in MW_step:\n if \"Number\" in STEP:\n temp = float(STEP.split(\"-\")[-1])\n elif \"treatment_mw_power\" in list(rec.keys()) and rec[\"treatment_mw_power\"]!=\"\":\n THERMAL = False\n MICROWAVE = True\n if \"measurement_description\" in list(rec.keys()):\n MW_step = rec[\"measurement_description\"].strip(\n '\\n').split(\":\")\n for STEP in MW_step:\n if \"Number\" in STEP:\n temp = float(STEP.split(\"-\")[-1])\n elif \"treatment_temp\" in list(rec.keys()) and rec[\"treatment_temp\"]!=\"\":\n temp = float(rec[\"treatment_temp\"])\n THERMAL = True\n MICROWAVE = False\n methcodes = []\n tmp = rec[\"magic_method_codes\"].split(\":\")\n for meth in tmp:\n methcodes.append(meth.strip())\n # for thellier-thellier\n if 'LT-T-I' in methcodes and 'LP-PI-TRM' in methcodes and 'LP-TRM' not in methcodes:\n Treat_I.append(temp)\n ISteps.append(k)\n if field == \"\":\n field = float(rec[\"treatment_dc_field\"])\n if phi == \"\":\n phi = float(rec['treatment_dc_field_phi'])\n theta = float(rec['treatment_dc_field_theta'])\n\n # for Microwave\n if 'LT-M-I' in methcodes and 'LP-PI-M' in methcodes:\n Treat_I.append(temp)\n ISteps.append(k)\n if field == \"\":\n field = float(rec[\"treatment_dc_field\"])\n if phi == \"\":\n phi = float(rec['treatment_dc_field_phi'])\n theta = float(rec['treatment_dc_field_theta'])\n\n # stick first zero field stuff into first_Z\n if 'LT-NO' in methcodes:\n Treat_Z.append(temp)\n ZSteps.append(k)\n if \"LT-AF-Z\" in methcodes and 'treatment_ac_field' in list(rec.keys()):\n if rec['treatment_ac_field'] != \"\":\n AFD_after_NRM = True\n # consider AFD before T-T experiment ONLY if it comes before\n # the experiment\n for i in range(len(first_I)):\n # check if there was an infield step before the AFD\n if float(first_I[i][3]) != 0:\n AFD_after_NRM = False\n if AFD_after_NRM:\n AF_field = 0\n if 'treatment_ac_field' in rec:\n try:\n AF_field = float(rec['treatment_ac_field']) * 1000\n except ValueError:\n pass\n\n dec = float(rec[\"measurement_dec\"])\n inc = float(rec[\"measurement_inc\"])\n intensity = float(rec[momkey])\n first_I.append([273. - AF_field, 0., 0., 0., 1])\n first_Z.append(\n [273. 
- AF_field, dec, inc, intensity, 1]) # NRM step\n if 'LT-T-Z' in methcodes or 'LT-M-Z' in methcodes:\n Treat_Z.append(temp)\n ZSteps.append(k)\n if 'LT-PTRM-Z':\n Treat_PZ.append(temp)\n PZSteps.append(k)\n if 'LT-PTRM-I' in methcodes or 'LT-PMRM-I' in methcodes:\n Treat_PI.append(temp)\n PISteps.append(k)\n if 'LT-PTRM-MD' in methcodes or 'LT-PMRM-MD' in methcodes:\n Treat_M.append(temp)\n MSteps.append(k)\n if 'LT-PTRM-AC' in methcodes or 'LT-PMRM-AC' in methcodes:\n Treat_AC.append(temp)\n ACSteps.append(k)\n if 'LT-NO' in methcodes:\n dec = float(rec[\"measurement_dec\"])\n inc = float(rec[\"measurement_inc\"])\n moment = float(rec[\"measurement_magn_moment\"])\n if 'LP-PI-M' not in methcodes:\n first_I.append([273, 0., 0., 0., 1])\n first_Z.append([273, dec, inc, moment, 1]) # NRM step\n else:\n first_I.append([0, 0., 0., 0., 1])\n first_Z.append([0, dec, inc, moment, 1]) # NRM step\n\n #---------------------\n # find IZ and ZI\n #---------------------\n\n for temp in Treat_I: # look through infield steps and find matching Z step\n if temp in Treat_Z: # found a match\n istep = ISteps[Treat_I.index(temp)]\n irec = datablock[istep]\n methcodes = []\n tmp = irec[\"magic_method_codes\"].split(\":\")\n for meth in tmp:\n methcodes.append(meth.strip())\n # take last record as baseline to subtract\n brec = datablock[istep - 1]\n zstep = ZSteps[Treat_Z.index(temp)]\n zrec = datablock[zstep]\n # sort out first_Z records\n # check if ZI/IZ in in method codes:\n ZI = \"\"\n if \"LP-PI-TRM-IZ\" in methcodes or \"LP-PI-M-IZ\" in methcodes or \"LP-PI-IZ\" in methcodes:\n ZI = 0\n elif \"LP-PI-TRM-ZI\" in methcodes or \"LP-PI-M-ZI\" in methcodes or \"LP-PI-ZI\" in methcodes:\n ZI = 1\n elif \"LP-PI-BT-IZZI\" in methcodes:\n ZI == \"\"\n i_intex, z_intex = 0, 0\n foundit = False\n for i in range(len(datablock)):\n if THERMAL:\n if ('treatment_temp' in list(datablock[i].keys()) and float(temp) == float(datablock[i]['treatment_temp'])):\n foundit = True\n if MICROWAVE:\n if ('treat_mw_step' in list(datablock[i].keys())):\n ThisStep=float(datablock[i]['treat_mw_step'])\n if ThisStep == float(temp):\n foundit = True\n\n elif ('measurement_description' in list(datablock[i].keys())):\n MW_step = datablock[i][\"measurement_description\"].strip(\n '\\n').split(\":\")\n for STEP in MW_step:\n if \"Number\" in STEP:\n ThisStep = float(STEP.split(\"-\")[-1])\n if ThisStep == float(temp):\n foundit = True\n if foundit:\n if \"LT-T-Z\" in datablock[i]['magic_method_codes'].split(\":\") or \"LT-M-Z\" in datablock[i]['magic_method_codes'].split(\":\"):\n z_intex = i\n if \"LT-T-I\" in datablock[i]['magic_method_codes'].split(\":\") or \"LT-M-I\" in datablock[i]['magic_method_codes'].split(\":\"):\n i_intex = i\n foundit = False\n\n if z_intex < i_intex:\n ZI = 1\n else:\n ZI = 0\n dec = float(zrec[\"measurement_dec\"])\n inc = float(zrec[\"measurement_inc\"])\n str = float(zrec[momkey])\n first_Z.append([temp, dec, inc, str, ZI])\n # sort out first_I records\n idec = float(irec[\"measurement_dec\"])\n iinc = float(irec[\"measurement_inc\"])\n istr = float(irec[momkey])\n X = pmag.dir2cart([idec, iinc, istr])\n BL = pmag.dir2cart([dec, inc, str])\n I = []\n for c in range(3):\n I.append((X[c] - BL[c]))\n if I[2] != 0:\n iDir = pmag.cart2dir(I)\n if Zdiff == 0:\n first_I.append([temp, iDir[0], iDir[1], iDir[2], ZI])\n else:\n first_I.append([temp, 0., 0., I[2], ZI])\n# gamma=angle([iDir[0],iDir[1]],[phi,theta])\n else:\n first_I.append([temp, 0., 0., 0., ZI])\n# gamma=0.0\n# put in Gamma check (infield trm versus lab 
field)\n# if 180.-gamma<gamma:\n# gamma=180.-gamma\n# GammaChecks.append([temp-273.,gamma])\n\n #---------------------\n # find Thellier Thellier protocol\n #---------------------\n if 'LP-PI-II'in methcodes or 'LP-PI-T-II' in methcodes or 'LP-PI-M-II' in methcodes:\n # look through infield steps and find matching Z step\n for i in range(1, len(Treat_I)):\n if Treat_I[i] == Treat_I[i - 1]:\n # ignore, if there are more than\n temp = Treat_I[i]\n irec1 = datablock[ISteps[i - 1]]\n dec1 = float(irec1[\"measurement_dec\"])\n inc1 = float(irec1[\"measurement_inc\"])\n moment1 = float(irec1[\"measurement_magn_moment\"])\n if len(first_I) < 2:\n dec_initial = dec1\n inc_initial = inc1\n cart1 = np.array(pmag.dir2cart([dec1, inc1, moment1]))\n irec2 = datablock[ISteps[i]]\n dec2 = float(irec2[\"measurement_dec\"])\n inc2 = float(irec2[\"measurement_inc\"])\n moment2 = float(irec2[\"measurement_magn_moment\"])\n cart2 = np.array(pmag.dir2cart([dec2, inc2, moment2]))\n\n # check if its in the same treatment\n if Treat_I[i] == Treat_I[i - 2] and dec2 != dec_initial and inc2 != inc_initial:\n continue\n if dec1 != dec2 and inc1 != inc2:\n zerofield = (cart2 + cart1) / 2\n infield = (cart2 - cart1) / 2\n\n DIR_zerofield = pmag.cart2dir(zerofield)\n DIR_infield = pmag.cart2dir(infield)\n\n first_Z.append(\n [temp, DIR_zerofield[0], DIR_zerofield[1], DIR_zerofield[2], 0])\n first_I.append(\n [temp, DIR_infield[0], DIR_infield[1], DIR_infield[2], 0])\n\n #---------------------\n # find pTRM checks\n #---------------------\n\n for i in range(len(Treat_PI)): # look through infield steps and find matching Z step\n\n temp = Treat_PI[i]\n k = PISteps[i]\n rec = datablock[k]\n dec = float(rec[\"measurement_dec\"])\n inc = float(rec[\"measurement_inc\"])\n moment = float(rec[\"measurement_magn_moment\"])\n phi = float(rec[\"treatment_dc_field_phi\"])\n theta = float(rec[\"treatment_dc_field_theta\"])\n M = np.array(pmag.dir2cart([dec, inc, moment]))\n\n foundit = False\n if 'LP-PI-II' not in methcodes:\n # Important: suport several pTRM checks in a row, but\n # does not support pTRM checks after infield step\n for j in range(k, 1, -1):\n if \"LT-M-I\" in datablock[j]['magic_method_codes'] or \"LT-T-I\" in datablock[j]['magic_method_codes']:\n after_zerofield = 0.\n foundit = True\n prev_rec = datablock[j]\n zerofield_index = j\n break\n if float(datablock[j]['treatment_dc_field']) == 0:\n after_zerofield = 1.\n foundit = True\n prev_rec = datablock[j]\n zerofield_index = j\n break\n else: # Thellier-Thellier protocol\n foundit = True\n prev_rec = datablock[k - 1]\n zerofield_index = k - 1\n if foundit:\n prev_dec = float(prev_rec[\"measurement_dec\"])\n prev_inc = float(prev_rec[\"measurement_inc\"])\n prev_moment = float(prev_rec[\"measurement_magn_moment\"])\n prev_phi = float(prev_rec[\"treatment_dc_field_phi\"])\n prev_theta = float(prev_rec[\"treatment_dc_field_theta\"])\n prev_M = np.array(pmag.dir2cart(\n [prev_dec, prev_inc, prev_moment]))\n\n if 'LP-PI-II' not in methcodes:\n diff_cart = M - prev_M\n diff_dir = pmag.cart2dir(diff_cart)\n if after_zerofield == 0:\n ptrm_check.append(\n [temp, diff_dir[0], diff_dir[1], diff_dir[2], zerofield_index, after_zerofield])\n else:\n ptrm_check.append(\n [temp, diff_dir[0], diff_dir[1], diff_dir[2], zerofield_index, after_zerofield])\n else:\n # health check for T-T protocol:\n if theta != prev_theta:\n diff = (M - prev_M) / 2\n diff_dir = pmag.cart2dir(diff)\n ptrm_check.append(\n [temp, diff_dir[0], diff_dir[1], diff_dir[2], zerofield_index, \"\"])\n else:\n 
print(\n \"-W- WARNING: specimen. pTRM check not in place in Thellier Thellier protocol. step please check\")\n\n #---------------------\n # find Tail checks\n #---------------------\n\n for temp in Treat_M:\n # print temp\n step = MSteps[Treat_M.index(temp)]\n rec = datablock[step]\n dec = float(rec[\"measurement_dec\"])\n inc = float(rec[\"measurement_inc\"])\n moment = float(rec[\"measurement_magn_moment\"])\n foundit = False\n for i in range(1, len(datablock)):\n if 'LT-T-Z' in datablock[i]['magic_method_codes'] or 'LT-M-Z' in datablock[i]['magic_method_codes']:\n if (THERMAL and \"treatment_temp\" in list(datablock[i].keys()) and float(datablock[i][\"treatment_temp\"]) == float(temp))\\\n or (MICROWAVE and \"measurement_description\" in list(datablock[i].keys()) and \"Step Number-%.0f\" % float(temp) in datablock[i][\"measurement_description\"]):\n prev_rec = datablock[i]\n prev_dec = float(prev_rec[\"measurement_dec\"])\n prev_inc = float(prev_rec[\"measurement_inc\"])\n prev_moment = float(\n prev_rec[\"measurement_magn_moment\"])\n foundit = True\n break\n\n if foundit:\n ptrm_tail.append([temp, 0, 0, moment - prev_moment])\n\n #\n # final check\n #\n if len(first_Z) != len(first_I):\n print(len(first_Z), len(first_I))\n print(\" Something wrong with this specimen! Better fix it or delete it \")\n input(\" press return to acknowledge message\")\n\n #---------------------\n # find Additivity (patch by rshaar)\n #---------------------\n\n additivity_check = []\n for i in range(len(Treat_AC)):\n step_0 = ACSteps[i]\n temp = Treat_AC[i]\n dec0 = float(datablock[step_0][\"measurement_dec\"])\n inc0 = float(datablock[step_0][\"measurement_inc\"])\n moment0 = float(datablock[step_0]['measurement_magn_moment'])\n V0 = pmag.dir2cart([dec0, inc0, moment0])\n # find the infield step that comes before the additivity check\n foundit = False\n for j in range(step_0, 1, -1):\n if \"LT-T-I\" in datablock[j]['magic_method_codes']:\n foundit = True\n break\n if foundit:\n dec1 = float(datablock[j][\"measurement_dec\"])\n inc1 = float(datablock[j][\"measurement_inc\"])\n moment1 = float(datablock[j]['measurement_magn_moment'])\n V1 = pmag.dir2cart([dec1, inc1, moment1])\n # print \"additivity check: \",s\n # print j\n # print \"ACC=V1-V0:\"\n # print \"V1=\",[dec1,inc1,moment1],pmag.dir2cart([dec1,inc1,moment1])/float(datablock[0][\"measurement_magn_moment\"])\n # print \"V1=\",pmag.dir2cart([dec1,inc1,moment1])/float(datablock[0][\"measurement_magn_moment\"])\n # print \"V0=\",[dec0,inc0,moment0],pmag.dir2cart([dec0,inc0,moment0])/float(datablock[0][\"measurement_magn_moment\"])\n # print \"NRM=\",float(datablock[0][\"measurement_magn_moment\"])\n # print \"-------\"\n\n I = []\n for c in range(3):\n I.append(V1[c] - V0[c])\n dir1 = pmag.cart2dir(I)\n additivity_check.append([temp, dir1[0], dir1[1], dir1[2]])\n # print\n # \"I\",np.array(I)/float(datablock[0][\"measurement_magn_moment\"]),dir1,\"(dir1\n # unnormalized)\"\n X = np.array(I) / \\\n float(datablock[0][\"measurement_magn_moment\"])\n # print \"I\",np.sqrt(sum(X**2))\n araiblock = (first_Z, first_I, ptrm_check, ptrm_tail,\n zptrm_check, GammaChecks, additivity_check)\n\n return araiblock, field", "def sortby(self):\n ...", "def _sort_phot(self, verbose=False):\n if hasattr(self, \"data\") and hasattr(self, \"data_filters\"):\n ## This looks fugly.\n newkeys = np.array([i for i in self.data_filters.keys()])[np.argsort([self.data_filters[i].lambda_effective.value for i in self.data_filters])]\n\n sorted_data = OrderedDict()\n 
sorted_data_filters = OrderedDict()\n\n for newkey in newkeys:\n\n if verbose: print(newkey)\n\n sorted_data[newkey] = self.data[newkey]\n sorted_data_filters[newkey] = self.data_filters[newkey]\n\n self.data = sorted_data\n self.data_filters = sorted_data_filters\n\n else:\n warnings.warn(\"Doesn't seem to be any data here (empty self.data)\")\n pass", "def do_dtv_flagging2(data, freqs):\n \n mask = data.mask*1\n dtv_times = []\n \n for ledge in (54, 60, 66, 76, 82):\n uedge = ledge + 6\n band = np.where( (freqs>=ledge) & (freqs<=uedge) )[0]\n trns = np.where( (freqs>=ledge+0.25) & (freqs<=uedge-0.25) )[0]\n empt = np.where( ((freqs>=ledge-0.25) & (freqs<ledge+0.25)) | ((freqs>uedge-0.25) & (freqs<=uedge+0.25)) )[0]\n \n pB = np.mean(data.data[:,band], axis=1)\n pT = np.mean(data.data[:,trns], axis=1)\n pE = np.mean(data.data[:,empt], axis=1)\n \n #import pylab\n #pylab.plot(pB-pE)\n #pylab.plot(pT-pE)\n #pylab.plot(pE-1)\n #pylab.plot(pE*0 + 3*pE.std())\n #pylab.show()\n \n st = np.std(pE)\n bad = np.where( np.abs(pT-pE) > 3*st )[0]\n for b in bad:\n\t dtv_times.append(b)\n mask[b,band] |= True\n if b > 1:\n mask[b-1,band] |= True\n\t\t dtv_times.append(b-1)\n if b < data.shape[0]-2:\n mask[b+1,band] |= True\n\t dtv_times.append(b+1)\n \n dtv_times = sorted(dtv_times)\n\n data.mask = mask*1\n return data.mask, list(set(dtv_times))", "def sort_table(table, sats_table):", "def _build_sort1_table(key_itime, keys_map, header_dict,\n form, form_results, form_resultsi,\n disp_dict, stress_dict, strain_dict, force_dict,\n strain_energy_dict, gpstress_dict, log):\n is_results = False\n form_resultsi_subcase = []\n #for key, value in header_dict.items():\n #print(key, value)\n # (isubcase, analysis_code, sort_method,\n # count, ogs, superelement_adaptivity_index) = key\n key_itime0 = key_itime[0]\n key0 = key_itime0[0]\n # (isubcase, analysis_code, sort_method,\n # count, ogs, superelement_adaptivity_index, pval_step) = key\n subcase_id_old = key0[0]\n count_old = key0[3]\n ogs_old = key0[4]\n subtitle_old = key0[5]\n subtitle_old, label_old, superelement_adaptivity_index_old, unused_pval_step_old = keys_map[key0]\n del label_old\n del superelement_adaptivity_index_old\n\n # now that we have the data built, we put it in the form\n # in sorted order\n #\n # TODO: consider pval_step\n for key, itime in key_itime:\n # (isubcase, analysis_code, sort_method,\n # count, ogs, superelement_adaptivity_index, pval_step) = key\n #print('key =', key)\n subcase_id = key[0]\n count = key[3]\n ogs = key[4]\n #print('*ogs =', ogs)\n #subtitle = key[4]\n try:\n subtitle, unused_label, superelement_adaptivity_index, unused_pval_step = keys_map[key]\n except Exception:\n subcase_id = subcase_id_old\n subtitle = subtitle_old + '?'\n superelement_adaptivity_index = '?'\n raise\n\n #print('key =', key)\n if subcase_id != subcase_id_old or subtitle != subtitle_old or ogs != ogs_old:\n count_str = '' if count == 0 else ' ; opt_count=%s' % count_old\n ogs_str = '' if ogs == 0 else '; OGS=%s' % ogs_old\n subcase_str = 'Subcase %s; %s%s%s%s' % (\n subcase_id_old, subtitle_old, superelement_adaptivity_index, count_str, ogs_str)\n #print(subcase_str)\n res = (\n subcase_str.rstrip('; '),\n None,\n form_resultsi_subcase\n )\n form_resultsi.append(res)\n form_resultsi_subcase = []\n subcase_id_old = subcase_id\n subtitle_old = subtitle\n count_old = count\n ogs_old = ogs\n\n\n try:\n header = header_dict[(key, itime)]\n except KeyError: # this hits for strain energy\n msg = 'Missing (key, itime) in header_dict\\n'\n msg += ' 
key=%s\\n' % str(key)\n\n (subcase, analysis_code, sort_method,\n count, ogs, superelement_adaptivity_index, pval_step) = key\n msg += f' subcase={subcase}\\n'\n msg += f' analysis_code={analysis_code}\\n'\n msg += f' sort_method={sort_method}\\n'\n msg += f' count={count}\\n'\n msg += f' ogs={ogs}\\n'\n msg += f' superelement_adaptivity_index={superelement_adaptivity_index!r}\\n'\n msg += f' pval_step={pval_step!r}\\n'\n\n msg += ' itime=%s\\n' % itime\n msg += ' %s\\n' % str((key, itime))\n msg += 'Possible (key, time):\\n'\n for keyi in header_dict:\n msg += ' %s\\n' % str(keyi)\n #print(msg.rstrip())\n #print('expected = (%s, %r)\\n' % (str(key), itime))\n log.error(msg.rstrip() + '\\n')\n #self.log.error('expected = (%s, %r)\\n' % (str(key), itime))\n continue\n #raise KeyError(msg)\n try:\n header = header.strip()\n except Exception:\n print('header = %r' % header)\n raise\n\n\n form_outi = []\n form_out = (header, None, form_outi)\n disp_formi = disp_dict[(key, itime)]\n stress_formi = stress_dict[(key, itime)]\n strain_formi = strain_dict[(key, itime)]\n force_formi = force_dict[(key, itime)]\n strain_energy_formi = strain_energy_dict[(key, itime)]\n gpstress_formi = gpstress_dict[(key, itime)]\n if disp_formi:\n form_outi += disp_formi\n #form_outi.append(('Disp', None, disp_formi))\n if stress_formi:\n form_outi.append(('Stress', None, stress_formi))\n is_results = True\n if strain_formi:\n form_outi.append(('Strain', None, strain_formi))\n is_results = True\n if force_formi:\n form_outi.append(('Force', None, force_formi))\n is_results = True\n if strain_energy_formi:\n form_outi.append(('Strain Energy', None, strain_energy_formi))\n is_results = True\n if gpstress_formi:\n form_outi.append(('Grid Point Stresses', None, gpstress_formi))\n is_results = True\n\n if form_outi:\n is_results = True\n form_resultsi_subcase.append(form_out)\n #break\n\n #print(\"subcase_id = \", subcase_id)\n if subcase_id:\n count_str = '' if count == 0 else ' ; opt_count=%s' % count_old\n ogs_str = '' if ogs == 0 else '; OGS=%s' % ogs_old\n subcase_str = 'Subcase %s; %s%s%s' % (subcase_id, subtitle, count_str, ogs_str)\n #print('*', subcase_str)\n res = (\n subcase_str.strip('; '),\n None,\n form_resultsi_subcase\n )\n form_resultsi.append(res)\n assert len(form_out) > 0, form_out\n form_resultsi_subcase = []\n\n if is_results:\n form.append(form_results)\n assert len(form_out) > 0, form_out\n #print('formi =', formi)\n #print('form_out =', form_out)\n #print('form_resultsi =', form_resultsi)\n #print('form_results =', form_results)\n #print(form)\n #if len(formi):\n #form.append(form0)\n #print(form)\n #aa\n #print('form', form)\n #print('form_results =', form_results)\n return form", "def finalize(self):\n self.set_as_sort1()", "def selection_sort(self, data):\n for i in range(len(data)-1, 0, -1):\n i_max = 0\n for j in range(1, i+1):\n if data[j] > data[i_max]:\n i_max = j\n tmp = data[i]\n data[i] = data[i_max]\n data[i_max] = tmp\n print \"pass\", i, data", "def sort_1(l):\n pass", "def flag(self, arr):\n return arr", "def TransformFlags(self) -> _n_2_t_0[bool]:", "def sortSample(self, key, ascending):\n try:\n self.sample[self.sample['masked'] == False].sort_values(by=key, ascending=ascending)\n except:\n pass", "def _sort_dataframe(self, dataframe):\r\n print('Not allowed')", "def _process_data(data):\n for array in data:\n # Check if time is inverted. 
If so, reverse array while keeping the time/data structure.\n if array and len(array) > 2 and array[0] > array[2]:\n buff_1 = array[::2][::-1]\n buff_2 = array[1::2][::-1]\n array[::2] = buff_1\n array[1::2] = buff_2\n return data", "def post_add_scan_CB(self) -> None:\n # print(\"BEGIN SORTO !\")\n sortlst = list(self._sortdct.items())\n sortlst.sort(key=lambda a: a[0])\n self._rowlst = [row for k, row in sortlst]\n self._reinsert_rows()\n # w3.sortHTML(\"scoaddscanlist\", \".item\", \"td:nth-child(1)\")\n print(\"END SORTO !\")", "def data_missing_for_sorting():\n return RaggedArray([[1, 0], [], [0, 0]])", "def prep(self):\n print\n print 'Filtering rawdata to data as masked array...'\n# using 0 as flag\n# self.data = n.ma.masked_array(self.rawdata[:self.nints,:, self.chans,:], self.rawdata[:self.nints,:, self.chans,:] == 0j)\n# using standard flags\n self.data = n.ma.masked_array(self.rawdata[:self.nints,:, self.chans,:], self.flags[:self.nints,:, self.chans,:] == 0) # mask of True for flagged data (flags=0 in tpipe, which is flags=False in Miriad and flags=True in MS)\n self.dataph = (self.data.mean(axis=3).mean(axis=1)).real #dataph is summed and detected to form TP beam at phase center, multi-pol\n self.min = self.dataph.min()\n self.max = self.dataph.max()\n print 'Shape of data:'\n print self.data.shape\n print 'Dataph min, max:'\n print self.min, self.max\n\n self.freq = self.freq_orig[self.chans]\n\n # set up ur tracks (lol)\n self.dmtrack0 = {}\n self.twidths = {}\n for dmbin in xrange(len(self.dmarr)):\n self.dmtrack0[dmbin] = self.dmtrack(self.dmarr[dmbin],0) # track crosses high-freq channel in first integration\n self.twidths[dmbin] = 0\n for k in self.dmtrack0[dmbin][1]:\n self.twidths[dmbin] = max(self.twidths[dmbin], len(n.where(n.array(self.dmtrack0[dmbin][1]) == k)[0]))\n\n print 'Track width in time: '\n for dmbin in self.twidths:\n print 'DM=%.1f, twidth=%d. Iteration could step by %d/2.' 
% (self.dmarr[dmbin], self.twidths[dmbin], self.twidths[dmbin])", "def sort_by_default(self):\n        self.data.sort()", "def order_filter(self,elements):", "def sorted(self): \n        pass", "def main(iterator):\n\n    entries = OrderedDict()\n    for line in iterator:\n\n        if \"START\" in line:\n            entries.update({\"start_time\":int(re.search(r'\\d+', line).group())})\n        if \"STOP\" in line:\n            entries.update({\"end_time\":int(re.search(r'\\d+', line).group())})\n        if \"NUMERIC SORT\" in line and \"Done with\" not in line:\n            #print(float(re.search(r'\\d+', line).group()))\n            entries.update({\"numeric_sort\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n            line = next(iterator)\n            entries.update({\"numeric_sort_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n            line = next(iterator)\n            \n            entries.update({\"numeric_sort_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n            line = next(iterator)\n            \n            entries.update({\"numeric_sort_num_runs\":int(re.search(r'\\d+', line).group())})\n            line = next(iterator)\n            \n            entries.update({\"numeric_sort_num_arrs\":int(re.search(r'\\d+', line).group())})\n            line = next(iterator)\n            \n            entries.update({\"numeric_sort_arr_size\":int(re.search(r'\\d+', line).group())})\n\n        if \"STRING SORT\" in line and \"Done with\" not in line:\n            #print(float(re.search(r'\\d+', line).group()))\n            entries.update({\"string_sort\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n            line = next(iterator)\n            \n            entries.update({\"string_sort_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n            line = next(iterator)\n            \n            entries.update({\"string_sort_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n            line = next(iterator)\n            \n            entries.update({\"string_sort_num_runs\":int(re.search(r'\\d+', line).group())})\n            line = next(iterator)\n            \n            entries.update({\"string_sort_num_arrs\":int(re.search(r'\\d+', line).group())})\n            line = next(iterator)\n            \n            entries.update({\"string_sort_arr_size\":int(re.search(r'\\d+', line).group())})\n\n        if \"BITFIELD\" in line and \"Done with\" not in line:\n            #print(float(re.search(r'\\d+', line).group()))\n            entries.update({\"bitfield\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n            line = next(iterator)\n            \n            entries.update({\"bitfield_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n            line = next(iterator)\n            \n            
entries.update({\"bitfield_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"bitfield_num_runs\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"bitfield_ops_arr_size\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"bitfield_arr_size\":int(re.search(r'\\d+', line).group())})\n\n if \"FP EMULATION\" in line and \"Done with\" not in line:\n #print(float(re.search(r'\\d+', line).group()))\n entries.update({\"fp_emul\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"fp_emul_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"fp_emul_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"fp_emul_num_runs\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"fp_emul_num_loops\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"fp_emul_arr_size\":int(re.search(r'\\d+', line).group())})\n\n if \"FOURIER\" in line and \"Done with\" not in line:\n #print(float(re.search(r'\\d+', line).group()))\n entries.update({\"fourier\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"fourier_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"fourier_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"fourier_num_runs\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"fourier_num_coef\":int(re.search(r'\\d+', line).group())})\n\n if \"ASSIGNMENT\" in line and \"Done with\" not in line:\n #print(float(re.search(r'\\d+', line).group()))\n entries.update({\"assignment\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n entries.update({\"assignment_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"assignment_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"assignment_num_runs\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"assignment_num_arrs\":int(re.search(r'\\d+', line).group())})\n\n if \"IDEA\" in line and \"Done with\" not in line:\n #print(float(re.search(r'\\d+', line).group()))\n entries.update({\"idea\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"idea_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"idea_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"idea_num_runs\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"idea_arr_size\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n 
entries.update({\"idea_num_loops\":int(re.search(r'\\d+', line).group())})\n \n if \"HUFFMAN\" in line and \"Done with\" not in line:\n #print(float(re.search(r'\\d+', line).group()))\n entries.update({\"huffman\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"huffman_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"huffman_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"huffman_num_runs\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"huffman_arr_size\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"huffman_num_loops\":int(re.search(r'\\d+', line).group())})\n\n\n if \"NEURAL NET\" in line and \"Done with\" not in line:\n #print(float(re.search(r'\\d+', line).group()))\n entries.update({\"nnet\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"nnet_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"nnet_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"nnet_num_runs\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"nnet_num_loops\":int(re.search(r'\\d+', line).group())})\n\n if \"LU DECOMPOSITION\" in line and \"Done with\" not in line:\n #print(float(re.search(r'\\d+', line).group()))\n entries.update({\"lu_decomp\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"lu_decomp_abs_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"lu_decomp_rel_sdv\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"lu_decomp_num_runs\":int(re.search(r'\\d+', line).group())})\n line = next(iterator)\n \n entries.update({\"lu_decomp_num_arrs\":int(re.search(r'\\d+', line).group())})\n\n if \"libc\" in line and \"Baseline\" not in line and \"*\" not in line:\n line = next(iterator)\n \n entries.update({\"memory_index\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"integer_index\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n line = next(iterator)\n \n entries.update({\"float_index\":float(re.search(r\"[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?\", line).group())})\n\n #print(entries)\n return entries", "def _is_sorted_by_data(graph):\n assert graph.format == \"csr\"\n out_of_order = graph.data[:-1] > graph.data[1:]\n line_change = np.unique(graph.indptr[1:-1] - 1)\n line_change = line_change[line_change < out_of_order.shape[0]]\n return out_of_order.sum() == out_of_order[line_change].sum()", "def prep(self):\n print\n print 'Filtering rawdata to data as masked array...'\n# using 0 as flag\n# self.data = n.ma.masked_array(self.rawdata[:self.nints,:, self.chans,:], self.rawdata[:self.nints,:, self.chans,:] == 0j)\n# using standard flags\n self.data = 
n.ma.masked_array(self.rawdata[:self.nints,:, self.chans,:], self.flags[:self.nints,:, self.chans,:] == 0) # mask of True for flagged data (flags=0 in tpipe, which is flags=False in Miriad and flags=True in MS)\n self.dataph = (self.data.mean(axis=3).mean(axis=1)).real #dataph is summed and detected to form TP beam at phase center, multi-pol\n self.min = self.dataph.min()\n self.max = self.dataph.max()\n print 'Shape of data:'\n print self.data.shape\n print 'Dataph min, max:'\n print self.min, self.max\n\n self.freq = self.freq_orig[self.chans]\n\n self.track0 = self.track(0.)\n self.twidth = 0\n for k in self.track0[1]:\n self.twidth = max(self.twidth, len(n.where(n.array(self.track0[1]) == k)[0]))\n\n print 'Track width in time: %d. Iteration could step by %d/2.' % (self.twidth, self.twidth)", "def custom_sort(arr):\n pass", "def _sort_data ( self, resample_opts ):\n self._data_pntr = []\n for refl_file in self.atcorr_refl:\n if os.path.exists ( os.path.join ( self.datadir, refl_file ) ):\n if resample_opts is None:\n fname = os.path.join ( self.datadir, refl_file )\n else:\n fname = reproject_cut ( os.path.join ( self.datadir, refl_file ),\n **resample_opts )\n self._data_pntr.append (\n gdal.Open ( fname ) )\n else:\n\n raise IOError, \"GDAL cannot open this file: %s\" % ( os.path.join (\n self.datadir, refl_file) )\n self.resample_opts = resample_opts", "def _sort_data ( self, resample_opts ):\n self._data_pntr = []\n for refl_file in self.atcorr_refl:\n if os.path.exists ( os.path.join ( self.datadir, refl_file ) ):\n if resample_opts is None:\n fname = os.path.join ( self.datadir, refl_file )\n else:\n fname = reproject_cut ( os.path.join ( self.datadir, refl_file ),\n **resample_opts )\n self._data_pntr.append (\n gdal.Open ( fname ) )\n else:\n\n raise IOError, \"GDAL cannot open this file: %s\" % ( os.path.join (\n self.datadir, refl_file) )\n self.resample_opts = resample_opts", "def grouping(data,dis):\n cluRe = []\n for i in range(len(data)):\n cluRe.append(np.argsort(dis[i])[0])\n \n return np.asarray(cluRe)", "def _sort_modes(self):\n sort_idx = np.lexsort((self.modes[:, 1], self.modes[:, 0], self.modes[:, 2]))\n self._modes = self.modes[sort_idx]", "def _preprocess_and_filter_original_dataset(data):\n\n label_order = (\"EMPTY\", \"50_SIGN\", \"70_SIGN\", \"80_SIGN\")\n\n filtered_data = []\n for image, signs in data:\n if not signs:\n filtered_data.append((image, label_order.index(\"EMPTY\")))\n else:\n # take the most visible of the interesting signs\n signs = [s for s in signs\n if s.name in label_order and s.visibility == \"VISIBLE\"]\n if signs:\n filtered_data.append((image, label_order.index(signs[0].name)))\n return filtered_data", "def testDegenerate(self):\n srt = asarray(self.copy())\n srt.sort(axis=1)\n return (srt[:,:-1] == srt[:,1:]).any(axis=1)", "def data_for_sorting(allow_in_pandas):\n # Use an empty tuple for first element, then remove,\n # to disable np.array's shape inference.\n return PandasArray(\n np.array([(), (2,), (3,), (1,)])[1:]\n )", "def test_get_flag_array():\n test_file = os.path.join(DATA_PATH, \"paper_test_file.uvh5\")\n test_uv = UVData()\n test_uv.read(test_file)\n\n baseline_array = np.array(list(set(test_uv.baseline_array)))\n flag_array = utils.get_flag_array(test_uv, reds=baseline_array)\n\n test_flags = np.zeros(\n (test_uv.Npols, test_uv.Nbls, test_uv.Ntimes, test_uv.Nfreqs),\n dtype=np.float32,\n )\n\n pol_array = uvutils.polnum2str(test_uv.polarization_array)\n for pol_cnt, pol in enumerate(pol_array):\n for cnt, baseline in 
enumerate(list(set(test_uv.baseline_array))):\n ant_1, ant_2 = test_uv.baseline_to_antnums(baseline)\n test_flags[pol_cnt, cnt] = test_uv.get_flags(ant_1, ant_2)\n\n test_flags = np.squeeze(test_flags, axis=0)\n assert np.all(test_flags == flag_array)", "def FlagTransits(data,eclipseData):\n \n mask0=num.ma.getmaskarray(data['x'])\n data['UnMasked']=mask0\n i = 0\n #print eclipseData.keys()\n if eclipseData['bool']==False:\n mask1=num.ma.copy(data['x'].mask)\n data['TransitMask']=[]\n\tdata['bool']=False\n else:\n for koi in eclipseData['transit'].keys():\n period = eclipseData['transit'][koi]['period']\n t0 = eclipseData['transit'][koi]['t0']\n dur = eclipseData['transit'][koi]['duration']\n dur = (1.2*dur/24e0)\n t0 = t0 + 54900e0\n width = dur/period\n maxphase=1-width/2\n minphase=width/2\n phase= (data['x']-t0)/period-(data['x']-t0)//period\n idx=num.where((phase>maxphase)|(phase<minphase))\n data['x'][idx]= num.ma.masked\n mask1=num.ma.copy(data['x'].mask)\n if i == 0:\n data['TransitMask']=mask1\n else:\n data['TransitMask']=num.ma.mask_or(mask1,data['TransitMask'])\n i +=1\n\tdata['bool'] = True\n\n return data", "def sort_reads(self): \n if not self.sampling:\n self.convert_to_array()\n self.reads = self.reads[self.reads[:,0].argsort()]", "def step020():\n logger.logMessage('Begin: Sorting records')\n sortCommand = 'sort {0} -t \\';\\' --key 2 -o {1}'.format(candidatesFile,sortedCandidatesFile) \n rc = os.system(sortCommand)\n if rc != 0:\n raise Exception('Error returned by sort program: {0:d}'.format(rc))\n logger.logMessage('End : Sorting records')", "def prep(self, deleteraw=False):\n print\n print 'Filtering rawdata to data as masked array...'\n# using 0 as flag\n# self.data = n.ma.masked_array(self.rawdata[:self.nints,:, self.chans,:], self.rawdata[:self.nints,:, self.chans,:] == 0j)\n# using standard flags\n self.data = n.ma.masked_array(self.rawdata[:self.nints,:, self.chans,:], self.flags[:self.nints,:, self.chans,:] == 0) # mask of True for flagged data (flags=0 in tpipe, which is flags=False in Miriad and flags=True in MS)\n self.dataph = (self.data.mean(axis=3).mean(axis=1)).real #dataph is summed and detected to form TP beam at phase center, multi-pol\n self.min = self.dataph.min()\n self.max = self.dataph.max()\n print 'Shape of data:'\n print self.data.shape\n print 'Dataph min, max:'\n print self.min, self.max\n\n if deleteraw:\n del self.rawdata\n del self.flags\n\n self.freq = self.freq_orig[self.chans]\n\n # set up ur tracks (lol)\n self.dmtrack0 = {}\n self.twidths = {}\n self.delay = {}\n for dmbin in xrange(len(self.dmarr)):\n self.dmtrack0[dmbin] = self.dmtrack(self.dmarr[dmbin],0) # track crosses high-freq channel in first integration\n (trackt, trackc) = self.dmtrack0[dmbin]\n if len(trackc)<len(self.chans):\n print 'Computed track for DM=%.1f is too long for the observation; only %d channels are computed' % (self.dmarr[dmbin],len(trackc))\n continue\n \n# old way\n# self.twidths[dmbin] = [len(n.where(trackc == (chan-self.chans[0]))[0]) for chan in self.chans] # width of track for each unflagged channel\n# self.delay[dmbin] = [n.int(trackt[n.where(trackc == (chan-self.chans[0]))[0][0]]) for chan in self.chans] # integration delay for each unflagged channel of a given dm.\n# new way\n\n self.twidths[dmbin] = [len(n.where(n.array(trackc) == chan)[0]) for chan in range(len(self.chans))] # width of track for each unflagged channel\n self.delay[dmbin] = [n.int(trackt[n.where(n.array(trackc) == chan)[0][0]]) for chan in range(len(self.chans))] # integration 
delay for each unflagged channel of a given dm.\n\n\n print 'Track width in time: '\n for dmbin in self.twidths:\n print 'DM=%.1f, max(twidth)=%d. Iteration could step by %d/2.' % (self.dmarr[dmbin], max(self.twidths[dmbin]), max(self.twidths[dmbin]))", "def _process_data_sorted(self):\n n, t, trials = self.raw_data.shape[0], self.raw_data.shape[1], self.raw_data.shape[2]\n if self.PD:\n if self.use_silent_channels:\n clean_channels, red_n = self.raw_data, n\n _, _, invalid_ch = self._discard_channels(self.raw_data)\n else:\n clean_channels, red_n, invalid_ch = self._discard_channels(self.raw_data)\n\n session_onmeds = clean_channels[:, :, :, :, self.pd_ses_order[self.i_sub][0]]\n session_offmeds = clean_channels[:, :, :, :, self.pd_ses_order[self.i_sub][1]]\n subsets = [session_onmeds, session_offmeds]\n else:\n if self.use_silent_channels:\n clean_channels, red_n = self.raw_data, n\n _, _, invalid_ch = self._discard_channels(self.raw_data)\n else:\n clean_channels, red_n, invalid_ch = self._discard_channels(self.raw_data)\n session1 = clean_channels[:, :, :, 0::2]\n session2 = clean_channels[:, :, :, 1::2]\n subsets = [session1, session2]\n\n ts = np.empty(shape=(0, 2, self.N_MOTIV, trials, t, red_n))\n for ds in subsets:\n # session(1/2 or on/off) - rg1/rg2 - motiv - trial - sec - signal\n ts_tmp_new = np.zeros((2, self.N_MOTIV, trials, t, red_n))\n\n # rg1:\n ts_tmp_new[0, :, :, :] = np.array(\n (ds[:, :, :, 0].T, ds[:, :, :, 1].T, ds[:, :, :, 2].T)\n )\n # rg2:\n ts_tmp_new[1, :, :, :] = np.array(\n (ds[:, :, :, 3].T, ds[:, :, :, 4].T, ds[:, :, :, 5].T)\n )\n\n ts = np.concatenate((ts, np.array([ts_tmp_new])))\n\n return ts, invalid_ch", "def prepare(self):\n self.datelist = np.array(self.datelist)\n self.adulist = np.array(self.adulist)\n ast = np.argsort(self.datelist)\n return (self.datelist[ast], self.adulist[ast])", "def sort(self):\r\n return self.sort_targets([self])", "def filterNewOperators():\n if not os.path.isfile(newSamplesFile):\n print(\"Nothing to add to scan\")\n return\n\n print(\"New samples will be sorted and added to scan\")\n # --------------------HANDLE NEW OPERATORS-----------------------------\n sortedlistNewSamples = []\n with open(newSamplesFile,'r') as csvFileNewSamples:\n readerNewSamples = csv.DictReader(csvFileNewSamples)\n sortedlistNewSamples = sorted(readerNewSamples, key=operator.itemgetter('HOST', 'PORT')) #Sort Port as text and not as number!\n\n operatorList = []\n lastRow=sortedlistNewSamples[0]\n if checkHost(lastRow['HOST']) and checkPort(lastRow['PORT']):\n operatorList.append(lastRow)\n\n for newSample in sortedlistNewSamples:\n if lastRow['HOST'] == newSample['HOST'] and lastRow['PORT'] == newSample['PORT']:\n continue\n lastRow=newSample\n if checkHost(newSample['HOST']) and checkPort(newSample['PORT']):\n operatorList.append(newSample)\n\n operatorList = check_if_new_operators_in_live_analysis_file(operatorList) # check if they are in liveAnalysisFile\n\n #----------------- MERGE NEW OPERATORS WITH TARGET FILE------------------------\n\n if not os.path.isfile(targetFile):\n with open(targetFile, 'w') as myfile:\n wr = csv.writer(myfile)\n wr.writerow(['HOST','PORT','FILE HASH'])\n sortedlistOperator = []\n with open(targetFile,'r') as csvFileTarget:\n readerTarget = csv.DictReader(csvFileTarget)\n operatorList.extend(readerTarget)\n sortedlistOperator = sorted(operatorList, key=operator.itemgetter('HOST', 'PORT')) #Sort Port as text and not as number!\n\n with open(tempTargetFile, 'w') as myfile:\n wr = csv.writer(myfile)\n 
wr.writerow(['HOST','PORT','FILE HASH'])\n\n lastOp = sortedlistOperator[0]\n wr.writerow([lastOp['HOST'],lastOp['PORT'],lastOp['FILE HASH']])\n for op in sortedlistOperator:\n if lastOp['HOST'] != op['HOST'] or lastOp['PORT'] != op['PORT']:\n wr.writerow([op['HOST'],op['PORT'],op['FILE HASH']])\n lastOp = op\n os.remove(targetFile)\n os.rename(tempTargetFile,targetFile)\n os.remove(newSamplesFile)", "def precalculate(self):\n self._sorted = np.argsort(\n self.base[self._array], kind='mergesort') # mergesort for stability\n self._boundaries = util.find_boundaries(\n self.base[self._array][self._sorted])", "def sort_scan_data(self, data_key, onedimensional=False):\n if data_key == 'metric_vals':\n label = self.metric_name\n units = 'dimensionless'\n vals = np.array(self.data[data_key])\n elif data_key == 'shifted_metric_vals':\n if not onedimensional:\n label = 'contour'\n else:\n label = 'delta_'+self.metric_name\n units = 'dimensionless'\n vals = np.array(self.data[data_key])\n else:\n label = data_key\n units = self.data[data_key]['units']\n vals = np.array(self.data[data_key]['vals'])\n\n if not onedimensional:\n vals = np.array(np.split(vals, len(self.all_bin_cens[0])))\n return label, units, vals", "def all_bees_lowered_flag(self):\n pos, com, success = self.perception\n return all(map(lambda x: x[1][\"flag\"] == 0, com))", "def bubble_sort(dataset):\n\t# start with array length and decrement each time \n\tarrayLen = len(dataset)\n\tbubbleIndex = len(dataset) - 1\n\twhile bubbleIndex != 0:\n\t\tarrayIndex = 0\n\t\twhile arrayIndex < arrayLen - 1:\n\t\t\tthisVal = dataset[arrayIndex]\n\t\t\tnextVal = dataset[arrayIndex + 1]\n\t\t\tif thisVal > nextVal:\n\t\t\t\tdataset[arrayIndex + 1] = thisVal\n\t\t\t\tdataset[arrayIndex] = nextVal\n\t\t\tarrayIndex += 1\n\t\tprint \"Current State:\", dataset\n\t\tbubbleIndex -= 1", "def merge(line):\n line2=[]\n line3=[]\n line4=[]\n pair=0\n shift=0\n line1=[0]*len(line)\n if(len(line)==1):\n for iota in line:\n line1[0]=iota\n return line1\n \n for iota in xrange(len(line)):\n line4.append(line[iota])\n \n for iota in xrange(len(line)):\n line3.append(line[iota])\n \n \n \n for xinn in xrange(len(line3)):\n for iota in xrange(len(line3)-1):\n if(line3[iota]==0):\n if((line3[iota+1])>0):\n temp=line3[iota];\n line3[iota]=line3[iota+1];\n line3[iota+1]=temp\n shift=1\n xinn=xinn+1\n \n \n if(shift==1):\n for iota in xrange(len(line3)):\n line2.append(line3[iota])\n else:\n for iota in xrange(len(line4)):\n line2.append(line4[iota])\n \n \n \n \n \n \n \n for olay in range(len(line2)-1):\n \n \n if(line2[olay]==line2[olay+1]):\n line1[olay]=2*line2[olay];\n line2[olay+1]=0\n line1[olay+1]=line2[olay+1]\n pair=1;\n olay=olay+2\n else:\n line1[olay]=line2[olay]\n line1[olay+1]=line2[olay+1]\n \n \n \n \n \n \n \n \n \n \n if(pair==0):\n for lonn in xrange(len(line3)):\n line1[lonn]=line3[lonn]\n return line1\n \n \n \n for xinn in xrange(len(line1)):\n for iota in xrange(len(line1)-1):\n if(line1[iota]==0):\n if((line1[iota+1])>0):\n temp=line1[iota];\n line1[iota]=line1[iota+1];\n line1[iota+1]=temp\n \n xinn=xinn+1\n \n return line1", "def start_sort():\n global data\n if algo_box.get() == \"Bubble Sort\":\n bubble_sort(data, display_data, speed_scale.get())\n elif algo_box.get() == \"Merge Sort\":\n merge_sort(data, display_data, speed_scale.get())", "def TurkeyFilter(c):\n if (c[2] > c[0]) and (c[1] > c[0]) and (c[2] == c[1]): return True\n else: return False", "def bubble_optimized_with_flag(array):\n for passes in range(len(array)-1, 0, -1):\n 
changed = False\n for i in range(passes):\n if array[i] > array[i+1]:\n array[i], array[i+1] = array[i+1], array[i]\n changed = True\n if not changed:\n break", "def data_for_sorting(data_for_grouping):\n return type(data_for_grouping)._from_sequence(\n [data_for_grouping[0], data_for_grouping[7], data_for_grouping[4]]\n )", "def mergeSpeedAndMeta(segmentToMerge,bestGroup,mergedSegments,updatedSpeed,inversedIndex,criteriaSeries,weights,minValidData):\n mergeWithOutSeg = any( bestGroup == inversedIndex.loc[x] for x in mergedSegments.at[segmentToMerge,'outs'])\n\n if mergeWithOutSeg :\n if(len(mergedSegments.at[bestGroup,'outs'])==0 and len(mergedSegments.at[segmentToMerge,'ins'])==0) or(mergedSegments.at[segmentToMerge,'head'] != mergedSegments.loc[bestGroup]['tail']):\n criteriaSeries.at[(segmentToMerge,bestGroup),]=np.inf\n return criteriaSeries\n else :\n if(len(mergedSegments.at[bestGroup,'ins'])==0 and len(mergedSegments.at[segmentToMerge,'outs'])==0) or (mergedSegments.at[segmentToMerge,'tail'] != mergedSegments.loc[bestGroup]['head']):\n criteriaSeries.at[(segmentToMerge,bestGroup),]=np.inf\n return criteriaSeries\n \n if( not mergeWithOutSeg and not any( bestGroup == inversedIndex.loc[x] for x in mergedSegments.at[segmentToMerge,'ins'])) or ( mergeWithOutSeg and any( bestGroup == inversedIndex.loc[x] for x in mergedSegments.at[segmentToMerge,'ins'])):\n print('****************************************\\n\\n*********************************\\n\\n bestgroup not found \\n\\n**********************************\\n')\n print( not mergeWithOutSeg ,any( bestGroup == inversedIndex.loc[x] for x in mergedSegments.at[segmentToMerge,'ins']))\n print(segmentToMerge,bestGroup)\n print( \"ins outs\")\n print([inversedIndex.loc[x] for x in mergedSegments.at[segmentToMerge,'ins']])\n print([inversedIndex.loc[x] for x in mergedSegments.at[segmentToMerge,'outs']])\n print( 'b ins outs')\n print([inversedIndex.loc[x] for x in mergedSegments.at[bestGroup,'ins']])\n print([inversedIndex.loc[x] for x in mergedSegments.at[bestGroup,'outs']])\n \n \n \n \n if mergeWithOutSeg :\n \n mergedSegments.at[segmentToMerge,'outs'] = mergedSegments.loc[bestGroup]['outs']\n mergedSegments.at[segmentToMerge,'cosHead' ] = mergedSegments.at[bestGroup,'cosHead' ]\n mergedSegments.at[segmentToMerge,'sinHead' ] = mergedSegments.at[bestGroup,'sinHead' ]\n mergedSegments.at[segmentToMerge,'head'] = mergedSegments.loc[bestGroup]['head']\n\n else :\n mergedSegments.at[segmentToMerge,'ins'] = mergedSegments['ins'][bestGroup]\n mergedSegments.at[segmentToMerge,'cosTail' ] = mergedSegments.at[bestGroup,'cosTail' ]\n mergedSegments.at[segmentToMerge,'sinTail' ] = mergedSegments.at[bestGroup,'sinTail' ]\n mergedSegments.at[segmentToMerge,'tail'] = mergedSegments.loc[bestGroup]['tail']\n\n newLength = mergedSegments.loc[segmentToMerge]['length']+ mergedSegments.loc[bestGroup]['length']\n\n mergedSegments.at[segmentToMerge,'length'] = newLength\n \n mergedSegments.at[segmentToMerge,'nonNullProp'] = (updatedSpeed.loc[segmentToMerge].notna() | updatedSpeed.loc[bestGroup].notna()).sum()/updatedSpeed.columns.size\n\n\n #mergedSegments.at[segmentToMerge,'edges' ] = np.unique(np.concatenate((mergedSegments.loc[segmentToMerge]['edges'],mergedSegments.loc[bestGroup]['edges'])))\n #speed update\n mergedSegments.at[segmentToMerge,'edges' ] = np.unique(np.concatenate((mergedSegments.loc[segmentToMerge]['edges'],mergedSegments.loc[bestGroup]['edges'])))\n \n updatedSpeed.loc[segmentToMerge]=updatedSpeed.loc[set([inversedIndex.loc[x] for x in 
[segmentToMerge,bestGroup]])].mean()\n updatedSpeed.drop(bestGroup, inplace=True)\n mergedSegments.drop(bestGroup, inplace=True)\n inversedIndex.replace(bestGroup, segmentToMerge, inplace=True)\n \n \n criteriaSeries.drop(index=bestGroup, level=0, inplace = True)\n \n segmentCriteria = getNeighboursCriteriaIndex(segmentToMerge, mergedSegments,updatedSpeed,inversedIndex,weights,minValidData)\n \n criteriaSeries.drop(index=segmentToMerge, level=0, inplace=True )\n \n \n \n criteriaSeries = criteriaSeries.append( segmentCriteria )\n \n criteriaSeries.drop(index=bestGroup, level=1, inplace = True)\n criteriaSeries.drop(index=segmentToMerge, level=1, inplace = True)\n \n inverseSegmentCriteria = getInversedCriteria(segmentToMerge,mergedSegments,updatedSpeed,inversedIndex,weights,minValidData)\n criteriaSeries = criteriaSeries.append( inverseSegmentCriteria )\n\n \n \n return criteriaSeries", "def resort_couplings(J,sortIx):\n return", "def check_sorted(self):\n last_count = np.inf\n for count in self.Nx:\n if count > last_count:\n self.sorted = False\n return self.sorted\n last_count = count\n self.sorted = True\n return self.sorted", "def basicProcessing(volume, sigma, order, output, mode, truncate):\n\n\n #### Filters ###\n\n result = gaussian_filter(input=volume, sigma=sigma, order=order, output=output, mode=mode, truncate=truncate)\n\n val = threshold_otsu(result)\n print(\"val : {}\".format(val))\n\n mask = np.zeros(volume.shape, dtype=np.int8)\n mask[volume > val] = 1\n #mask = mask.astype(int)\n\n print(\"mask shape: {}\".format(mask.shape))\n print(mask)\n\n\n #### Morphological Operation ###\n\n # Opening removes small objects\n r1 = binary_opening(mask, structure=np.ones((3, 3, 3))).astype(np.int8)\n\n # Closing removes small holes\n r2 = binary_closing(r1, structure=np.ones((3, 3, 3))).astype(np.int8)\n\n\n # 3x3x3 structuring element with connectivity 4 or 8\n struct1 = generate_binary_structure(3, 1) # no diagonal elements\n #struct1 = generate_binary_structure(3, 2) # with diagonal elements\n ############struct1 = struct1.astype(int)\n print (struct1)\n\n\n #r3 = binary_dilation(r2).astype(int)\n r3 = binary_dilation(r2, structure=struct1).astype(int) # using a structure element\n\n # Erosion removes objects smaller than the structure\n r4 = binary_erosion(r3, structure=np.ones((3, 3, 3))).astype(np.int8)\n\n\n #### Measurements ###\n\n struct2 = np.ones((3, 3, 3), dtype=np.int8)\n labeled_array, num_features = label(r4, structure=struct2)\n\n #print(labeled_array)\n print(num_features)\n\n return labeled_array, num_features", "def testFlagFFT(self):\n mode = \"list\"\n infile_spk = self.infile_02spk\n outfile_spk = self.outroot+\"_flagFFT_spk.asap\"\n result = sdbaseline(infile=infile_spk,maskmode=mode,outfile=outfile_spk,blfunc='sinusoid',fftthresh='top3')\n infile_int = self.infile_02int\n outfile_int = self.outroot+\"_flagFFT_int.asap\"\n result = sdbaseline(infile=infile_int,maskmode=mode,outfile=outfile_int,blfunc='sinusoid',fftthresh='top3')\n bsuffix = \"_blparam.txt\"\n self._compareCoefficients(outfile_spk+bsuffix, outfile_int+bsuffix)", "def sort(match, ser_if):\n if match:\n ser_if.write('m')\n else:\n ser_if.write('c')\n return check_response(ser_if)", "def test_parse_flag_file_2(self):\n flag_file = Path(test_file_dir, \"test_flag_file_2.csv\")\n flag_dict = basic.parse_flag_file(flag_file)\n self.assertEqual(len(flag_dict.keys()), 1)", "def operator(self, sort):\r\n return None", "def filterRansac():\n pass", "def getAtomStrainDuplicates(self, tol_mag = 7, verbose = 
1, sort = \"angle_same\"):\n\n \"\"\"Favor cells by first making the desired sorting and then removing duplicates\n In relevant cases lexsort by the number of atoms as well\"\"\"\n if isinstance(sort, (int, np.integer, float)):\n p = np.abs(self.getBaseAngles(cell = 1) - np.deg2rad(sort))\n #si = np.argsort(p)\n si = np.lexsort((self.atoms, p))\n self.indexSortInterfaces(index = si)\n string = \"Favoring: Specified angle %.2f deg\" % sort\n elif isinstance(sort, (list, np.ndarray)):\n ang = np.tile(self.getBaseAngles(cell = 1), (np.shape(sort)[0], 1))\n p = np.abs(ang - np.deg2rad(np.array(sort))[:, None])\n #si = np.argsort(np.min(p, axis = 0))\n si = np.lexsort((self.atoms, np.min(p, axis = 0)))\n self.indexSortInterfaces(index = si)\n string = \"Favoring: Specified angles %s deg\" % (\", \".join([str(i) for i in sort]))\n elif sort.lower() == \"length\":\n p = np.sum(self.getCellLengths(cell = 1), axis = 1)\n si = np.argsort(p)\n self.indexSortInterfaces(index = si)\n string = \"Favoring: Minimum Circumference\"\n elif sort.lower() == \"angle_right\":\n p = np.abs(np.pi / 2 - self.getBaseAngles(cell = 1))\n #si = np.argsort(p)\n si = np.lexsort((self.atoms, p))\n self.indexSortInterfaces(index = si)\n string = \"Favoring: Right Angles\"\n elif sort.lower() == \"angle_same\":\n p = np.abs(ut.getCellAngle(self.base_1[:2, :2], verbose = verbose) -\\\n self.getBaseAngles(cell = 1))\n #si = np.argsort(p)\n si = np.lexsort((self.atoms, p))\n self.indexSortInterfaces(index = si)\n string = \"Favoring: Base Angle Match\"\n else:\n string = \"Favoring: As Constructed\"\n\n \"\"\"Find unique strains within specified tolerances\"\"\"\n values = np.zeros((self.atoms.shape[0], 2))\n values[:, 0] = self.atoms.copy()\n values[:, 1] = np.round(self.eps_mas.copy(), tol_mag)\n unique = np.unique(values, axis = 0, return_index = True)[1]\n index = np.in1d(np.arange(self.atoms.shape[0]), unique)\n\n if verbose > 0:\n ut.infoPrint(string)\n string = \"Unique strain/atom combinations found: %i, tol: 1e-%i (all exact matches keept)\"\\\n % (np.sum(index), tol_mag)\n ut.infoPrint(string)\n\n return index", "def sort_collected_data():\n\n def is_from_valid_set(fn):\n return fn.find(\"validation\") != -1\n\n source_dir = \"data\"\n\n x_train_dir = os.path.join(SEG_DATA_FOLDER, \"train\")\n y_train_dir = os.path.join(SEG_DATA_FOLDER, \"train_label\")\n x_valid_dir = os.path.join(SEG_DATA_FOLDER, \"val\")\n y_valid_dir = os.path.join(SEG_DATA_FOLDER, \"val_label\")\n\n for direc in [x_train_dir, y_train_dir, x_valid_dir, y_valid_dir]:\n mkdir_if_not_exist(direc)\n\n images = [x for x in os.listdir(source_dir) if x.find(\"png\") >= 0]\n inputs = [x for x in images if x.find(\"label\") == -1]\n labels = [x for x in images if x.find(\"label\") != -1]\n\n train_x = [x for x in inputs if not is_from_valid_set(x)]\n valid_x = [x for x in inputs if is_from_valid_set(x)]\n train_y = [x for x in labels if not is_from_valid_set(x)]\n valid_y = [x for x in labels if is_from_valid_set(x)]\n\n for f in train_x:\n shutil.copyfile(os.path.join(\"data\", f), os.path.join(x_train_dir, f))\n\n for f in train_y:\n shutil.copyfile(os.path.join(\"data\", f), os.path.join(y_train_dir, f))\n\n for f in valid_x:\n shutil.copyfile(os.path.join(\"data\", f), os.path.join(x_valid_dir, f))\n\n for f in valid_y:\n shutil.copyfile(os.path.join(\"data\", f), os.path.join(y_valid_dir, f))", "def sort_012(a):\n # lo keeps track of the running index coming from the beginning of the list\n # hi keeps track of the running index coming from the end 
of the list\n    # m1 and m2 keep track where the subarray of 1's is located \n    # (keeps track of the first and last index of the 1's subarray)\n    assert(type(a) == list), \"Array has to be a list\"\n    lo, m1 = 0, 0\n    hi, m2 = len(a)-1, len(a)-1\n    runtime = 0\n    while lo <= hi:\n        runtime += 1\n        if a[lo] == 0:\n            if m1 < lo:\n                a[m1] = 0\n                a[lo] = 1\n                m1 += 1\n            lo += 1\n        elif a[hi] == 2:\n            if m2 > hi:\n                a[m2] = 2\n                a[hi] = 1\n                m2 -= 1\n            hi -= 1\n        elif a[lo] == 1:\n            lo += 1\n        elif a[hi] == 1:\n            hi -= 1\n        elif a[lo] == 2 and a[hi] == 0:\n            if lo == m1:\n                a[lo] = 0\n            else:\n                a[m1] = 0\n                a[lo] = 1\n                lo += 1\n                m1 += 1\n            if hi == m2:\n                a[hi] = 2\n            else:\n                a[m2] = 2\n                a[hi] = 1\n                m2 -= 1\n            hi -= 1\n        else:\n            print(\"Warning: Logic problem\") \n    return a, runtime", "def get_Flagging(flagging_file, n_Rec, nChan, exp_count):\n\n    line = subprocess.check_output(['grep','Flagged', flagging_file]) # grab the summary line\n    str_line = line.decode('utf-8')\n    TOKS = str_line.split()\n    total_flagged_pct = float(TOKS[-2]) #data+autocorrelation\n    total_uv = float(TOKS[7])\n\n    # Getting data flagged percentage\n    \n    autocorr_flagged_pct = (36 * n_Rec * nChan / total_uv)*100.0\n    data_flagged_pct = round(total_flagged_pct - autocorr_flagged_pct, 3)\n\n    # Finding out which antenna has been flagged completely.\n    ANT1, ANT2, FLAG = [], [], [] \n    with open(flagging_file, 'r') as f:\n        for line in f:\n            if \"#\" not in line: # grep -v \"#\"\n                if \"Flagged\" not in line: # grep -v \"Flagged\"\n                    if len(line.split())>2: # avoid new channel-wise summaries at end of flagSummary file\n                        TOKS=line.split()\n                        ant1 = int(TOKS[3])\n                        ant2 = int(TOKS[4])\n                        flag = float(TOKS[6])\n                        if (ant1 < ant2) and (flag == 100): # extract non-correlated antenna pairs with 100 percent flagging\n                            ANT1.append(ant1)\n                            ANT2.append(ant2)\n                            FLAG.append(flag)\n\n    ant1, ant2, flag = np.asarray(ANT1), np.asarray(ANT2), np.asarray(FLAG)\n    \n    ANT_NAME = []\n    for x in range(0,36):\n        count1 = np.count_nonzero(ant1 == x)\n        count2 = np.count_nonzero(ant2 == x)\n        total_count = count1 + count2\n        if total_count == exp_count:\n            ant_num = x+1\n            ant_name = 'ak'+ str(ant_num)\n            ANT_NAME.append(ant_name)\n\n    total_flagged_ant = len(ANT_NAME)\n    \n    flag_ant_file = 'flagged_antenna.txt'\n    ffile = open(fig_dir + '/'+ flag_ant_file,'a')\n    \n    if total_flagged_ant > 1:\n        ffile.write(flagging_file[-24:-18])\n        ffile.write('\\n')\n        for item in ANT_NAME:\n            ffile.write(item)\n            ffile.write('\\n')\n    else:\n        ffile.write(flagging_file[-24:-18])\n        ffile.write('\\n none \\n')\n\n    ffile.close()\n    \n    return data_flagged_pct, total_flagged_ant, flag_ant_file", "def uncheck_all_sort(self):\n\n        self.param_list = []\n        self.ageSort.setChecked(False)\n        self.sexSort.setChecked(False)\n        self.speciesSort.setChecked(False)\n        self.genotypeSort.setChecked(False)\n        self.subjectIDSort.setChecked(False)\n        self.weightSort.setChecked(False)\n        self.birthSort.setChecked(False)\n        self.fluorescenceSort.setChecked(False)\n        self.imagesegSort.setChecked(False)\n        self.rasterSort.setChecked(False)", "def sort_0(l):\n    l.sort()", "def sortColors(self, nums: List[int]) -> None:\n\n#---------------------Solution1----------------------# Dutch Flag Problem\n\n        zero, one, two = 0, 0, len(nums)-1\n\n        while one <= two:\n            if nums[one] == 0:\n                nums[one], nums[zero] = nums[zero], nums[one]\n                zero += 1\n                one += 1\n            elif nums[one] == 1:\n                one += 1\n            else:\n                nums[one], nums[two] = nums[two], nums[one]\n                two -= 1\n\n\n#---------------------Solution2----------------------# Brute Force, Fast\n\n        zeros=nums.count(0)\n        for _ in range(zeros):\n            
nums.remove(0)\n nums.append(0)\n ones=nums.count(1)\n for _ in range(ones):\n nums.remove(1)\n nums.append(1)\n twos=nums.count(2)\n for _ in range(twos):\n nums.remove(2)\n nums.append(2)", "def compare_sorted_data(self, unsorted_data, descending_order):\n sorted_data = []\n self.all_row_data = [x.lower() for x in self.all_row_data]\n if descending_order:\n sorted_data = sorted([x.lower() for x in unsorted_data], reverse=True)\n else:\n sorted_data = sorted([x.lower() for x in unsorted_data])\n if sorted_data == self.all_row_data:\n return True\n else:\n return False", "def sort_modes(self):\n # sorts by l, then n, then freq\n ind = np.lexsort((self.modes['freq'], self.modes['n'],self.modes['l']))\n self.modes = np.array([self.modes[i] for i in ind],dtype=modetype)", "def findAlternatives(sortedList):\n #zeroing the data below treshold\n global TRESHOLD\n # if THRESHOLD == 0:\n TRESHOLD = readsHistogram(sortedList)\n afterTresholdData = []\n print(len(sortedList))\n for i in range(len(sortedList)):\n if np.mean(sortedList[i].getSamples()) >= TRESHOLD:\n afterTresholdData.append(sortedList[i]) #leaves only the reads only if the mean of the reads above TRESHOLD\n index = 0\n while index < (len(afterTresholdData) - 1):\n counter = 1\n while afterTresholdData[index].getName() == afterTresholdData[index + counter].getName():\n afterTresholdData[index].appendSamples(afterTresholdData[index + counter].getSamples())\n afterTresholdData[index].appendCoordinates(afterTresholdData[index + counter].getCoordinates())\n counter += 1\n index += counter\n alternatives = []\n for item in afterTresholdData:\n if len(item.getSamples().shape) > 1:\n alternatives.append(item)\n print(len(afterTresholdData), len(alternatives))\n return alternatives", "def _sort_time(self):\n time = np.copy(self.data[\"time\"][:])\n ind_sorted = np.argsort(time)\n ind_valid: list[int] = []\n for ind in ind_sorted:\n if time[ind] not in time[ind_valid]:\n ind_valid.append(ind)\n n_time = len(time)\n for key, array in self.data.items():\n if not hasattr(array, \"shape\"):\n continue\n if array.ndim == 1 and array.shape[0] == n_time:\n self.data[key] = self.data[key][ind_valid]\n if array.ndim == 2 and array.shape[0] == n_time:\n self.data[key] = self.data[key][ind_valid, :]", "def improved_bubble_sort(data_list):\n for passnum in range(len(data_list) - 1, 0, -1):\n is_sorted = True\n for idx in range(passnum):\n if data_list[idx] > data_list[idx + 1]:\n temp = data_list[idx]\n data_list[idx] = data_list[idx + 1]\n data_list[idx + 1] = temp\n is_sorted = False\n if is_sorted:\n return", "def sort_results(self):\n pass", "def getFlags(acre_threshold=10, min_acre_diff=40):\n acre_threshold = float(acre_threshold)\n min_acre_diff = float(min_acre_diff)\n if acre_threshold > 100 or acre_threshold == 0:\n raise ValueError('Acre threshold must be between 1-100!')\n\n if acre_threshold > 1:\n acre_threshold *= .01\n\n # run summary stats on breakdown table\n gdb = utils.Geodatabase()\n stats ='ACRES SUM;BENEFIT SUM;ASSESSMENT SUM;SEC_TWN_RNG FIRST'\n case_field='CODE;LANDOWNER_NAME;PIN;COUNTY'\n tmp_stats = r'in_memory\\tmp_stats'\n #tmp_stats = os.path.join(gdb.path, 'tmp_stats') #testing only\n arcpy.analysis.Statistics(gdb.breakdown_table, tmp_stats, stats, case_field)\n\n # create new table\n if not arcpy.Exists(gdb.flag_table):\n flag_table_exists = False\n path, name = os.path.split(gdb.flag_table)\n arcpy.management.CreateTable(path, name)\n\n for fld, alias, ftype in FLAG_FIELDS:\n arcpy.management.AddField(gdb.flag_table, fld, 
ftype, field_alias=alias, field_length=255)\n\n else:\n # just clear out the rows\n flag_table_exists = True\n arcpy.management.DeleteRows(gdb.flag_table)\n\n # read summarized breakdown table\n sum_d = {}\n s_fields = ['PIN', 'CODE', 'LANDOWNER_NAME', 'SUM_ACRES', 'SUM_BENEFIT', 'SUM_ASSESSMENT', 'FIRST_SEC_TWN_RNG']\n with arcpy.da.SearchCursor(tmp_stats, s_fields) as rows:\n for r in rows:\n sum_d[r[0]] = r[1:]\n\n # read summary table from gdb\n summary_fields = ['PIN', 'OWNER_CODE', 'OWNER', 'ASSESSED_ACRES', 'TOT_BENEFIT',\n 'TOT_ASSESSMENT', 'SECTION', 'TOWNSHIP', 'RANGE', 'COUNTY']\n\n # generate flags\n flagCount = 0\n flag_pins = []\n pin_error_msg = 'PIN not found in Breakdown Table'\n with utils.InsertCursor(gdb.flag_table, [f[0] for f in FLAG_FIELDS[:-1]]) as irows:\n with arcpy.da.SearchCursor(gdb.summary_table, summary_fields) as rows:\n for r in rows:\n newRow = [None] * len(FLAG_FIELDS[:-1])\n par = None\n if r[0] in sum_d:\n plss = '-'.join(['{:0>2}'.format(p) if p else '99' for p in r[6:9]])\n par = sum_d[r[0]]\n newRow[0] = r[0]\n\n # check owner code\n if r[1] != par[0]:\n newRow[2] = 'Owner Code \"{}\" does not macth \"{}\" in breakdown table\"'.format(r[1] if r[1] else '', par[0] if par[0] else '')\n own = r[2]\n\n # check owner last name only\n if own and par[1]:\n ownLast = own.split()[0].upper().rstrip(',')\n bownLast = par[1].split()[0].upper().rstrip(',')\n if ownLast != bownLast:\n newRow[3] = 'Last name \"{}\" in summary table does not match \"{}\" in breakdown table'.format(ownLast, bownLast)\n\n # check acres based on pecent threshold\n acres = r[3]\n bacres = par[2]\n diff = acres - bacres\n perc_diff = (acres * acre_threshold)\n\n if abs(diff) >= perc_diff and abs(diff) >= min_acre_diff:\n newRow[4] = diff\n newRow[5] = perc_diff\n\n # check benefits and assessments, these should be exact matches!\n ben_diff = r[4] - par[3]\n if ben_diff:\n if ben_diff > 0.1:\n newRow[6] = ben_diff\n\n assess_diff = r[5] - par[4]\n if assess_diff:\n if assess_diff > 0.1:\n newRow[7] = assess_diff\n\n # verify plss info\n if plss != par[5]:\n newRow[8] = 'Section \"{}\" does not match \"{}\" from breakdown table'.format(plss, par[5])\n\n else:\n newRow[:2] = [r[0], pin_error_msg]\n\n if len(filter(None, newRow)) >= 2:\n # add county\n newRow[9] = r[-1]\n irows.insertRow(newRow)\n flagCount += 1\n\n if newRow[1] != pin_error_msg:\n flag_pins.append(newRow[0])\n\n # flag PINs in breakdown table, PINs keep getting set to NULL from relationship table??\n with utils.UpdateCursor(gdb.breakdown_table, [utils.PIN, 'FLAG']) as urows:\n for row in urows:\n if row[0] in flag_pins:\n row[1] = 'Y'\n else:\n row[1] = 'N'\n urows.updateRow(row)\n\n # flag PINs in summary table\n with utils.UpdateCursor(gdb.summary_table, [utils.PIN, 'FLAG']) as rows:\n for row in urows:\n if row[0] in flag_pins:\n row[1] = 'Y'\n else:\n row[1] = 'N'\n rows.updateRow(row)\n\n## # set up relationship classes, this is killing GDB performance, will just have to go with table joins :(\n## sum_rel = os.path.join(gdb.path, 'Summary_Relationship')\n## brk_rel = os.path.join(gdb.path, 'Breakdown_Relationship')\n## if not arcpy.Exists(sum_rel):\n## arcpy.management.CreateRelationshipClass(gdb.summary_table, gdb.flag_table, sum_rel, 'SIMPLE', 'Flags', 'Summary', 'BOTH', 'ONE_TO_ONE', 'NONE','PIN', 'PIN')\n## utils.Message('created ' + os.path.basename(sum_rel))\n##\n## if not arcpy.Exists(brk_rel):\n## arcpy.management.CreateRelationshipClass(gdb.flag_table, gdb.breakdown_table, brk_rel, 'SIMPLE', 'Breakdown', 
'Flags', 'BOTH', 'ONE_TO_MANY', 'NONE', 'PIN', 'PIN')\n## utils.Message('created ' + os.path.basename(brk_rel))\n\n # compact gdb\n arcpy.management.Compact(gdb.path)\n\n # report message\n utils.Message('Found {} flags between summary and breakdown tables'.format(flagCount))\n return", "def test_get_flag_array_no_squeeze():\n test_file = os.path.join(DATA_PATH, \"paper_test_file.uvh5\")\n test_uv = UVData()\n test_uv.read(test_file)\n\n baseline_array = np.array(list(set(test_uv.baseline_array)))\n flag_array = utils.get_flag_array(test_uv, reds=baseline_array, squeeze=False)\n\n test_flags = np.zeros(\n (test_uv.Npols, test_uv.Nbls, test_uv.Ntimes, test_uv.Nfreqs),\n dtype=np.float32,\n )\n\n pol_array = uvutils.polnum2str(test_uv.polarization_array)\n for pol_cnt, pol in enumerate(pol_array):\n for cnt, baseline in enumerate(list(set(test_uv.baseline_array))):\n ant_1, ant_2 = test_uv.baseline_to_antnums(baseline)\n test_flags[pol_cnt, cnt] = test_uv.get_flags(ant_1, ant_2)\n\n assert np.all(test_flags == flag_array)", "def diff_flag(data):\n dp = np.abs(np.diff(data))\n dp = np.concatenate(([0], dp))\n return dp", "def sort_col(self, mask):\n nan_index = np.where(mask == True)[1]\n unique = np.unique(nan_index)\n nan_index = list(nan_index)\n dict = {}\n for item in unique:\n count = nan_index.count(item)\n dict[item] = count\n tmp = sorted(dict.items(), key=lambda e: e[1], reverse=True)\n sort_index = []\n for item in tmp:\n sort_index.append(item[0])\n return sort_index", "def _binarySplit(dataSet, feat_ind, val):\n\t\tif type(val).__name__ == 'set':\n\t\t\tD1_row_ind = np.array([value in val for value in dataSet[:, feat_ind]])\n\t\telse:\n\t\t\tD1_row_ind = dataSet[:, feat_ind] > val\n\t\tD2_row_ind = True ^ D1_row_ind\n\t\tD1, D2 = dataSet[D1_row_ind, :], dataSet[D2_row_ind, :]\n\t\treturn D1, D2", "def filterfn(read):\n return (read.is_proper_pair and read.is_paired and read.tlen > 0 and not read.is_supplementary and not read.is_duplicate and not read.is_unmapped and not read.mate_is_unmapped)", "def _sort(self):\n self.rows.sort(key=lambda x: (x['PERC1'], x['EQ'], x['PASS'], x['W2']),\n reverse=True)\n\n rank = 0\n prev_perc = 0\n prev_rank = 0\n for row in self.rows:\n if row[\"NR\"] == 0:\n # Something has already populated NR as 0 - so we set rank as\n # 0 too\n row['_RANK'] = 0\n row['_NR'] = 0\n continue\n\n # Increment our count\n rank += 1\n if row['PERC1'] == prev_perc:\n row['NR'] = \"\"\n row['_NR'] = prev_rank # I.e. joint 6th will be 6 here\n row['_RANK'] = rank # I.e. joint 6th could be 7, or 8 etc. 
here\n else:\n row['NR'] = rank\n row['_NR'] = rank\n row['_RANK'] = rank\n prev_perc = row['PERC1']\n prev_rank = rank", "def sort(self):\r\n\t\treturn sorted(self.sample)", "def sort(self):\n for i in range(self.num):\n for j in range(i,0,-1):\n if self.genepool[0][j] < self.genepool[0][j-1]:\n self.genepool[0][j], self.genepool[0][j-1] = self.genepool[0][j-1], self.genepool[0][j]\n else:\n break\n for i in range(self.num):\n for j in range(i,0,-1):\n if self.genepool[1][j] < self.genepool[1][j-1]:\n self.genepool[1][j], self.genepool[1][j-1] = self.genepool[1][j-1], self.genepool[1][j]\n else:\n break", "def process(self, tile):\n directory = os.path.join(self.Cg_Cfg.output_preprocess, tile.upper())\n print(\"Start speckle filtering: \" + tile.upper())\n year_outcore_list = [\"2019\", \"2018\"]\n year_filter_list = [\"2019\", \"2018\"]\n\n year_outcore_str = \"-\".join(year_outcore_list) # pour les noms de fichiers\n\n filelist_s1des = []\n filelist_s1asc = []\n filelist_s1des_updateoutcore = []\n filelist_s1asc_updateoutcore = []\n # Build the lists of files :\n # - for computing outcores\n # - for filtering\n\n for y in year_outcore_list:\n for file_it in glob.glob(os.path.join(directory, \"s1?_?????_??_DES_???_\" + y + \"????t??????.tif\")):\n filelist_s1des_updateoutcore.append(file_it)\n\n for file_it in glob.glob(os.path.join(directory, \"s1?_?????_??_ASC_???_\" + y + \"????t??????.tif\")):\n filelist_s1asc_updateoutcore.append(file_it)\n\n # Select only 100 images for the outcore dataset (for both ASC and DES outcores)\n filelist_s1des_updateoutcore = filelist_s1des_updateoutcore[:100]\n filelist_s1asc_updateoutcore = filelist_s1asc_updateoutcore[:100]\n\n for y in year_filter_list:\n for file_it in glob.glob(os.path.join(directory, \"s1?_?????_??_DES_???_\" + y + \"????t??????.tif\")):\n filelist_s1des.append(file_it)\n\n for file_it in glob.glob(os.path.join(directory, \"s1?_?????_??_ASC_???_\" + y + \"????t??????.tif\")):\n filelist_s1asc.append(file_it)\n\n print(filelist_s1des)\n print()\n print(filelist_s1asc)\n print()\n\n if self.Cg_Cfg.Reset_outcore:\n processed_files = []\n try:\n os.remove(os.path.join(directory, \"outcore\" + year_filter + \".txt\"))\n except:\n pass\n else:\n try:\n processed_files = \\\n pickle.load(open(os.path.join(directory, \"outcore\" + year_filter + \".txt\")))\n except pickle.PickleError:\n processed_files = []\n\n # Compute the outcores for ASC and DES images\n\n for file_it in processed_files:\n try:\n filelist_s1des_updateoutcore.remove(file_it)\n filelist_s1asc_updateoutcore.remove(file_it)\n except ValueError:\n pass\n\n # Build the strings containing the filenames to be processed\n filelist_s1des_updateoutcore_str = \" \".join(filelist_s1des_updateoutcore)\n filelist_s1asc_updateoutcore_str = \" \".join(filelist_s1asc_updateoutcore)\n filelist_s1des_str = \" \".join(filelist_s1des)\n filelist_s1asc_str = \" \".join(filelist_s1asc)\n\n pids = []\n\n # Adapts the processing ressources to only two processes\n\n ram_per_process = int(self.Cg_Cfg.ram_per_process * self.Cg_Cfg.nb_procs / 2)\n OTBThreads = int(self.Cg_Cfg.OTBThreads * self.Cg_Cfg.nb_procs / 2)\n\n ####### TK\n # On vide la liste des fichiers ASC pour eviter de calculer l'outcore\n filelist_s1asc_updateoutcore = []\n filelist_s1asc = []\n #\n\n if filelist_s1des_updateoutcore:\n command = 'export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS={};'.format(OTBThreads)\\\n + \"otbcli_MultitempFilteringOutcore -progress false -inl \"\\\n + filelist_s1des_updateoutcore_str + \" -oc \"\\\n + 
os.path.join(directory, \"outcore\" + year_outcore_str + \"_S1DES.tif\")\\\n + \" -wr {}\".format(self.Cg_Cfg.Window_radius)\\\n + \" -ram {}\".format(str(ram_per_process))\n pids.append([Popen(command, stdout=self.Cg_Cfg.stdoutfile,\n stderr=self.Cg_Cfg.stderrfile, shell=True), command])\n if filelist_s1asc_updateoutcore:\n command = 'export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS={};'.format(OTBThreads)\\\n + \"otbcli_MultitempFilteringOutcore -progress false -inl \"\\\n + filelist_s1asc_updateoutcore_str + \" -oc \"\\\n + os.path.join(directory, \"outcore\" + year_outcore_str + \"_S1ASC.tif\")\\\n + \" -wr \" + str(self.Cg_Cfg.Window_radius)\\\n + \" -ram {}\".format(str(ram_per_process))\n pids.append([Popen(command, stdout=self.Cg_Cfg.stdoutfile,\n stderr=self.Cg_Cfg.stderrfile, shell=True), command])\n try:\n os.makedirs(os.path.join(directory, \"filtered\"))\n except os.error:\n pass\n\n title = \"Compute outcore\"\n nb_cmd = len(pids)\n print(title + \"... 0%\")\n while len(pids) > 0:\n\n for i, pid in enumerate(pids):\n status = pid[0].poll()\n if status:\n print(\"Error in pid #\" + str(i) + \" id = \" + str(pid[0]))\n print(pid[1])\n del pids[i]\n break\n\n elif status == 0:\n del pids[i]\n print(title + \"... \" + str(int((nb_cmd - len(pids)) * 100. / nb_cmd)) + \"%\")\n time.sleep(0.2)\n break\n time.sleep(2)\n\n processed_files = processed_files + filelist_s1des_updateoutcore\\\n + filelist_s1asc_updateoutcore\n\n pickle.dump(processed_files, open(os.path.join(directory, \"outcore.txt\"), 'w'))\n\n # Compute the filtered images using the outcores\n\n pids = []\n if filelist_s1des:\n command = 'export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS={};'.format(OTBThreads)\\\n + \"otbcli_MultitempFilteringFilter -progress false -inl \"\\\n + filelist_s1des_str + \" -oc \"\\\n + os.path.join(directory, \"outcore\" + year_outcore_str + \"_S1DES.tif\")\\\n + \" -wr \" + str(self.Cg_Cfg.Window_radius) + \" -enl \"\\\n + os.path.join(directory, \"filtered\", \"enl_\" + year_outcore_str + \"_S1DES.tif\")\\\n + \" -ram {}\".format(str(ram_per_process))\n pids.append([Popen(command, stdout=self.Cg_Cfg.stdoutfile,\n stderr=self.Cg_Cfg.stderrfile, shell=True), command])\n\n if filelist_s1asc:\n command = 'export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS={};'.format(OTBThreads)\\\n + \"otbcli_MultitempFilteringFilter -progress false -inl \"\\\n + filelist_s1asc_str + \" -oc \"\\\n + os.path.join(directory, \"outcore\" + year_outcore_str + \"_S1ASC.tif\")\\\n + \" -wr \" + str(self.Cg_Cfg.Window_radius) + \" -enl \"\\\n + os.path.join(directory, \"filtered\", \"enl_\" + year_outcore_str + \"_S1ASC.tif\")\\\n + \" -ram {}\".format(str(ram_per_process))\n pids.append([Popen(command, stdout=self.Cg_Cfg.stdoutfile,\n stderr=self.Cg_Cfg.stderrfile, shell=True), command])\n\n title = \"Compute filtered images\"\n nb_cmd = len(pids)\n print(title + \"... 0%\")\n while len(pids) > 0:\n\n for i, pid in enumerate(pids):\n status = pid[0].poll()\n if status:\n print(\"Error in pid #\" + str(i) + \" id = \" + str(pid[0]))\n print(pid[1])\n del pids[i]\n break\n\n elif status == 0:\n del pids[i]\n print(title + \"... \" + str(int((nb_cmd - len(pids)) * 100. 
/ nb_cmd)) + \"%\")\n time.sleep(0.2)\n break\n time.sleep(2)\n\n filtering_directory = os.path.join(directory, 'filtered/')\n for f in os.listdir(filtering_directory):\n fullpath = os.path.join(filtering_directory, f)\n if os.path.isfile(fullpath) and f.startswith('s1') and f.endswith('filtered.tif'):\n dst = gdal.Open(fullpath, gdal.GA_Update)\n dst.SetMetadataItem('FILTERED', 'true')\n dst.SetMetadataItem('FILTERING_WINDOW_RADIUS', str(self.Cg_Cfg.Window_radius))\n dst.SetMetadataItem('FILTERING_PROCESSINGDATE', str(datetime.datetime.now()))", "def RedFilter(c):\n if (c[0] > c[1]) and (c[0] > c[2]) and (c[1] == c[2]): return True\n else: return False", "def sort_data(data,header,sort_tags,rev_sort,sortkind='mergesort'):\n tags = header.split(',') # Split the header string into separate\n # elements of a list.\n nsort = len(sort_tags) # Number of columns to sort by\n for ii in range(0,nsort,1):\n try:\n sc = tags.index(sort_tags[ii]) # Check tags match headings\n except ValueError:\n print('Error! Sort tag not recognised:',sort_tags[0])\n return None\n if (len(sort_tags) != len(rev_sort)):\n print(\"Error! Lists 'sort_tags' and 'rev_sort' have different lengths\")\n print('len(sort_tags) =',len(sort_tags),' len(rev_sort) =',\n len(rev_sort))\n return None\n sc = tags.index(sort_tags[0]) # First column to sort by\n sortdata = data # Take a copy to change it\n if rev_sort[0]:\n # Reverse sort data array\n sortdata = sortdata[sortdata[:,sc].argsort()[::-1]] # sort by column sc\n else:\n # Sort data array\n sortdata = sortdata[sortdata[:,sc].argsort()] # sort by column sc\n ii = 1\n for s_tag in sort_tags[1:]:\n sc = tags.index(s_tag) # Next column to sort by\n if rev_sort[ii]:\n sortdata = sortdata[sortdata[:,sc].argsort(kind=sortkind)[::-1]]\n else:\n sortdata = sortdata[sortdata[:,sc].argsort(kind=sortkind)]\n ii += 1\n return sortdata", "def sort(self): \n \n for i in range(0,len(self.tasks)):\n if len(self.tasks[i].build_block_belong) == 3:\n self.tree.add_element(self.tasks[i].build_block_belong,i)", "def test_sort_sample_ids_by_mapping_value(self):\r\n actual = sort_sample_ids_by_mapping_value(mapping_file=self.mapping_f1,\r\n field='days_since_epoch',\r\n field_type_f=float)\r\n expected = zip(['NotInOtuTable', '1', 'Z2', 'Z1', 'A'],\r\n [0.0, 5.7, 10, 23, 400000])\r\n self.assertEqual(actual, expected)", "def test_call_suppress_sort(self):\r\n\r\n exp_otu_ids = range(3)\r\n exp_clusters = [['uclust_test_seqs_0'],\r\n ['uclust_test_seqs_1'],\r\n ['uclust_test_seqs_2']]\r\n\r\n app = UclustOtuPicker(params={'Similarity': 0.90,\r\n 'suppress_sort': True,\r\n 'optimal': True,\r\n 'enable_rev_strand_matching': True,\r\n 'save_uc_files': False})\r\n obs = app(self.tmp_seq_filepath2)\r\n obs_otu_ids = sorted(obs.keys())\r\n obs_clusters = sorted(obs.values())\r\n # The relation between otu ids and clusters is abitrary, and\r\n # is not stable due to use of dicts when parsing clusters -- therefore\r\n # just checks that we have the expected group of each\r\n self.assertEqual(obs_otu_ids, exp_otu_ids)\r\n self.assertEqual(obs_clusters, exp_clusters)", "def sortRegioni(tupla):\n\t\n\treturn int(tupla[0])", "def sort(self):\n \n ct=[]\n rt=[]\n wr=[]\n # search for tags that aren't in the right position\n for i in range(len(self.contigs)):\n c = self.contigs[i]\n if c.wa:\n if not self.wa:\n self.wa=[]\n self.wa.extend(c.wa)\n if c.ct:\n newcts=[ct_tag for ct_tag in c.ct if ct_tag.name!=c.name]\n map(self.contigs[i].ct.remove,newcts)\n ct.extend(newcts)\n for j in range(len(c.reads)):\n r = 
c.reads[j]\n if r.rt:\n newrts=[rt_tag for rt_tag in r.rt if rt_tag.name!=r.rd.name]\n map(self.contigs[i].reads[j].rt.remove,newrts)\n rt.extend(newrts)\n if r.wr:\n newwrs=[wr_tag for wr_tag in r.wr if wr_tag.name!=r.rd.name]\n map(self.contigs[i].reads[j].wr.remove,newwrs)\n wr.extend(newwrs)\n # now sort them into their proper place\n for i in range(len(self.contigs)):\n c = self.contigs[i]\n for ct_tag in ct:\n if ct_tag.name==c.name:\n if self.contigs[i].ct is None:\n self.contigs[i].ct=[]\n self.contigs[i].ct.append(ct_tag)\n if rt or wr:\n for j in range(len(c.reads)):\n r = c.reads[j]\n for rt_tag in rt:\n if rt_tag.name==r.rd.name:\n if self.contigs[i].reads[j].rt is None:\n self.contigs[i].reads[j].rt=[]\n self.contigs[i].reads[j].rt.append(rt_tag)\n for wr_tag in wr:\n if wr_tag.name==r.rd.name:\n if self.contigs[i].reads[j].wr is None:\n self.contigs[i].reads[j].wr=[]\n self.contigs[i].reads[j].wr.append(wr_tag)", "def sort(self):\n for i in range(self.num):\n for j in range(i,0,-1):\n if self.genepool[0][j].fitness < self.genepool[0][j-1].fitness:\n self.genepool[0][j], self.genepool[0][j-1] = self.genepool[0][j-1], self.genepool[0][j]\n else:\n break\n for i in range(self.num):\n for j in range(i,0,-1):\n if self.genepool[1][j].fitness < self.genepool[1][j-1].fitness:\n self.genepool[1][j], self.genepool[1][j-1] = self.genepool[1][j-1], self.genepool[1][j]\n else:\n break", "def read_all_odbsql_stn_withfeedback(dataset, odbfile):\n columns, kinds, tdict = make_odb_header(odbfile, dataset) \n try: \n t=time.time() \n try:\n f=gzip.open(odbfile) \n except:\n print(odbfile, 'The zipped ODB file was not found !')\n return\n \n #d=['date@hdr','time@hdr','statid@hdr','vertco_reference_1@body','varno@body','reportype','andate','antime',\n # 'obsvalue@body','fg_depar@body','an_depar@body','biascorr@body','sonde_type@conv','collection_identifier@conv','source@hdr']\n \n # had to remove 'collection_identifier@conv' to make it work with 1, 3188, 1759, 1761 \n \n tdict['sensor@hdr']=numpy.float32\n tdict['ppcode@conv_body']=numpy.float32\n \n '''\n d=['date@hdr','time@hdr','statid@hdr','vertco_reference_1@body','varno@body','lon@hdr','lat@hdr','seqno@hdr',\n 'obsvalue@body','source@hdr' , 'vertco_type@body']\n \n if 'fg_depar@body' in columns: # creating the colkumns for era5fb \n d=d+['fg_depar@body','an_depar@body','biascorr@body','sonde_type@conv','reportype','andate','antime']\n '''\n \n# restrict feedback to certain columns \n #for c in columns:\n # if c not in d:\n # del tdict[c]\n \n #columns=d.copy()\n \n alldict=pd.read_csv(f,delimiter='\\t', usecols=columns, quoting=3,comment='#', skipinitialspace=True, dtype=tdict) #nrows=1000000)\n \n \"\"\" Case where erafb is not available \"\"\"\n if 'fg_depar@body' not in columns:\n alldict['fg_depar@body']=numpy.float32(numpy.NaN)\n alldict['an_depar@body']=numpy.float32(numpy.NaN)\n alldict['biascorr@body']=numpy.float32(numpy.NaN)\n alldict['sondetype@conv']=numpy.int32(-2147483648)\n alldict['reportype']=numpy.int32(-2147483648)\n \n #print(time.time()-t,sys.getsizeof(alldict)//1024//1024)\n idx=numpy.where(numpy.logical_or(alldict.reportype.values==16045,alldict.reportype.values==16068))[0]\n if len(idx)>0:\n \n #alldict.drop(index=alldict.index[idx],inplace=True)\n y=numpy.int64(alldict['date@hdr'].values)*1000000+alldict['time@hdr'].values\n x=numpy.unique(y)\n dropindex=[]\n for i in range(1,x.shape[0]):\n if x[i]-x[i-1]<60:\n idx=numpy.where(y==x[i-1])[0]\n if idx.shape[0]>0:\n dropindex.append(idx)\n else:\n print('empty 
index')\n if dropindex: \n dropindex = numpy.concatenate(dropindex).ravel()\n alldict.drop(index=alldict.index[dropindex],inplace=True)\n \n #print(time.time()-t) #,sys.getsizeof(alldict)//1024//1024)\n \n #idx=numpy.where(alldict.reportype.values==16045)[0]\n #if idx.shape[0]>0:\n #idy=numpy.where(numpy.logical_and(alldict.reportype.values!=16045,alldict.reportype.values!=16068))[0]\n #if idy.shape[0]>0:\n #idz=numpy.isin(alldict.andate.values[idy],alldict.andate.values[idx])\n #if numpy.sum(idz)>0:\n #alldict.drop(index=alldict.index[idy[idz]],inplace=True)\n \n #idx=numpy.where(alldict.reportype.values==16068)[0]\n #if idx.shape[0]>0:\n #idy=numpy.where(numpy.logical_and(alldict.reportype.values!=16045,alldict.reportype.values!=16068))[0]\n #if idy.shape[0]>0:\n #idz=numpy.isin(alldict.andate.values[idy],alldict.andate.values[idx])\n #if numpy.sum(idz)>0:\n #alldict.drop(index=alldict.index[idy[idz]],inplace=True)\n \n \n #print(time.time()-t,sys.getsizeof(alldict)//1024//1024)\n \n alldict['source_id'] = dataset.rjust(10)\n\n for c in alldict.columns:\n \n if type(alldict[c].iloc[0]) in [str,bytes]:\n l=alldict[c].shape[0]\n slen=len(alldict[c].values[0])\n alldict[c]=numpy.array(alldict.pop(c).values,dtype='S{}'.format(slen))\n #alldict[c]=numpy.string_(alldict[c])\n \n if type(alldict[c].iloc[0]) is numpy.int64:\n alldict[c]=numpy.int32(alldict[c])\n \n if type(alldict[c].iloc[0]) is numpy.float64:\n alldict[c]=numpy.float32(alldict[c])\n \n #print('after odb:',time.time()-t)\n \n except MemoryError:\n print('Reading ODB failed ! ' + odbfile)\n return alldict\n \n #print(odbfile,time.time()-t)#, sys.getsizeof(alldict))\n\n \n return alldict", "def disc_sort_img(image_list, img_vessel_list, base_label):\n\n return_vessel_list = []\n return_img_list = []\n for idx, vessel_idx in enumerate(img_vessel_list):\n if vessel_idx['idx'] == base_label:\n return_img_list.append(image_list[idx])\n return_vessel_list.append(vessel_idx)\n img_vessel_list.pop(idx)\n image_list.pop(idx)\n\n disc_exist_image_list = []\n disc_exist_vessel_list = []\n # OM_info 0 is fovea, 1 is disc\n\n\n for idx, vessel_idx in enumerate(img_vessel_list):\n tmp_vessel_idx = vessel_idx.copy()\n if tmp_vessel_idx['OM_info'][1] != None:\n tmp_vessel_idx['OM_info'] = np.sqrt(np.power(np.array(tmp_vessel_idx['OM_info'][1]) - np.array(return_vessel_list[0]['OM_info'][1]),2).sum())\n disc_exist_image_list.append([image_list[idx], tmp_vessel_idx['OM_info']])\n disc_exist_vessel_list.append(tmp_vessel_idx)\n\n\n disc_exist_image_list = sorted(disc_exist_image_list, key= lambda image_info : image_info[1])\n disc_exist_vessel_list = sorted(disc_exist_vessel_list, key=lambda vessel_info : vessel_info['OM_info'])\n\n fovea_exist_image_list = []\n fovea_exist_vessel_list = []\n for idx, vessel_idx in enumerate(img_vessel_list):\n tmp_vessel_idx = vessel_idx.copy()\n if tmp_vessel_idx['OM_info'][1] == None and tmp_vessel_idx['OM_info'][0] != None:\n tmp_vessel_idx['OM_info'] = np.sqrt(\n np.power(np.array(tmp_vessel_idx['OM_info'][0]) - np.array(return_vessel_list[0]['OM_info'][0]), 2).sum())\n fovea_exist_image_list.append([image_list[idx], tmp_vessel_idx['OM_info']])\n fovea_exist_vessel_list.append(tmp_vessel_idx)\n\n fovea_exist_image_list = sorted(fovea_exist_image_list, key=lambda image_info: image_info[1])\n fovea_exist_vessel_list = sorted(fovea_exist_vessel_list, key=lambda vessel_info : vessel_info['OM_info'])\n\n for idx, disc_exist_vessel_list_idx in enumerate(disc_exist_vessel_list):\n 
return_img_list.append(disc_exist_image_list[idx][0])\n return_vessel_list.append(disc_exist_vessel_list_idx)\n\n for idx, fovea_exist_vessel_list_idx in enumerate(fovea_exist_vessel_list):\n return_img_list.append(fovea_exist_image_list[idx][0])\n return_vessel_list.append(fovea_exist_vessel_list_idx)\n\n return return_img_list, return_vessel_list" ]
[ "0.53327185", "0.53327185", "0.53093725", "0.52391124", "0.5232353", "0.52152324", "0.52016133", "0.5197189", "0.51695603", "0.5158035", "0.51258236", "0.5116387", "0.51104957", "0.5099126", "0.5062088", "0.5054855", "0.5042443", "0.5038638", "0.5018408", "0.50026417", "0.49963453", "0.49787465", "0.49584383", "0.49555293", "0.49437356", "0.49429914", "0.49424896", "0.49357113", "0.49347976", "0.4929586", "0.49258077", "0.49181694", "0.48930812", "0.4874134", "0.4874134", "0.48720947", "0.48171887", "0.4816918", "0.4811186", "0.4807494", "0.48011664", "0.47999868", "0.47919762", "0.47793058", "0.477815", "0.47584867", "0.47482464", "0.47462398", "0.47351676", "0.47312474", "0.47267", "0.47234896", "0.47094858", "0.4707949", "0.47070563", "0.47047785", "0.46996942", "0.46949327", "0.46924406", "0.46844313", "0.46792674", "0.4670353", "0.46685585", "0.46677315", "0.46659607", "0.46648613", "0.46648133", "0.46632758", "0.46601307", "0.46594304", "0.46540025", "0.4649229", "0.46373117", "0.46340904", "0.4630468", "0.46292678", "0.46244028", "0.46175128", "0.46164504", "0.4614487", "0.4606978", "0.460319", "0.4600662", "0.45998517", "0.45989874", "0.4596239", "0.45884356", "0.4579053", "0.45728937", "0.45711255", "0.45689917", "0.45628634", "0.45594093", "0.455786", "0.4552195", "0.45511135", "0.454873", "0.45462334", "0.45425442", "0.45237958" ]
0.6509692
0
Render view to show appointment has been scheduled successfully
def complete_appointment(request, calendar_id):
    calendar = Calendar.objects.get(pk=calendar_id)
    return render(request, 'complete_appointment.html', {'calendar': calendar})
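A minimal illustrative sketch (editorial, not part of the dataset record): one way the view above could be wired into a Django URLconf. The module path, URL pattern, and pattern name are assumptions, not taken from the source.

# Hypothetical urls.py wiring for complete_appointment; all names are placeholders.
from django.urls import path

from . import views

urlpatterns = [
    # calendar_id is captured from the URL and passed straight to the view
    path('appointment/complete/<int:calendar_id>/',
         views.complete_appointment,
         name='complete_appointment'),
]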
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def schedule(request):\r\n\r\n return render(request, 'editorial/schedule.html', {})", "def schedule(request):\n return render(request, 'vaxcharts/schedule.html')", "def completed_appointments(request):\n appointments = AppointmentRequests.objects.all().filter(completed=True)\n return render(request,\"completed_appointments.html\",{\"appointments\":appointments})", "def appointments(request):\n now = timezone.localtime(timezone.now())\n data = {}\n tables = {}\n rows = []\n seen = Appointment.objects.filter(seen_time__isnull=False).filter(\n checkin_date__iexact=now.date())\n # Today's COMPLETE patients\n complete = seen.filter(finish_time__isnull=False)\n for a in complete:\n d = {}\n d['id'] = a.id\n d['name'] = a.first_name + ' ' + a.last_name\n h, m, s = to_hms(get_waiting_time(a, now.time()))\n wait_time = \"\" + str(h) + \":\" + str(m) + \":\" + str(s)\n d['wait_time'] = wait_time\n rows.append(d)\n tables['Completed'] = rows\n rows = []\n # Today's IN_SESSION patients\n in_session = seen.filter(finish_time__isnull=True)\n for a in in_session:\n d = {}\n d['id'] = a.id\n d['name'] = a.first_name + ' ' + a.last_name\n h, m, s = to_hms(get_waiting_time(a, now.time()))\n wait_time = \"\" + str(h) + \":\" + str(m) + \":\" + str(s)\n d['wait_time'] = wait_time\n rows.append(d)\n tables['In Session'] = rows\n data['tables'] = tables\n return render(request, 'doctor/appointments.html', data)", "def appointment():\r\n return render_template(\r\n 'about.html',\r\n title='About',\r\n year=datetime.now().year,\r\n message='Your application description page.'\r\n )", "def dashboard(request):\n appointments = AppointmentRequests.objects.all().filter(completed=False)\n return render(request,\"dashboard.html\",{\"appointments\":appointments})", "def create_appointment():\n\n msg = render_template('date')\n return question(msg)", "def baron_schedule(request):\n assert isinstance(request, HttpRequest)\n\n return render(\n request,\n 'AscensionESports_Baseline/schedule.html',\n {\n 'background': getBaronBackground(),\n 'color': getBaronColor(),\n 'title':'Baron League Schedule',\n 'query_results': Baron_Match_Report_Request(request),\n 'year': datetime.now().year,\n }\n )", "def create_appointment(request):\n dates = get_dates()\n users = User.objects.all()\n\n if request.POST:\n new_appointment = create_appointment_form(request, request.POST)\n if new_appointment:\n messages.add_message(request, messages.SUCCESS, 'Your appointment as been created successfully.')\n else:\n messages.add_message(request, messages.ERROR, 'An error occurred. 
Your appointment could not be created.'\n 'If this error persists, try contacting our service desk at'\n '1-800-RIX-AJAZ')\n return redirect('view_appointments')\n\n return render(request, 'create_appointment.html', {'the_user': request.user,\n 'dates': dates,\n 'users': users,\n 'hours': range(1, 13),\n 'minutes': range(1, 60)})", "def calendar_view(request, calendar_id):\n calendar_obj = Calendar.objects.get(pk=calendar_id)\n try:\n appointments = Appointment.objects.all().filter(calendar=calendar_obj)\n appointments = jsonify(appointments)\n except:\n appointments = []\n calendar_obj = calendar_obj.serialize()\n calendar_obj[\"non_working_days\"] = [day for day in [0, 1, 2, 3, 4, 5, 6] if day not in calendar_obj[\"working_days\"]]\n return render(request, 'calendar_view.html', {'calendar_obj': calendar_obj, 'appointments': appointments})", "def index(request):\n\n\treturn render(request, 'index.html', {})\n\n\t# uncomment this line vvv and comment the above ^^^ line once we cut off scheduling\n\t#return render(request, 'cannot_schedule_anymore.html', {})", "def create_patient_appointment():\n if request.method == 'POST':\n patient_email = request.form['patient_email']\n doctor_email = request.form['doctor_email']\n date = request.form['date']\n time = request.form['time']\n\n response = requests.post(server_url + 'patient/create_appointment', json={\n 'patient_email': patient_email,\n 'doctor_email': doctor_email,\n 'date': date,\n 'time': time\n })\n\n response = response.json()\n\n if response.get('Status') == \"DOCTOR_HAS_AN_APPOINTMENT_SELECTED_TIME_SLOT\":\n return render_template('patients/appointment_failed.html')\n elif response.get('Status') == \"DOCTOR_IS_NOT_AVAILABLE_AT_THAT_TIME\":\n return render_template('patients/appointment_failed.html')\n elif response.get('Status') == \"INVALID_PATIENT_EMAIL\":\n return render_template('patients/appointment_failed.html')\n elif response.get('Status') == \"INVALID_DOCTOR_EMAIL\":\n return render_template('patients/appointment_failed.html')\n else:\n referer = request.referrer\n return redirect(referer, code=302)\n else:\n return render_template('patients/dashboard.html')", "def timesheet(request):\r\n return render(\r\n request,\r\n 'timesheet/timesheet.html'\r\n )", "def view_attendance(request):\n\n\tcontext_dict = {\n\t\t'title': 'All Attendance',\n\t}\n\treturn render(request, \"viewAttendance.html\", context_dict)", "def reminder(request):\n return jingo.render(request, 'landings/reminder.html')", "def alarm_page_clock():\n events.run(blocking=False)\n if request.method == 'POST':\n event_name = str(request.values.get('event_name'))\n event_date_time = str(request.values.get('event_date'))\n event_desc = str(request.values.get('event_desc'))\n event_log(\"user submitted data...\",\"event :\"+event_name+\" date :\"+event_date_time+\" description: \"+event_desc+\"\")\n if event_name != \"\" and event_date_time != \"\":#logic for setting boleans so the program can determine what data to display with each event as it expires\n if request.form.get(\"repeat\") is None:\n repeat = \"false\"\n else:\n repeat = \"true\"\n if request.form.get(\"weather\") is None:\n weather = \"false\"\n else:\n weather = \"true\"\n if request.form.get(\"news\") is None:\n news = \"false\"\n else:\n news = \"true\"\n set_alarm(event_date_time.replace(\"T\", \" \"), event_name, event_desc,repeat,weather,news)\n speak_output(event_name + \"has been added to upcomming alarms\")\n return render_template(\"alarm.html\", Events_list=Events_list, 
notification_list=notification_list)", "def clerk_create_appointment():\n if request.method == 'POST':\n patient_email = request.form['patient_email']\n doctor_email = request.form['doctor_email']\n date = request.form['date']\n time = request.form['time']\n\n response_clerk_create_appointment = requests.post(server_url + 'medical_clerk/create_appointment', json={\n 'patient_email': patient_email,\n 'doctor_email': doctor_email,\n 'date': date,\n 'time': time\n })\n response_clerk_create_appointment = response_clerk_create_appointment.json()\n\n if response_clerk_create_appointment.get('Status') == \"INVALID_DOCTOR_EMAIL\":\n return render_template('clerks/clerk_appointment_failed.html')\n elif response_clerk_create_appointment.get('Status') == \"INVALID_PATIENT_EMAIL\":\n return render_template('clerks/clerk_appointment_failed.html')\n elif response_clerk_create_appointment.get('Status') == \"DOCTOR_IS_NOT_AVAILABLE_AT_THAT_TIME\":\n return render_template('clerks/clerk_appointment_failed.html')\n elif response_clerk_create_appointment.get('Status') == \"DOCTOR_HAS_AN_APPOINTMENT_SELECTED_TIME_SLOT\":\n return render_template('clerks/clerk_appointment_failed.html')\n else:\n referer = request.referrer\n return redirect(referer, code=302)\n else:\n return render_template('clerks/home.html')", "def appointment_date(begin_date):\n\n session.attributes['begin_date'] = str(begin_date)\n qs = render_template('time')\n return question(qs)", "def show_events():\n try:\n return render_template(\n 'events.html',\n events=get_events(),\n auth=is_organizer(get_user()),\n app_config=app.config\n )\n except RuntimeError as error:\n return str(error), 500", "def view_appointments(request):\n\n appointments = Appointment.objects.all().order_by('date')\n\n if request.user.userprofile.is_patient():\n appointments = Appointment.objects.filter(patient=request.user.id).order_by('date')\n\n elif request.user.userprofile.is_doctor():\n appointments = Appointment.objects.filter(doctor=request.user.id).order_by('date')\n\n return render(request, 'view_appointments.html', {'appointments': appointments,\n 'the_user': request.user})", "def read_appointments():\n if current_user.is_admin is False:\n appointments = Appointment.query.filter_by(user_id=current_user.id).all()\n else:\n appointments = Appointment.query.all()\n\n return render_template('appointments/index.html.j2', appointments=appointments, title='appointments')", "def create_appointment():\n\n form = AppointmentForm()\n\n if form.validate_on_submit():\n\n appointment = Appointment(\n title = form.title.data,\n description = form.description.data,\n location = form.location.data,\n start = form.start.data,\n client = form.client.data,\n user = current_user\n )\n\n try:\n db.session.add(appointment)\n db.session.commit()\n\n flash('Successfully created the appointment.')\n\n return redirect(url_for('appointment.read_appointments'))\n except:\n flash('Error creating the appointment')\n\n return render_template('appointments/form.html.j2', form=form, title='Create appointment')", "def appointment_list(self, request, **dict):\n\t\tdata = self.get_serializer(self.get_queryset(), many=True).data\n\t\treturn Response(data, status.HTTP_200_OK)", "def follow_workoutplan(request, pk):\n return render(request, 'workouts/starting_date_form.html')", "def schedule(request,status):\n\tnow = datetime.today()\n\tsched = Sample.objects.all()\n\tif status:\n\t\tsched = Sample.objects.filter(status=status)\n\tsched = sched.order_by('date_added')\n\tsched = [(now + 
timedelta(i+1), s) for i,s in enumerate(sched)]\n\treturn render_to_response('schedule/schedule.html', \n\t\t\t\t\t\t\t{'sched': sched},\n\t\t\t\t\t\t\tcontext_instance=RequestContext(request))", "def save_appointment_details(request, calendar_id):\n def schedule_mail(reminder_date, appointment):\n # Configure our scheduler for reminder\n try:\n trigger = DateTrigger(run_date=reminder_date)\n scheduler.add_job(send_appointment_mail, args=[appointment], trigger=trigger)\n except Exception as exp:\n print(exp)\n \n def schedule_sms(reminder_date, appointment):\n # Configure our scheduler for reminder\n try:\n trigger = DateTrigger(run_date=reminder_date)\n scheduler.add_job(send_appointment_sms, args=[appointment], trigger=trigger)\n except Exception as exp:\n print(exp)\n \n start_time = request.GET['start_time'][:19]\n end_time = request.GET['end_time'][:19]\n \n start_time = datetime.strptime(start_time, \"%Y-%m-%dT%H:%M:%S\")\n end_time=datetime.strptime(end_time, \"%Y-%m-%dT%H:%M:%S\")\n \n calendar_obj = Calendar.objects.get(pk=calendar_id)\n # if this is a POST request we need to process the form data\n if request.method == 'POST':\n\n # create a form instance and populate it with data from the request:\n form = AppointmentForm(request.POST)\n\n # check whether it's valid and save it\n if form.is_valid():\n # Save appointment details\n \n mobilephone = form.data['mobilephone']\n email = form.data['email']\n first_name = form.data['first_name']\n last_name = form.data['last_name']\n notes = form.data['notes']\n\n appointment = Appointment(start_time=start_time, end_time=end_time, first_name=first_name, \n last_name=last_name, email=email, mobilephone=mobilephone, notes=notes)\n \n appointment.calendar = calendar_obj\n appointment.save()\n\n try:\n send_appointment_mail(appointment) # send appointment details email\n except Exception as exp:\n print(exp)\n \n try:\n send_appointment_sms(appointment) # send appointment details sms\n except Exception as exp:\n print(exp)\n \n # Calculate reminder schedule dates\n reminder1 = start_time - timedelta(hours=2)\n reminder2 = start_time - timedelta(hours=24)\n reminder3 = start_time - timedelta(days=7)\n\n # Schedule mails\n schedule_mail(reminder1, appointment)\n schedule_mail(reminder2, appointment)\n schedule_mail(reminder3, appointment)\n \n # Schedule sms\n schedule_sms(reminder1, appointment)\n schedule_sms(reminder2, appointment)\n schedule_sms(reminder3, appointment)\n \n return redirect(reverse('appointment:complete_appointment', args=[calendar_id]))\n \n # if a GET (or any other method) we'll create a blank form\n else:\n form = AppointmentForm()\n return render(request, 'appointment_form.html', {'form': form, 'start_time': start_time, 'end_time': end_time,\n 'office_location': calendar_obj.office_location})", "def meeting(request, meeting_id):\n meeting = get_object_or_404(Meeting, pk=meeting_id)\n context = {'meeting': meeting}\n return render(request, 'sacms/meeting.html', context)", "def appointment_time(begin_time):\n\n session.attributes['begin_time'] = str(begin_time)\n msg = render_template('end_date')\n return question(msg)", "def index(http_request, year=datetime.datetime.now().strftime(\"%Y\"), month=datetime.datetime.now().strftime(\"%m\")):\n\t# make sure the year number and month number are ints\n\tyear = int(year)\n\tmonth = int(month)\n\ttimestamp = datetime.datetime(year, month, 1)\n\t\n\t#initialize container for dates to be stored\n\tdate_list = []\n\t\n\tevents = 
Event.objects.filter(edate__year=year).filter(edate__month=month)\n\tfor event in events:\n\t\tdate_list.append({'id':event.id, 'day':datetime.date(event.edate.year, event.edate.month, event.edate.day), 'title':event.title, 'class':'event'})\n\n\tprojects = Project.objects.filter(due__year=year).filter(due__month=month)\n\tfor project in projects:\n\t\tdate_list.append({'id':project.id, 'day':datetime.date(project.due.year, project.due.month, project.due.day), 'title':project.name, 'class':'projects'})\n\t\t\t\n\t# next month's timestamp\n\tif month == 12:\n\t\tnext_month = datetime.datetime(year+1, 1, 1)\n\telif month < 12:\n\t\tnext_month = datetime.datetime(year, month+1, 1)\n\t\n\tupcoming_projects = Project.objects.filter(due__year=next_month.year).filter(due__month=next_month.month)\n\t\n\t\n\treturn render_to_response('schedule_cal.html', \n\t\t\t\t {'date_list':date_list, \n\t\t\t\t 'date':timestamp, \n 'urlprefix': urlprefix (),\n\t\t\t\t 'upcoming_projects':upcoming_projects}, \n\t\t\t\t )", "def test_calendar(self):\n response = self.app.get(\"/schedule\")\n self.assertTrue(response.status_code, 200)", "def appointments(request):\n try:\n doctor = Doctor.objects.get(user=request.user)\n except Doctor.DoesNotExist:\n raise Http404(\"Doctor with current user instance not found!\")\n\n #now get the doctors appointments with the most recent first.\n appointments = get_appointments_list(doctor.doctor_appointments.all().order_by(\"-date\"))\n\n return JsonResponse({\n \"appointments\": appointments\n })", "def mainpage():\n return render_template('presence_weekday.html')", "def matchScheduling(request): \n registered = False\n if request.method == 'POST':\n scheduling_form = MatchSchedulingForm(data=request.POST)\n if scheduling_form.is_valid():\n scheduling = scheduling_form.save()\n registered = True\n else:\n print(scheduling_form.errors)\n else:\n scheduling_form = MatchSchedulingForm()\n matches_obj = MatchScheduling.objects.all()\n return render(request,'footBallApp/match_scheduling.html',\n {'scheduling_form':scheduling_form,\n 'registered':registered, 'matches_obj':matches_obj})", "def base_dashboard(request):\n appointments = None\n\n if request.user.userprofile.is_patient():\n appointments = Appointment.objects.filter(patient=request.user.id).order_by('date')\n elif request.user.userprofile.is_doctor():\n appointments = Appointment.objects.filter(doctor=request.user.id).order_by('date')\n else:\n appointments = Appointment.objects.all().order_by('date')\n\n return render(request, 'base_dashboard.html', {'appointments': appointments, 'the_user': request.user})", "def show_events(request):\n event_list = Event.objects.order_by('-date')\n\n event_form = EventForm()\n\n context = {'events': event_list, 'form': event_form}\n return render(request, 'metro_app/events_view.html', context)", "def view_event(request, owner_type, owner_id, event_id):\n # Like before, get the request's context.\n context = RequestContext(request)\n\n user = request.user\n edit_priv = False\n # If it's a HTTP POST, we're interested in processing form data.\n if request.method == 'GET':\n verified_obj = verified_calendar(context, owner_type, owner_id, user)\n if not isinstance(verified_obj, HttpResponse):\n calendar, edit_priv = verified_obj\n else:\n return verified_obj\n\n event = Event.objects.get(id=int(event_id))\n\n #If the event mentioned doesn't belong to the calendar\n if not (event.cal.id == calendar.id):\n return render_permission_denied(context, 'view this event')\n\n else:\n return 
render_to_response(reverse('index'), {}, context)\n\n # Render the template depending on the context.\n return render_to_response(\n 'scheduler/view_event.html', {'event': event, 'edit_priv': edit_priv, 'owner_type': owner_type},\n context)", "def index():\n # return render_template('index.html', events=get_calendar_events_today(CALENDAR_URL))\n return render_template('index.html', events=get_calendar_events_limit(CALENDAR_URL), events_sorted=True)", "def calendar_view_basic(request, owner_type, owner_id):\n\n # Like before, obtain the context for the user's request.\n context = RequestContext(request)\n\n user = request.user\n profile = get_profile(user)\n user_profile = profile[0]\n\n if request.method == 'GET':\n verified_obj = verified_calendar(context, owner_type, owner_id, user)\n if not isinstance(verified_obj, HttpResponse):\n calendar, edit_priv = verified_obj\n events = calendar.event_set.all()\n else:\n return verified_obj\n\n response_object = {'calendar' : calendar, 'events': events,\n 'edit_priv': edit_priv, 'owner_type': owner_type,\n }\n\n if owner_type == \"user\":\n\n # send school calendar\n profile_school = user_profile.getSchool()\n response_object['school'] = profile_school\n if profile_school:\n response_object['school_events'] = profile_school.cal.event_set.all()\n\n # send course calendars\n if isinstance(user_profile, Instructor):\n profile_courses = Course.objects.filter(creator=user.id)\n else:\n profile_courses = user_profile.courses.all()\n course_calendars = []\n for course in profile_courses:\n course_calendars.append({'course': course, 'events': course.cal.event_set.all()})\n response_object['course_calendars'] = course_calendars;\n return render_to_response('scheduler/calendar_basic.html',\n response_object, context)\n else:\n # No context variables to pass to the template system, hence the\n # blank dictionary object...\n return render_to_response('/login.html', {}, context)", "def home_view(request):\n return HttpResponseRedirect('/schedule/')", "def scheduled_task(page_view_id):\n page_view = session.query(PageView).filter_by(id=page_view_id).one()\n print(f'Pretending to handle {page_view}')", "def touragenda(request):\n active_events = TourAgendaModel.objects.order_by('number')\n friday_events = TourAgendaModel.objects.all().filter(day='FRIDAY')\n saturday_events = TourAgendaModel.objects.all().filter(day='SATURDAY')\n sunday_events = TourAgendaModel.objects.all().filter(day='SUNDAY')\n\n context = {\n 'active_events': active_events,\n 'friday_events': friday_events,\n 'saturday_events': saturday_events,\n 'sunday_events': sunday_events,\n }\n\n return render(request, 'tourAgenda.html', context=context)", "def meetings(request):\n meeting_list = Meeting.objects.order_by('held_date')\n context = {'meeting_list': meeting_list}\n return render(request, 'sacms/meetings.html', context)", "def edit_appointment(request, id):\n users = User.objects.all()\n appointment = get_object_or_404(Appointment, pk=id)\n if request.POST:\n post = request.POST\n date_string = post.get(\"date\") + \"-\" + post.get(\"time\")\n try:\n date = datetime.datetime.strptime(date_string, '%Y-%m-%d-%H:%M')\n appointment.date = date\n except ValueError:\n pass\n the_user = request.user\n notes = post.get(\"notes\")\n appointment.notes = notes\n\n if the_user.userprofile.is_doctor():\n try:\n patient_id = int(post.get(\"patient\", the_user.pk))\n patient = User.objects.get(pk=patient_id)\n appointment.patient = patient\n except ValueError:\n pass\n\n elif 
request.user.userprofile.is_patient():\n try:\n doctor_id = int(post.get(\"doctor\", the_user.pk))\n doctor = User.objects.get(pk=doctor_id)\n appointment.doctor = doctor\n except ValueError:\n pass\n\n if appointment:\n messages.add_message(request, messages.SUCCESS, 'Your changes have been saved.')\n else:\n messages.add_message(request, messages.ERROR, 'An error occurred. Please contact an admin for assistance.')\n appointment.save()\n return redirect('view_appointments')\n return render(request, 'edit_appointment.html', {'appointment': appointment,\n 'the_user': request.user,\n 'users': users})", "def navebarre_vent(request):\r\n return render(request, 'menu/navebarre_vent.html')", "def displayWorkout():\n\n return render_template(\"workout.html\")", "def tentative_schedule(request):\n\n\tshows_dict = {\n\t\t0: [],\n\t\t1: [],\n\t\t2: [],\n\t\t3: [],\n\t\t4: [],\n\t\t5: [],\n\t\t6: []\n\t}\n\n\tfor i in range(7):\n\t\tfor show in Show.objects.filter(day=i).order_by('time'):\n\t\t\t\tshow_time = show.time\n\t\t\t\tdj = str(show.dj)\n\t\t\t\tif show.co_dj and str(show.co_dj) != \"Unknown Dj\":\n\t\t\t\t\tdj += \" & \" + str(show.co_dj)\n\t\t\t\tshows_dict[i].append([dj, show_time.strftime('%I:%M %p')])\n\n\treturn render(request, 'tentative_schedule.html', {\n\t\t\t'shows_dict': shows_dict\n\t})", "def timesheet_all(request):\r\n return render(\r\n request,\r\n 'timesheet/timesheet_all.html'\r\n )", "def attendee_notify(request, event_id):\n return render_to_response('gr/attendee_notify.html', \\\n\t context_instance=RequestContext(request))", "def upcoming_ical(request):\n try:\n filter_params = parse_agenda_filter_params(request.GET)\n except ValueError as e:\n return HttpResponseBadRequest(str(e))\n \n today = datetime.date.today()\n\n # get meetings starting 7 days ago -- we'll filter out sessions in the past further down\n meetings = data_for_meetings_overview(Meeting.objects.filter(date__gte=today-datetime.timedelta(days=7)).prefetch_related('schedule').order_by('date'))\n\n assignments = list(SchedTimeSessAssignment.objects.filter(\n schedule__in=[m.schedule_id for m in meetings] + [m.schedule.base_id for m in meetings if m.schedule],\n session__in=[s.pk for m in meetings for s in m.sessions if m.type_id != 'ietf'],\n timeslot__time__gte=today,\n ).order_by(\n 'schedule__meeting__date', 'session__type', 'timeslot__time'\n ).select_related(\n 'session__group', 'session__group__parent', 'timeslot', 'schedule', 'schedule__meeting'\n ).distinct())\n\n AgendaKeywordTagger(assignments=assignments).apply()\n\n # apply filters\n if filter_params is not None:\n assignments = [a for a in assignments if should_include_assignment(filter_params, a)]\n\n # we already collected sessions with current_status, so reuse those\n sessions = {s.pk: s for m in meetings for s in m.sessions}\n for a in assignments:\n if a.session_id is not None:\n a.session = sessions.get(a.session_id) or a.session\n a.session.ical_status = ical_session_status(a)\n\n # handle IETFs separately\n ietfs = [m for m in meetings if m.type_id == 'ietf']\n preprocess_meeting_important_dates(ietfs)\n\n # icalendar response file should have '\\r\\n' line endings per RFC5545\n response = render_to_string('meeting/upcoming.ics', {\n 'vtimezones': ''.join(sorted(list({meeting.vtimezone() for meeting in meetings if meeting.vtimezone()}))),\n 'assignments': assignments,\n 'ietfs': ietfs,\n }, request=request)\n response = re.sub(\"\\r(?!\\n)|(?<!\\r)\\n\", \"\\r\\n\", response)\n\n response = HttpResponse(response, 
content_type='text/calendar')\n response['Content-Disposition'] = 'attachment; filename=\"upcoming.ics\"'\n return response", "def general_timeline():\n return render_template('timeline.html', general=True, show_username=True)", "def analysis():\n\n response_all_doctors_and_appointments = requests.post(server_url + 'doctor/all_doctors_and_all_appointments')\n doctors_and_appointments = response_all_doctors_and_appointments.json()\n\n return render_template('clerks/analysis.html', doctors_and_appointments=doctors_and_appointments)", "def test_calendar_view_list(self):\n response = self.client.get('/module/calendar/')\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'frontend/appointment/calendar/list.html')\n\n request = self.factory.get('/module/calendar/')\n request.user = self.user\n request.session = {}\n response = calendar_list(request)\n self.assertEqual(response.status_code, 200)", "def patient_done(request, appointment_id):\n appt_obj = Appointment.objects.get(pk=appointment_id)\n if appt_obj.finish_time is None:\n # we have not set this appointment as complete yet\n appt_obj.finish_time = timezone.localtime(timezone.now()).time()\n response = patch_appointment_status(request, appointment_id, COMPLETE)\n if response != 204:\n return index(request, {'msg': err_patch})\n appt_obj.save()\n # TODO: Create a notes log to allow doctor to view history and revert\n return HttpResponseRedirect(reverse('doctor:appointments'))", "def event_detail():\n # getting event id from homepage \n event_id = request.args.get('eventId')\n # counting the total number of registeration for an event.\n registrant_count = db.session.query(Register).filter(Register.event_id ==event_id).count()\n event = db.session.query(Event).filter(Event.event_id == event_id).first()\n format = '%a %I:%M %p %b %d, %y'\n event.date = event.date.strftime(format)\n event.time = event.time.strftime(format)\n location = event.location\n return render_template(\"event.html\", event= event, registrant_count=registrant_count)", "def test_appointment_date(self):\n appt_date = datetime.date.today() + datetime.timedelta(days=7) # Default for email\n reminders.Patient.objects.filter(\n pk__in=[self.test_patient.pk, self.other_patient.pk]\n ).update(next_visit=appt_date)\n confirmed = self.create_confirmed_notification(self.test_patient, appt_date)\n unconfirmed = self.create_unconfirmed_notification(self.other_patient, appt_date)\n\n self.startRouter()\n self.router.logger.setLevel(logging.DEBUG)\n\n # run email job\n from afrims.apps.reminders.app import daily_email_callback\n daily_email_callback(self.router)\n\n self.assertEqual(len(mail.outbox), 1)\n message = mail.outbox[0]\n self.assertPatientInMessage(message, self.test_patient)\n self.assertPatientInMessage(message, self.other_patient)\n self.assertPatientNotInMessage(message, self.unrelated_patient)\n self.stopRouter()", "def present_view(self, confirmation=False, error=None):\n if confirmation:\n input(\"The entry has been added. 
Press Enter to continue\")\n return\n if error:\n print(\n \"\\n** ERROR **\\n{}\\n\\nPlease try again\".format(\n \"\\n\".join(f\"{k}: {' '.join(v)}\" for k, v in error.messages.items())\n )\n )\n print(self._layout)\n task = {\n \"date\": input(\"Enter date (DD/MM/YYYY): \"),\n \"title\": input(\"Task Title: \"),\n \"time_spent\": input(\"Time spent (rounded minutes): \"),\n \"notes\": input(\"Notes (Optional): \"),\n }\n return task", "def look_vacant_offices(request):\n if request.GET:\n if request.GET['datetime_from'] and request.GET['datetime_to']:\n offices = NumberOffice.objects.all()\n reservations = Reservation.objects.all()\n post_from = request.GET['datetime_from']\n post_to = request.GET['datetime_to']\n filteroffice = reservations.all().filter(\n datetime_from__gte=post_from, datetime_to__lte=post_to\n )\n reservednumberoffice = set()\n # set reserved office for corect time\n for i in filteroffice:\n reservednumberoffice.add(i.number_office)\n context = {'offices': offices, \"reservednumberoffice\": reservednumberoffice}\n return render(request, 'coworkings/vacant_offices.html', context)\n else:\n text = 'Enter the correct data or fill in all fields.'\n context = {'text': text}\n return render(request, 'coworkings/look_vacant_offices.html', context)\n else:\n return render(request, 'coworkings/look_vacant_offices.html')", "def index(request):\r\n return render(request, 'team_tasks_managers/index.html')", "def view_event_list(request, **kwargs):\n #lu = get_common_lookup(request)\n lu = { 'page_title' : 'MCB Event Tweets'\\\n , 'IS_TWEET_EVENT_PAGE' : True\n , 'TWEET_SUCCESS' : kwargs.get('success_msg', False)\n }\n \n if not request.user.is_authenticated():\n return HttpResponse('not logged in')\n \n if not is_user_in_group(request, TWEET_GROUP_NAME):\n return HttpResponse('not in tweet group')\n \n upcoming_events = MCBTweetEvent.get_events_awaiting_approval()\n \n lu.update({ 'upcoming_events' : upcoming_events\\\n #, 'my_checked_codes' : get_previously_checked_expense_codes(request)\\\n })\n #\n return render_to_response('tweet/events/event_list.html', lu, context_instance=RequestContext(request))", "def index(request):\n try:\n meeting = Meeting.objects.latest('when')\n num_rsvped = Person.objects.filter(ynm='Y').count()\n\n except (KeyError, Meeting.DoesNotExist, Person.DoesNotExist):\n raise Http404\n\n return render(request,'chipy/chipy.html',{'meeting':meeting,'num_rsvped':num_rsvped})", "def interested_act(request):\n assert isinstance(request, HttpRequest)\n return render(\n request,\n 'app/interested.html',\n context_instance=RequestContext(request,\n {\n 'title': 'Which Act are you interested in?',\n 'year': datetime.now().year,\n })\n )", "def print_event_report(self):\n data = {\n 'ids': self.ids,\n 'model': self._name,\n 'form': {\n 'event_start_date': self.event_start_date,\n 'event_end_date': self.event_end_date,\n 'agenda': self.env.context.get('default_agenda_id'),\n },\n }\n return self.env.ref('agenda_esi.recap_report').report_action(self, data=data)", "def refresh_wait(request):\n now = timezone.localtime(timezone.now())\n appointments = Appointment.objects.filter(\n checkin_date__iexact=now.date())\n count = appointments.count()\n total_wait = sum([get_waiting_time(appt, now.time()) for appt in appointments])\n if count == 0:\n average_seconds = 0\n else:\n average_seconds = total_wait / count\n h, m, s = to_hms(average_seconds)\n data = {'hours': h,\n 'minutes': m,\n 'seconds': s}\n return JsonResponse(data) # JsonResponse; returned to $.ajax", "def 
agenda_ical(request, num=None, name=None, acronym=None, session_id=None):\n meeting = get_meeting(num, type_in=None)\n schedule = get_schedule(meeting, name)\n updated = meeting.updated()\n\n if schedule is None and acronym is None and session_id is None:\n raise Http404\n\n assignments = SchedTimeSessAssignment.objects.filter(\n schedule__in=[schedule, schedule.base],\n session__on_agenda=True,\n )\n assignments = preprocess_assignments_for_agenda(assignments, meeting)\n AgendaKeywordTagger(assignments=assignments).apply()\n\n try:\n filt_params = parse_agenda_filter_params(request.GET)\n except ValueError as e:\n return HttpResponseBadRequest(str(e))\n\n if filt_params is not None:\n # Apply the filter\n assignments = [a for a in assignments if should_include_assignment(filt_params, a)]\n\n if acronym:\n assignments = [ a for a in assignments if a.session.historic_group and a.session.historic_group.acronym == acronym ]\n elif session_id:\n assignments = [ a for a in assignments if a.session_id == int(session_id) ]\n\n for a in assignments:\n if a.session:\n a.session.ical_status = ical_session_status(a)\n\n return render(request, \"meeting/agenda.ics\", {\n \"schedule\": schedule,\n \"assignments\": assignments,\n \"updated\": updated\n }, content_type=\"text/calendar\")", "def archives(request):\n template_var = base_template_vals(request)\n try:\n template_var[\"events\"] = Event.objects.all().filter(\n is_approved=True).order_by(\"-event_time\")\n except Event.DoesNotExist:\n raise Http404\n return render_to_response(\"event/event_listview.html\", template_var,\n context_instance=RequestContext(request))", "def schedule_content(request):\r\n\r\n stories = Story.objects.filter(organization=request.user.organization).exclude(archived=True)\r\n\r\n # data = {}\r\n # data['success'] = 1\r\n # data['result'] = []\r\n data = []\r\n\r\n for story in stories:\r\n # Facet Schedules\r\n for facet in story.facetstory.all():\r\n credit = {}\r\n for user in facet.credit.all():\r\n credit['id'] = []\r\n credit['id'].append(user.credit_name)\r\n credit['id'].append(user.get_absolute_url())\r\n editor = {}\r\n for user in facet.editor.all():\r\n editor['id'] = []\r\n editor['id'].append(user.credit_name)\r\n editor['id'].append(user.get_absolute_url())\r\n print credit\r\n if facet.due_edit:\r\n edit_event_dict = {}\r\n edit_event_dict['id'] = facet.id\r\n edit_event_dict['title'] = facet.name.encode('utf-8')\r\n edit_event_dict['description'] = facet.description.encode('utf-8')\r\n edit_event_dict['due-edit'] = time.mktime(facet.due_edit.timetuple())\r\n edit_event_dict['editor'] = facet.editor.credit_name\r\n edit_event_dict['credit'] = credit\r\n edit_event_dict['url'] = facet.get_absolute_url()\r\n edit_event_dict['start'] = time.mktime(facet.due_edit.timetuple()) * 1000\r\n edit_event_dict['end'] = (time.mktime(facet.due_edit.timetuple()) * 1000) + 60\r\n edit_event_dict['overlap'] = True\r\n edit_event_dict['allDay'] = False\r\n edit_event_dict['backgroundColor'] = '#00aced'\r\n edit_event_dict['textColor'] = '#fff'\r\n data.append(edit_event_dict)\r\n if facet.run_date:\r\n run_event_dict = {}\r\n run_event_dict['id'] = facet.id\r\n run_event_dict['title'] = facet.name.encode('utf-8')\r\n run_event_dict['description'] = facet.description.encode('utf-8')\r\n run_event_dict['due-edit'] = time.mktime(facet.due_edit.timetuple())\r\n run_event_dict['editor'] = facet.editor.credit_name\r\n run_event_dict['credit'] = credit\r\n run_event_dict['url'] = facet.get_absolute_url()\r\n 
run_event_dict['class'] = 'event_run'\r\n run_event_dict['start'] = time.mktime(facet.run_date.timetuple()) * 1000\r\n run_event_dict['end'] = (time.mktime(facet.run_date.timetuple()) * 1000) + 60\r\n run_event_dict['overlap'] = True\r\n run_event_dict['backgroundColor'] = '#5cb85c'\r\n run_event_dict['textColor'] = '#fff'\r\n data.append(run_event_dict)\r\n\r\n # print \"DATA: \", data\r\n\r\n return HttpResponse(json.dumps(data), content_type='application/json')", "def scheduleMe(page):\n querystring_combos = request.cookies.get('course_combos')\n if not querystring_combos:\n return render_template('404.html'), 404\n combos = json.loads(querystring_combos)\n #print querystring_combos\n\n count = len(combos)\n pagination_needed = count > PER_PAGE\n this_page_combos = combos\n if pagination_needed:\n this_page_combos = getCombosForPage(page, PER_PAGE, count, combos)\n last_page = isLastPage(page, count, PER_PAGE)\n if not this_page_combos and page != 1:\n return render_template('404.html'), 404\n return render_template(\"sched.html\",\n title=\"Scheduler\",\n combos=this_page_combos,\n combo_amount=str(count),\n page=page,\n last_page=last_page,\n pagination=pagination_needed)", "def see_patient(self, request):\n appt = self.get_object() # Appointment object\n appt.status = IN_SESSION\n appt.seen_time = timezone.localtime(timezone.now()).time()\n response = patch_appointment_status(request, appt.id, IN_SESSION)\n if response:\n appt.save()\n return None\n return err_patch", "def show_rsd_thing(request, date, plain=False,\n regular=False, events=False, lost_and_found=False, jobs=False):\n \n if regular:\n regular = Announcement.regular.running_on(date).order_by('-date_start', 'pk')\n if events:\n events = Announcement.events.running_on(date).order_by('event_date', 'event_time', 'pk')\n if lost_and_found:\n lost_and_found = Announcement.lost_and_found.running_on(date).order_by('-date_start', 'pk')\n if jobs:\n midnight = datetime.datetime.combine(date, datetime.time(23, 59))\n jobs = JobListing.published.order_by('is_filled', '-pub_date') \\\n .filter(pub_date__lte=midnight) \\\n .filter(pub_date__gte=midnight - datetime.timedelta(days=7))\n if date == datetime.date.today():\n jobs = jobs.filter(is_filled=False)\n \n if not any(x.count() if x else 0 for x in (regular, events, lost_and_found, jobs)):\n raise Http404\n \n tomorrow = date + datetime.timedelta(days=1)\n comments = PublicComment.visible.filter(time__lt=tomorrow).order_by('-time')\n \n order = lambda x, *ord: x.order_by(*ord) if x else []\n data = {\n 'year': date.year, 'month': date.month, 'day': date.day, 'date': date,\n 'announcements': regular or [],\n 'events': events or [],\n 'jobs': jobs or [],\n 'lost_and_found': lost_and_found or [],\n 'comments': comments[:3],\n 'stories': Article.published.order_by('-pub_date').filter(is_racy=False)[:3],\n 'for_email': boolean_arg(request.GET.get('for_email', ''), False),\n }\n template = \"issue/rsd.\" + ('txt' if plain else 'html')\n return render_to_response(template, data)", "def my_schedule(request,username):\n\n user = get_object_or_404(User, username=username)\n user_profile = UserProfile.objects.get_or_create(user=user)[0]\n weekly_schedule = WeeklySchedule.objects.filter(user_profile=user_profile)\n\n userScheduleInlineFormSet = inlineformset_factory(UserProfile, WeeklySchedule,\n fields=('day_of_week', 'time_from', 'time_to'),\n extra=1, can_delete=True)\n\n # prepare data for rendering in table\n user_schedule = 
weekly_schedule.values_list('day_of_week','time_from','time_to')\n rows = pivot_schedule(user_schedule)\n\n if request.method == 'POST':\n formset = userScheduleInlineFormSet(request.POST, instance=user_profile,)\n if formset.is_valid():\n formset.save()\n return redirect('my_schedule', user.username)\n else:\n formset = userScheduleInlineFormSet(instance=user_profile,)\n\n return render(\n request,\n 'schedule/myschedule.html',\n {\n 'formset': formset,\n 'days_of_week': WeeklySchedule.DAY_OF_WEEK,\n 'data': rows,\n }\n )", "def create_appointment_form(request, post):\n # string_date = \"{0}-{1}-{2}\".format(year, month, day)\n # date = datetime.datetime.strptime(string_date, '%Y-%m-%d').date()\n new_appointment = None\n date_string = post.get(\"date\") + \"-\" + post.get(\"time\")\n date = datetime.datetime.strptime(date_string, '%Y-%m-%d-%H:%M')\n the_user = request.user\n notes = post.get(\"notes\")\n\n if the_user.userprofile.is_doctor():\n patient_id = int(post.get(\"patient\", the_user.pk))\n patient = User.objects.get(pk=patient_id)\n doctor = User.objects.get(pk=the_user.id)\n new_appointment = Appointment.objects.create(date=date, doctor=doctor, patient=patient, notes=notes)\n\n elif request.user.userprofile.is_patient():\n doctor_id = int(post.get(\"doctor\", the_user.pk))\n doctor = User.objects.get(pk=doctor_id)\n patient = User.objects.get(pk=the_user.id)\n new_appointment = Appointment.objects.create(date=date, doctor=doctor, patient=patient, notes=notes)\n\n return new_appointment", "def appointment_stats(request):\n # TODO: Consider/Look into Django cache framework\n # Default is to load up yesterday's stats\n data = {}\n if request.GET.get('lookback'):\n data['lookback'] = request.GET.get('lookback')\n appointments = get_appointments_word(request.GET.get('lookback'))\n else:\n data['lookback'] = 'yesterday'\n appointments = get_appointments_word('yesterday')\n data.update(get_appointment_stats(appointments))\n return render(request, 'doctor/stats.html', data)", "def cant_view_event(self, event, request):\n return render(request, self.template_name, {\n 'error': \"Not a public event\",\n 'event': None,\n })", "def schedule_text(self):\n self.check_editable()\n title = safe_unicode(self.request.get('title'))\n if not title:\n return JSONResponse(self.request).error(\n _('empty_proposal', default=u\"Proposal must not be empty.\")\n ).proceed().dump()\n\n if is_word_meeting_implementation_enabled():\n try:\n self.meeting.schedule_ad_hoc(title)\n except MissingAdHocTemplate:\n return JSONResponse(self.request).error(\n _('missing_ad_hoc_template',\n default=u\"No ad-hoc agenda-item template has been \"\n u\"configured.\")\n ).remain().dump()\n except MissingMeetingDossierPermissions:\n return JSONResponse(self.request).error(\n _('error_no_permission_to_add_document',\n default=u'Insufficient privileges to add a'\n u' document to the meeting dossier.')\n ).remain().dump()\n\n else:\n self.meeting.schedule_text(title)\n\n return JSONResponse(self.request).info(\n _('text_added', default=u\"Text successfully added.\")).proceed().dump()", "def showtask(id):\n\n tasks = Task.query.filter_by(id=id)\n return render_template('home/taskshowall/dashboard_showtask.html',tasks=tasks,title=\"tasks\")", "def attendingAamas(request):\n # Test Comment\n assert isinstance(request, HttpRequest)\n return render(\n request,\n 'attendingAamas.html',\n context_instance=RequestContext(request, {})\n )", "def view_event(request, event_id):\n event = get_object_or_404(Event, pk=event_id)\n context = 
{'event': event }\n return render_to_response('event_view.html',\n context,\n context_instance=RequestContext(request))", "def json_frapp(request):\n from pv.settings import MEDIA_URL\n\n if request.GET.get('date') == None:\n start = datetime.combine(date.today(), time(0, 0))\n else:\n start = datetime.combine( datetime.strptime(request.GET.get('date'), '%Y-%m-%d').date(), time(0, 0))\n\n end = datetime.combine(start, time(23, 59))\n\n timeslots = TimeSlot.objects.filter(start__gte=start,start__lte=end).select_related('show').order_by('start')\n\n\n '''Generate categories object for output'''\n\n categories = Category.objects.all()\n categories_output = []\n\n for c in categories:\n c_entry = {\n 'id': c.id,\n 'color': c.color.replace('#', '').upper(),\n 'namedisplay': c.category,\n 'description': c.description\n }\n\n categories_output.append(c_entry)\n\n # Get all series for timeslots\n series = set()\n for ts in timeslots:\n series.add(ts.show)\n\n\n '''Generate series object for output'''\n\n series_output = []\n\n for s in series:\n metainfos = []\n metainfos.append({ 'key': 'ProduzentIn', 'value': ', '.join(ts.show.hosts.values_list('name', flat=True)) })\n metainfos.append({ 'key': 'E-Mail', 'value': ', '.join(ts.show.hosts.values_list('email', flat=True)) })\n\n image = '' if s.image.name == None or s.image.name == '' else str(get_current_site(request)) + MEDIA_URL + s.image.name\n url = '' if s.website == None or s.website == '' else s.website\n\n # Get active schedules for the given date\n # But include upcoming single timeslots (with rrule_id=1)\n schedules = Schedule.objects.filter( Q(show=s.id,is_repetition=False) &\n (\n Q(rrule_id__gt=1,dstart__lte=start,until__gte=start) |\n Q(rrule_id=1,dstart__gte=start)\n )\n )\n\n schedules_repetition = Schedule.objects.filter( Q(show=s.id,is_repetition=True) &\n (\n Q(rrule_id__gt=1,dstart__lte=start,until__gte=start) |\n Q(rrule_id=1,dstart__gte=start)\n )\n )\n\n broadcastinfos = ''\n\n if not schedules.exists():\n continue\n\n for schedule in schedules:\n broadcastinfos = broadcastinfos + generate_frapp_broadcastinfos(schedule)\n\n if schedules_repetition.exists():\n broadcastinfos = broadcastinfos + 'Wiederholung jeweils:'\n for schedule in schedules_repetition:\n broadcastinfos = broadcastinfos + generate_frapp_broadcastinfos(schedule)\n\n s_entry = {\n 'id': s.id,\n 'categoryid': s.category.values_list('id', flat=True)[0],\n 'color': s.category.values_list('color', flat=True)[0].replace('#', '').upper(),\n 'namedisplay': s.name,\n 'description': s.description,\n 'url': url,\n 'image': image,\n 'broadcastinfos': broadcastinfos,\n 'metainfos': metainfos\n }\n\n series_output.append(s_entry)\n\n\n '''Generate shows object for output'''\n\n shows_output = []\n\n for ts in timeslots:\n\n is_repetition = ' ' + _('REP') if ts.schedule.is_repetition is 1 else ''\n namedisplay = ts.show.name + is_repetition\n description = ts.show.description\n url = str(get_current_site(request)) + '/shows/' + ts.show.slug\n urlmp3 = ''\n\n # If there's a note to the timeslot use its title, description and url\n try:\n note = Note.objects.get(timeslot=ts.id)\n namedisplay = note.title + is_repetition\n description = note.content\n url = str(get_current_site(request)) + '/notes/' + note.slug\n urlmp3 = note.audio_url\n except ObjectDoesNotExist:\n pass\n\n ts_entry = {\n 'id': ts.id,\n 'seriesid': ts.show.id,\n 'datetimestart': ts.start.strftime('%d.%m.%Y %H:%M:%S'),\n 'datetimeend': ts.end.strftime('%d.%m.%Y %H:%M:%S'),\n 'namedisplay': namedisplay,\n 
'description': description,\n 'url': url,\n 'urlmp3': urlmp3,\n }\n\n shows_output.append(ts_entry)\n\n output = {}\n output['categories'] = categories_output\n output['series'] = series_output\n output['shows'] = shows_output\n\n return HttpResponse(json.dumps(output, ensure_ascii=False).encode('utf8'),\n content_type=\"application/json; charset=utf-8\")", "def update_appointment(request,pk):\n appointment = AppointmentRequests.objects.get(id=pk)\n form = AppointmentUpdate(instance=appointment)\n if request.method == \"POST\":\n form = AppointmentUpdate(request.POST,instance=appointment)\n if form.is_valid():\n form.save()\n return redirect(\"dashboard\")\n else:\n messages.info(request,\"Invalid Data sent, Make sure you provided right data.\")\n return redirect(\"update_appointment\",pk=pk)\n else:\n return render(request,\"update_appointment.html\",{\"form\":form})", "def create_appointments(\n data: AppointmentCreate,\n background_tasks: BackgroundTasks, \n user: User = Depends(deps.get_user),\n db: Session = Depends(deps.get_db),\n rdc: RedisCache = Depends(deps.get_redis)\n) -> Any:\n db_provider = crud_user.get_user_by_id(db, str(data.provider_id))\n if not db_provider:\n raise HTTPException(\n status_code=404, \n detail=\"Cabeleireiro não encontrado\"\n )\n\n current_date = datetime.now()\n compare_date = data.date.replace(tzinfo=None)\n if compare_date < current_date:\n raise HTTPException(\n status_code=400, \n detail=\"Você não pode marcar agendamento em datas passadas\"\n )\n \n if data.date.hour < 8 or data.date.hour > 17:\n raise HTTPException(\n status_code=400, \n detail=\"Você só pode cria agendamentos entre 8:00 e 17:00\"\n )\n\n if data.provider_id == user.id:\n raise HTTPException(\n status_code=400, \n detail=\"Você não pode marca agendamento consigo mesmo\"\n )\n\n validate_date = crud_appointment.get_appointment_by_date(db, data.provider_id, data.date)\n if validate_date:\n raise HTTPException(status_code=400, detail=\"Este horario já esta agendado\")\n\n appointment = crud_appointment.create(db, data, user)\n msg = f\"Novo agendamento de {user.name} {user.surname} para o {date.format_date(data.date)}\"\n background_tasks.add_task(crud_notification.create, str(data.provider_id), msg)\n date_time = data.date\n rdc.invalidate_cache(\n f\"providers-appointments:{data.provider_id}:{date_time.year}:{date_time.month}:{date_time.day}\"\n )\n rdc.invalidate_cache(f\"user-appointments:{user.id}\")\n\n return appointment", "def set_available_time_slot():\n if request.content_type != 'application/json':\n error = json.dumps({'error': 'Invalid Content Type'})\n return make_response(error, 400, InterviewCalendarApi.HEADERS)\n\n data = request.json\n # For Temporary purpose, stored in flat file database\n with open(InterviewCalendarApi.DB_FILE, \"a+\") as fd:\n record = \"%s|%s|%s|%s\\n\" %(data[\"Category\"], data[\"Name\"],\n data[\"Email\"], \",\".join(data[\"AvailablityDateTime\"]))\n fd.write(record)\n msg = json.dumps({\"Status\": \"Success\"})\n return make_response(msg, 200, InterviewCalendarApi.HEADERS)", "def show_timeline(\n request: HttpRequest,\n pk: Optional[int] = None,\n workflow: Optional[Workflow] = None,\n) -> HttpResponse:\n action = None\n if pk:\n action = workflow.actions.filter(pk=pk).first()\n\n if not action:\n # The action is not part of the selected workflow\n return redirect('home')\n logs = workflow.logs.filter(payload__action_id=action.id)\n else:\n logs = workflow.logs\n\n event_names = [\n Log.SCHEDULE_EMAIL_EXECUTE,\n
Log.DOWNLOAD_ZIP_ACTION,\n Log.SCHEDULE_JSON_EXECUTE,\n Log.SCHEDULE_CANVAS_EMAIL_EXECUTE,\n Log.SCHEDULE_EMAIL_EDIT,\n Log.SCHEDULE_JSON_EDIT,\n Log.SCHEDULE_CANVAS_EMAIL_EXECUTE,\n Log.SURVEY_INPUT,\n ]\n\n # Filter the logs to display and transform into values (process the json\n # and the long value for the log name\n logs = [\n {'id': log.id,\n 'name': log.get_name_display(),\n 'modified': log.modified,\n 'payload': json.dumps(log.payload, indent=2),\n 'action_name': log.payload['action'],\n 'action_id': log.payload['action_id']}\n for log in logs.filter(name__in=event_names)\n ]\n\n return render(\n request,\n 'action/timeline.html',\n {'event_list': logs, 'action': action})", "def route100days():\n return render_template(\"100days.html.j2\")", "def event(id):\n form = ContactForm()\n event = Event.query.get_or_404(id)\n other_media = {\"video\": event.video, \"misc_image_paths\": event.misc_images()}\n packages = event.packages.all()\n # commented out because the fake data generated for the demo of\n # this app by the Faker package may inadvertently contain real email addresses\n if form.validate_on_submit():\n # send_email(\n # organizer.email,\n # f\"Event Inquiry - {form.subject.data}\",\n # \"events/email/contact_organizer\",\n # organizer=organizer,\n # form=form,\n # event=event,\n # )\n flash(\"Your email was sent to the event organizer.\", \"success\")\n return redirect(url_for(\"events.event\", id=id))\n return render_template(\n \"events/event.html\",\n event=event,\n venue=event.venue,\n organizer=event.user,\n packages=packages,\n form=form,\n date_format=\"%m/%d/%Y\",\n main_image=event.main_image(),\n time_format=\"%I:%M %p\",\n other_media=other_media,\n )", "def render(self, task: \"TaskView\") -> Any:\n return None", "def test_alarm_view_list(self):\n response = self.client.get('/module/alarm/')\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'frontend/appointment/alarm/list.html')\n\n request = self.factory.get('/module/alarm/')\n request.user = self.user\n request.session = {}\n response = alarm_list(request)\n self.assertEqual(response.status_code, 200)", "def test_event_view_list(self):\n response = self.client.get('/module/event/')\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'frontend/appointment/event/list.html')\n\n request = self.factory.get('/module/calendar/')\n request.user = self.user\n request.session = {}\n response = event_list(request)\n self.assertEqual(response.status_code, 200)", "def test_appointment_date(self):\n # Default for email\n appt_date = datetime.date.today() + datetime.timedelta(days=7) \n self.create_confirmed_notification(self.test_patient, appt_date)\n self.create_unconfirmed_notification(self.other_patient, appt_date)\n\n # run email job\n from aremind.apps.reminders.app import daily_email_callback\n daily_email_callback(self.router)\n\n self.assertEqual(len(mail.outbox), 1)\n message = mail.outbox[0]\n self.assertPatientInMessage(message, self.test_patient)\n self.assertPatientInMessage(message, self.other_patient)\n self.assertPatientNotInMessage(message, self.unrelated_patient)", "def show_evts(bus_id):\n\n events = crud.get_bus_evts(bus_id)\n business = crud.get_bus_by_id(bus_id)\n\n return render_template('/business_events.html', events=events, business=business)", "def homepage(request):\n template_var = base_template_vals(request)\n template_var[\"events\"] = Event.objects.filter(is_approved=True\n ).order_by(\"-created\")\n \n return 
render_to_response(\"event/event_homepage.html\", template_var,\n context_instance=RequestContext(request))", "def myevents(request):\n context = RequestContext(request)\n\n user = request.user\n\n return render_to_response('myevents.html', context)", "def calendar(request, year=None, month=None):\n today = datetime.date.today()\n year = int(year) if year else today.year\n month = int(month) if month else today.month\n try:\n first_of_month = datetime.date(year, month, 1)\n except ValueError: # Not a valid year and month\n raise Http404\n\n events = Event.objects.filter(event_start__year=year, event_start__month=month)\n cal = EventCalendar(events, year, month).formatmonth(year, month)\n\n user = request.user\n future_attending_events = attending_events(user, today)\n\n months = year * 12 + month - 1 # months since epoch (Christ)\n month_list = [\n datetime.date(m // 12, m % 12 + 1, 1) for m in range(months - 5, months + 7)\n ]\n\n # Get some random dates in the current, next, and previous month.\n # These dates are used load the calendar for that month.\n # * prev is some day in the previous month\n # * this is some day in this month\n # * next is some day in the next month\n context = {\n \"calendar\": mark_safe(cal),\n \"prev\": first_of_month - datetime.timedelta(27),\n \"this\": first_of_month,\n \"next\": first_of_month + datetime.timedelta(32),\n \"future_attending_events\": future_attending_events,\n \"month_list\": month_list,\n }\n\n return render(request, \"events/event_list.html\", context)", "def edit_meeting_schedule(request, num=None, owner=None, name=None):\n # Need to coordinate this list with types of session requests\n # that can be created (see, e.g., SessionQuerySet.requests())\n IGNORE_TIMESLOT_TYPES = ('offagenda', 'reserved', 'unavail')\n meeting = get_meeting(num)\n if name is None:\n schedule = meeting.schedule\n else:\n schedule = get_schedule_by_name(meeting, get_person_by_email(owner), name)\n\n if schedule is None:\n raise Http404(\"No meeting information for meeting %s owner %s schedule %s available\" % (num, owner, name))\n\n can_see, can_edit, secretariat = schedule_permissions(meeting, schedule, request.user)\n\n lock_time = settings.MEETING_SESSION_LOCK_TIME\n def timeslot_locked(ts):\n meeting_now = now().astimezone(pytz.timezone(meeting.time_zone))\n if not settings.USE_TZ:\n meeting_now = meeting_now.replace(tzinfo=None)\n return schedule.is_official and (ts.time - meeting_now < lock_time)\n\n if not can_see:\n if request.method == 'POST':\n permission_denied(request, \"Can't view this schedule.\")\n\n return render(request, \"meeting/private_schedule.html\", {\n \"schedule\":schedule,\n \"meeting\": meeting,\n \"meeting_base_url\": request.build_absolute_uri(meeting.base_url()),\n \"hide_menu\": True\n }, status=403, content_type=\"text/html\")\n\n # See if we were given one or more 'type' query string parameters. 
If so, filter to that timeslot type.\n if 'type' in request.GET:\n include_timeslot_types = request.GET.getlist('type')\n else:\n include_timeslot_types = None # disables filtering by type (other than IGNORE_TIMESLOT_TYPES)\n\n assignments = SchedTimeSessAssignment.objects.filter(\n schedule__in=[schedule, schedule.base],\n timeslot__location__isnull=False,\n )\n if include_timeslot_types is not None:\n assignments = assignments.filter(session__type__in=include_timeslot_types)\n assignments = assignments.order_by('timeslot__time','timeslot__name')\n\n assignments_by_session = defaultdict(list)\n for a in assignments:\n assignments_by_session[a.session_id].append(a)\n\n tombstone_states = ['canceled', 'canceledpa', 'resched']\n\n sessions = Session.objects.filter(meeting=meeting)\n if include_timeslot_types is not None:\n sessions = sessions.filter(type__in=include_timeslot_types)\n sessions = add_event_info_to_session_qs(\n sessions.exclude(\n type__in=IGNORE_TIMESLOT_TYPES,\n ).order_by('pk'),\n requested_time=True,\n requested_by=True,\n ).filter(\n Q(current_status__in=['appr', 'schedw', 'scheda', 'sched'])\n | Q(current_status__in=tombstone_states, pk__in={a.session_id for a in assignments})\n ).prefetch_related(\n 'resources', 'group', 'group__parent', 'group__type', 'joint_with_groups', 'purpose',\n )\n\n timeslots_qs = TimeSlot.objects.filter(meeting=meeting)\n if include_timeslot_types is not None:\n timeslots_qs = timeslots_qs.filter(type__in=include_timeslot_types)\n timeslots_qs = timeslots_qs.exclude(\n type__in=IGNORE_TIMESLOT_TYPES,\n ).prefetch_related('type').order_by('location', 'time', 'name')\n\n if timeslots_qs.count() > 0:\n min_duration = min(t.duration for t in timeslots_qs)\n max_duration = max(t.duration for t in timeslots_qs)\n else:\n min_duration = 1\n max_duration = 2\n\n def timedelta_to_css_ems(timedelta):\n # we scale the session and slots a bit according to their\n # length for an added visual clue\n capped_min_d = max(min_duration, datetime.timedelta(minutes=30))\n capped_max_d = min(max_duration, datetime.timedelta(hours=4))\n capped_timedelta = min(max(capped_min_d, timedelta), capped_max_d)\n\n min_d_css_rems = 8\n max_d_css_rems = 10\n # interpolate\n scale = (capped_timedelta - capped_min_d) / (capped_max_d - capped_min_d) if capped_min_d != capped_max_d else 1\n return min_d_css_rems + (max_d_css_rems - min_d_css_rems) * scale\n\n def prepare_sessions_for_display(sessions):\n # requesters\n requested_by_lookup = {p.pk: p for p in Person.objects.filter(pk__in=set(s.requested_by for s in sessions if s.requested_by))}\n\n # constraints\n constraints_for_sessions, formatted_constraints_for_sessions, constraint_names = preprocess_constraints_for_meeting_schedule_editor(meeting, sessions)\n\n sessions_for_group = defaultdict(list)\n for s in sessions:\n sessions_for_group[s.group_id].append(s)\n\n for s in sessions:\n s.requested_by_person = requested_by_lookup.get(s.requested_by)\n\n s.scheduling_label = \"???\"\n s.purpose_label = None\n if (s.purpose.slug in ('none', 'regular')) and s.group:\n s.scheduling_label = s.group.acronym\n s.purpose_label = 'BoF' if s.group.is_bof() else s.group.type.name\n else:\n s.purpose_label = s.purpose.name\n if s.name:\n s.scheduling_label = s.name\n\n s.requested_duration_in_hours = round(s.requested_duration.seconds / 60.0 / 60.0, 1)\n\n session_layout_margin = 0.2\n s.layout_width = timedelta_to_css_ems(s.requested_duration) - 2 * session_layout_margin\n s.parent_acronym = s.group.parent.acronym if s.group and 
s.group.parent else \"\"\n\n # compress the constraints, so similar constraint labels are\n # shared between the conflicting sessions they cover - the JS\n # then simply has to detect violations and show the\n # preprocessed labels\n constrained_sessions_grouped_by_label = defaultdict(set)\n for name_id, ts in itertools.groupby(sorted(constraints_for_sessions.get(s.pk, [])), key=lambda t: t[0]):\n ts = list(ts)\n session_pks = (t[1] for t in ts)\n constraint_name = constraint_names[name_id]\n if \"{count}\" in constraint_name.formatted_editor_label:\n for session_pk, grouped_session_pks in itertools.groupby(session_pks):\n count = sum(1 for i in grouped_session_pks)\n constrained_sessions_grouped_by_label[format_html(constraint_name.formatted_editor_label, count=count)].add(session_pk)\n\n else:\n constrained_sessions_grouped_by_label[constraint_name.formatted_editor_label].update(session_pks)\n\n s.constrained_sessions = list(constrained_sessions_grouped_by_label.items())\n s.formatted_constraints = formatted_constraints_for_sessions.get(s.pk, {})\n\n s.other_sessions = [s_other for s_other in sessions_for_group.get(s.group_id) if s != s_other]\n\n s.readonly = s.current_status in tombstone_states or any(a.schedule_id != schedule.pk for a in assignments_by_session.get(s.pk, []))\n\n def prepare_timeslots_for_display(timeslots, rooms):\n \"\"\"Prepare timeslot data for template\n\n Prepares timeslots for display by sorting into groups in a structure\n that can be rendered by the template and by adding some data to the timeslot\n instances. Currently adds a 'layout_width' property to each timeslot instance.\n The layout_width is the width, in em, that should be used to style the timeslot's\n width.\n\n Rooms are partitioned into groups that have identical sets of timeslots\n for the entire meeting.\n\n The result of this method is an OrderedDict, days, keyed by the Date\n of each day that has at least one timeslot. The value of days[day] is a\n list with one entry for each group of rooms. Each entry is a list of\n dicts with keys 'room' and 'timeslots'. The 'room' value is the room\n instance and 'timeslots' is a list of timeslot instances for that room.\n\n The format is more easily illustrated than explained:\n\n days = OrderedDict(\n Date(2021, 5, 27): [\n [ # room group 1\n {'room': <room1>, 'timeslots': [<room1 timeslot1>, <room1 timeslot2>]},\n {'room': <room2>, 'timeslots': [<room2 timeslot1>, <room2 timeslot2>]},\n {'room': <room3>, 'timeslots': [<room3 timeslot1>, <room3 timeslot2>]},\n ],\n [ # room group 2\n {'room': <room4>, 'timeslots': [<room4 timeslot1>]},\n ],\n ],\n Date(2021, 5, 28): [\n [ # room group 1\n {'room': <room1>, 'timeslots': [<room1 timeslot3>]},\n {'room': <room2>, 'timeslots': [<room2 timeslot3>]},\n {'room': <room3>, 'timeslots': [<room3 timeslot3>]},\n ],\n [ # room group 2\n {'room': <room4>, 'timeslots': []},\n ],\n ],\n )\n \"\"\"\n\n # Populate room_data. 
This collects the timeslots for each room binned by\n # day, plus data needed for sorting the rooms for display.\n room_data = dict()\n all_days = set()\n # timeslots_qs is already sorted by location, name, and time\n for t in timeslots:\n if t.location not in rooms:\n continue\n\n t.layout_width = timedelta_to_css_ems(t.duration)\n if t.location_id not in room_data:\n room_data[t.location_id] = dict(\n timeslots_by_day=dict(),\n timeslot_count=0,\n start_and_duration=[],\n first_timeslot = t,\n )\n rd = room_data[t.location_id]\n rd['timeslot_count'] += 1\n rd['start_and_duration'].append((t.time, t.duration))\n ttd = t.time.date()\n all_days.add(ttd)\n if ttd not in rd['timeslots_by_day']:\n rd['timeslots_by_day'][ttd] = []\n rd['timeslots_by_day'][ttd].append(t)\n\n all_days = sorted(all_days) # changes set to a list\n # Note the maximum timeslot count for any room\n if len(room_data) > 0:\n max_timeslots = max(rd['timeslot_count'] for rd in room_data.values())\n else:\n max_timeslots = 0\n\n # Partition rooms into groups with identical timeslot arrangements.\n # Start by discarding any roos that have no timeslots.\n rooms_with_timeslots = [r for r in rooms if r.pk in room_data]\n # Then sort the remaining rooms.\n sorted_rooms = sorted(\n rooms_with_timeslots,\n key=lambda room: (\n # First, sort regular session rooms ahead of others - these will usually\n # have more timeslots than other room types.\n 0 if room_data[room.pk]['timeslot_count'] == max_timeslots else 1,\n # Sort rooms with earlier timeslots ahead of later\n room_data[room.pk]['first_timeslot'].time,\n # Sort rooms with more sessions ahead of rooms with fewer\n 0 - room_data[room.pk]['timeslot_count'],\n # Sort by list of starting time and duration so that groups with identical\n # timeslot structure will be neighbors. The grouping algorithm relies on this!\n room_data[room.pk]['start_and_duration'],\n # Within each group, sort higher capacity rooms first.\n room.capacity,\n # Finally, sort alphabetically by name\n room.name\n )\n )\n\n # Rooms are now ordered so rooms with identical timeslot arrangements are neighbors.\n # Walk the list, splitting these into groups.\n room_groups = []\n last_start_and_duration = None # Used to watch for changes in start_and_duration\n for room in sorted_rooms:\n if last_start_and_duration != room_data[room.pk]['start_and_duration']:\n room_groups.append([]) # start a new room_group\n last_start_and_duration = room_data[room.pk]['start_and_duration']\n room_groups[-1].append(room)\n\n # Next, build the structure that will hold the data for the view. This makes it\n # easier to arrange that every room has an entry for every day, even if there is\n # no timeslot for that day. This makes the HTML template much easier to write.\n # Use OrderedDicts instead of lists so that we can easily put timeslot data in the\n # right place.\n days = OrderedDict(\n (\n day, # key in the Ordered Dict\n [\n # each value is an OrderedDict of room group data\n OrderedDict(\n (room.pk, dict(room=room, timeslots=[]))\n for room in rg\n ) for rg in room_groups\n ]\n ) for day in all_days\n )\n\n # With the structure's skeleton built, now fill in the data. 
The loops must\n # preserve the order of room groups and rooms within each group.\n for rg_num, rgroup in enumerate(room_groups):\n for room in rgroup:\n for day, ts_for_day in room_data[room.pk]['timeslots_by_day'].items():\n days[day][rg_num][room.pk]['timeslots'] = ts_for_day\n\n # Now convert the OrderedDict entries into lists since we don't need to\n # do lookup by pk any more.\n for day in days.keys():\n days[day] = [list(rg.values()) for rg in days[day]]\n\n return days\n\n def _json_response(success, status=None, **extra_data):\n if status is None:\n status = 200 if success else 400\n data = dict(success=success, **extra_data)\n return JsonResponse(data, status=status)\n\n if request.method == 'POST':\n if not can_edit:\n permission_denied(request, \"Can't edit this schedule.\")\n\n action = request.POST.get('action')\n\n # Handle ajax requests. Most of these return JSON responses with at least a 'success' key.\n # For the swapdays and swaptimeslots actions, the response is either a redirect to the\n # updated page or a simple BadRequest error page. The latter should not normally be seen\n # by the user, because the front end should be preventing most invalid requests.\n if action == 'assign' and request.POST.get('session', '').isdigit() and request.POST.get('timeslot', '').isdigit():\n session = get_object_or_404(sessions, pk=request.POST['session'])\n timeslot = get_object_or_404(timeslots_qs, pk=request.POST['timeslot'])\n if timeslot_locked(timeslot):\n return _json_response(False, error=\"Can't assign to this timeslot.\")\n\n tombstone_session = None\n\n existing_assignments = SchedTimeSessAssignment.objects.filter(session=session, schedule=schedule)\n\n if existing_assignments:\n assertion('len(existing_assignments) <= 1',\n note='Multiple assignments for {} in schedule {}'.format(session, schedule))\n\n if timeslot_locked(existing_assignments[0].timeslot):\n return _json_response(False, error=\"Can't reassign this session.\")\n\n if schedule.pk == meeting.schedule_id and session.current_status == 'sched':\n old_timeslot = existing_assignments[0].timeslot\n # clone session and leave it as a tombstone\n tombstone_session = session\n tombstone_session.tombstone_for_id = session.pk\n tombstone_session.pk = None\n tombstone_session.save()\n\n session = None\n\n SchedulingEvent.objects.create(\n session=tombstone_session,\n status=SessionStatusName.objects.get(slug='resched'),\n by=request.user.person,\n )\n\n tombstone_session.current_status = 'resched' # rematerialize status for the rendering\n\n SchedTimeSessAssignment.objects.create(\n session=tombstone_session,\n schedule=schedule,\n timeslot=old_timeslot,\n )\n\n existing_assignments.update(timeslot=timeslot, modified=datetime.datetime.now())\n else:\n SchedTimeSessAssignment.objects.create(\n session=session,\n schedule=schedule,\n timeslot=timeslot,\n )\n\n if tombstone_session:\n prepare_sessions_for_display([tombstone_session])\n return _json_response(\n True,\n tombstone=render_to_string(\"meeting/edit_meeting_schedule_session.html\",\n {'session': tombstone_session})\n )\n else:\n return _json_response(True)\n\n elif action == 'unassign' and request.POST.get('session', '').isdigit():\n session = get_object_or_404(sessions, pk=request.POST['session'])\n existing_assignments = SchedTimeSessAssignment.objects.filter(session=session, schedule=schedule)\n assertion('len(existing_assignments) <= 1',\n note='Multiple assignments for {} in schedule {}'.format(session, schedule))\n if not any(timeslot_locked(ea.timeslot) for ea 
in existing_assignments):\n existing_assignments.delete()\n else:\n return _json_response(False, error=\"Can't unassign this session.\")\n\n return _json_response(True)\n\n elif action == 'swapdays':\n # updating the client side is a bit complicated, so just\n # do a full refresh\n\n swap_days_form = SwapDaysForm(request.POST)\n if not swap_days_form.is_valid():\n return HttpResponseBadRequest(\"Invalid swap: {}\".format(swap_days_form.errors))\n\n source_day = swap_days_form.cleaned_data['source_day']\n target_day = swap_days_form.cleaned_data['target_day']\n\n source_timeslots = [ts for ts in timeslots_qs if ts.time.date() == source_day]\n target_timeslots = [ts for ts in timeslots_qs if ts.time.date() == target_day]\n if any(timeslot_locked(ts) for ts in source_timeslots + target_timeslots):\n return HttpResponseBadRequest(\"Can't swap these days.\")\n\n swap_meeting_schedule_timeslot_assignments(schedule, source_timeslots, target_timeslots, target_day - source_day)\n\n return HttpResponseRedirect(request.get_full_path())\n\n elif action == 'swaptimeslots':\n # Swap sets of timeslots with equal start/end time for a given set of rooms.\n # Gets start and end times from TimeSlot instances for the origin and target,\n # then swaps all timeslots for the requested rooms whose start/end match those.\n # The origin/target timeslots do not need to be the same duration.\n swap_timeslots_form = SwapTimeslotsForm(meeting, request.POST)\n if not swap_timeslots_form.is_valid():\n return HttpResponseBadRequest(\"Invalid swap: {}\".format(swap_timeslots_form.errors))\n\n affected_rooms = swap_timeslots_form.cleaned_data['rooms']\n origin_timeslot = swap_timeslots_form.cleaned_data['origin_timeslot']\n target_timeslot = swap_timeslots_form.cleaned_data['target_timeslot']\n\n origin_timeslots = meeting.timeslot_set.filter(\n location__in=affected_rooms,\n time=origin_timeslot.time,\n duration=origin_timeslot.duration,\n )\n target_timeslots = meeting.timeslot_set.filter(\n location__in=affected_rooms,\n time=target_timeslot.time,\n duration=target_timeslot.duration,\n )\n if (any(timeslot_locked(ts) for ts in origin_timeslots)\n or any(timeslot_locked(ts) for ts in target_timeslots)):\n return HttpResponseBadRequest(\"Can't swap these timeslots.\")\n\n swap_meeting_schedule_timeslot_assignments(\n schedule,\n list(origin_timeslots),\n list(target_timeslots),\n target_timeslot.time - origin_timeslot.time,\n )\n return HttpResponseRedirect(request.get_full_path())\n\n return _json_response(False, error=\"Invalid parameters\")\n\n # Show only rooms that have regular sessions\n if include_timeslot_types is None:\n rooms = meeting.room_set.all()\n else:\n rooms = meeting.room_set.filter(session_types__slug__in=include_timeslot_types)\n\n # Construct timeslot data for the template to render\n days = prepare_timeslots_for_display(timeslots_qs, rooms)\n\n # possible timeslot start/ends\n timeslot_groups = defaultdict(set)\n for ts in timeslots_qs:\n ts.start_end_group = \"ts-group-{}-{}\".format(ts.time.strftime(\"%Y%m%d-%H%M\"), int(ts.duration.total_seconds() / 60))\n timeslot_groups[ts.time.date()].add((ts.time, ts.end_time(), ts.start_end_group))\n\n # prepare sessions\n prepare_sessions_for_display(sessions)\n\n for ts in timeslots_qs:\n ts.session_assignments = []\n timeslots_by_pk = {ts.pk: ts for ts in timeslots_qs}\n\n unassigned_sessions = []\n for s in sessions:\n assigned = False\n for a in assignments_by_session.get(s.pk, []):\n timeslot = timeslots_by_pk.get(a.timeslot_id)\n if timeslot:\n 
timeslot.session_assignments.append((a, s))\n assigned = True\n\n if not assigned:\n unassigned_sessions.append(s)\n\n # group parent colors\n def cubehelix(i, total, hue=1.2, start_angle=0.5):\n # theory in https://arxiv.org/pdf/1108.5083.pdf\n rotations = total // 4\n x = float(i + 1) / (total + 1)\n phi = 2 * math.pi * (start_angle / 3 + rotations * x)\n a = hue * x * (1 - x) / 2.0\n\n return (\n max(0, min(x + a * (-0.14861 * math.cos(phi) + 1.78277 * math.sin(phi)), 1)),\n max(0, min(x + a * (-0.29227 * math.cos(phi) + -0.90649 * math.sin(phi)), 1)),\n max(0, min(x + a * (1.97294 * math.cos(phi)), 1)),\n )\n\n session_parents = sorted(set(\n s.group.parent for s in sessions\n if s.group and s.group.parent and (s.group.parent.type_id == 'area' or s.group.parent.acronym in ('irtf','iab'))\n ), key=lambda p: p.acronym)\n\n liz_preferred_colors = {\n 'art' : { 'dark' : (204, 121, 167) , 'light' : (234, 232, 230) },\n 'gen' : { 'dark' : (29, 78, 17) , 'light' : (232, 237, 231) },\n 'iab' : { 'dark' : (255, 165, 0) , 'light' : (255, 246, 230) },\n 'int' : { 'dark' : (132, 240, 240) , 'light' : (232, 240, 241) },\n 'irtf' : { 'dark' : (154, 119, 230) , 'light' : (243, 239, 248) },\n 'ops' : { 'dark' : (199, 133, 129) , 'light' : (250, 240, 242) },\n 'rtg' : { 'dark' : (222, 219, 124) , 'light' : (247, 247, 233) },\n 'sec' : { 'dark' : (0, 114, 178) , 'light' : (245, 252, 248) },\n 'tsv' : { 'dark' : (117,201,119) , 'light' : (251, 252, 255) },\n } \n for i, p in enumerate(session_parents):\n if p.acronym in liz_preferred_colors:\n colors = liz_preferred_colors[p.acronym]\n p.scheduling_color = \"rgb({}, {}, {})\".format(*colors['dark'])\n p.light_scheduling_color = \"rgb({}, {}, {})\".format(*colors['light'])\n else:\n rgb_color = cubehelix(i, len(session_parents))\n p.scheduling_color = \"rgb({}, {}, {})\".format(*tuple(int(round(x * 255)) for x in rgb_color))\n p.light_scheduling_color = \"rgb({}, {}, {})\".format(*tuple(int(round((0.9 + 0.1 * x) * 255)) for x in rgb_color))\n\n session_purposes = sorted(set(s.purpose for s in sessions if s.purpose), key=lambda p: p.name)\n timeslot_types = sorted(\n set(\n s.type for s in sessions if s.type\n ).union(\n t.type for t in timeslots_qs.all()\n ),\n key=lambda tstype: tstype.name,\n )\n\n return render(request, \"meeting/edit_meeting_schedule.html\", {\n 'meeting': meeting,\n 'schedule': schedule,\n 'can_edit': can_edit,\n 'can_edit_properties': can_edit or secretariat,\n 'secretariat': secretariat,\n 'days': days,\n 'timeslot_groups': sorted((d, list(sorted(t_groups))) for d, t_groups in timeslot_groups.items()),\n 'unassigned_sessions': unassigned_sessions,\n 'session_parents': session_parents,\n 'session_purposes': session_purposes,\n 'timeslot_types': timeslot_types,\n 'hide_menu': True,\n 'lock_time': lock_time,\n })", "def index(request):\n try:\n doctor = Doctor.objects.get(user=request.user)\n except Doctor.DoesNotExist:\n raise Http404(\"Doctor with current user instance unavailable!\")\n\n #counting patients who have recovered or are asymptomatic\n asymptomatic_patients = doctor.patients.filter(asymptomatic=True).count()\n\n #call function to calculate doctor age\n age = age_calculation(doctor.user.date_of_birth)\n\n #call method to filter for valid appointments\n appointments = get_appointments_list(doctor.doctor_appointments.all())\n\n context = {\n \"hospital\": doctor.hospital,\n \"doctor\": doctor,\n \"appointments_number\": len(appointments),\n \"patients_number\": doctor.patients.filter(asymptomatic=False).count(),\n 
\"recovered_number\": asymptomatic_patients,\n \"age\": age \n }\n #return information\n return render(request, \"doctors/index.html\", context)", "def start_end():\n return render_template('presence_start_end.html')", "def smart_alarm_homepage():\r\n s.run(blocking=False)\r\n try:\r\n delete_alarm()\r\n except:\r\n pass\r\n try:\r\n delete_notification()\r\n except:\r\n pass\r\n try:\r\n set_new_alarm()\r\n return redirect(request.path, code=302) \r\n except:\r\n pass\r\n return render_template('index.html', title='Alarm Clock', notifications=notifications, image=get_image(), alarms=alarms)", "def show_cal(request, year=None, month=None):\n if year == None:\n # get the current comic as a starting point\n lToday = Comic.objects.filter(published=True).order_by('-date')[0].date\n year = lToday.year\n month = lToday.month\n\n return calendar(request, year, month)", "def index():\n groups = list(map(lambda x: x.json(), GroupModel.query.all())) \n return render_template('dashboard/schedules.html', groups=groups)", "def meetings_en(request):\n meeting_list = Meeting.objects.order_by('held_date')\n context = {'meeting_list': meeting_list}\n return render(request, 'sacms/meetings_en.html', context)", "def response(self):\n response = HttpResponse(self.cal.to_ical(), content_type='text/calendar')\n response['Content-Type'] = 'text/calendar; charset=utf-8'\n response['Content-Disposition'] = 'attachment; filename=' + self.filename + '.ics'\n return response" ]
[ "0.73396885", "0.71008706", "0.69112563", "0.68383145", "0.6778799", "0.67298746", "0.6640715", "0.65311074", "0.6378886", "0.632984", "0.62694013", "0.6260236", "0.62068444", "0.62027246", "0.6160487", "0.6143634", "0.6137082", "0.6105613", "0.6103373", "0.60935265", "0.6009459", "0.594074", "0.59173864", "0.58636355", "0.58547795", "0.58531976", "0.583248", "0.5788148", "0.5760009", "0.5740915", "0.5736945", "0.5695784", "0.56620336", "0.56492263", "0.5647556", "0.5643986", "0.5617818", "0.56009084", "0.5597512", "0.5588265", "0.5567573", "0.55669296", "0.5558844", "0.5499103", "0.54839027", "0.5476558", "0.5457416", "0.54442626", "0.5441966", "0.54324687", "0.5431639", "0.5430482", "0.54133725", "0.5406043", "0.5402972", "0.5402105", "0.5398638", "0.5394934", "0.53914714", "0.5382094", "0.537043", "0.5366571", "0.5361585", "0.53587097", "0.5350781", "0.5343115", "0.53371143", "0.5334915", "0.5328747", "0.53279006", "0.5324471", "0.5323176", "0.53222084", "0.531037", "0.5299718", "0.529866", "0.5297144", "0.5293292", "0.5292138", "0.52916175", "0.5283577", "0.5283568", "0.5282147", "0.5273077", "0.5265657", "0.52594584", "0.52567416", "0.52460057", "0.52420264", "0.5232221", "0.522156", "0.5220457", "0.52114683", "0.521112", "0.51885796", "0.5187399", "0.5180554", "0.51805514", "0.51694053", "0.5164118" ]
0.6915958
2
View a customized calendar
def calendar_view(request, calendar_id):
    calendar_obj = Calendar.objects.get(pk=calendar_id)
    try:
        appointments = Appointment.objects.all().filter(calendar=calendar_obj)
        appointments = jsonify(appointments)
    except:
        appointments = []
    calendar_obj = calendar_obj.serialize()
    calendar_obj["non_working_days"] = [day for day in [0, 1, 2, 3, 4, 5, 6] if day not in calendar_obj["working_days"]]
    return render(request, 'calendar_view.html',
                  {'calendar_obj': calendar_obj,
                   'appointments': appointments})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def show_cal(request, year=None, month=None):\n if year == None:\n # get the current comic as a starting point\n lToday = Comic.objects.filter(published=True).order_by('-date')[0].date\n year = lToday.year\n month = lToday.month\n\n return calendar(request, year, month)", "def calendar(self):\r\n self.cal = QCalendarWidget()\r\n self.cal.setWindowTitle(\"Get Birthday\")\r\n self.cal.show()\r\n self.cal.clicked.connect(self.dateB)", "def abrirCalendar():\n try:\n var.dlgcalendar.show()\n except Exception as error:\n print('Error: %s ' % str(error))", "def bootstrap_calendar(css_classes):\n return render_to_string(\n 'django_bootstrap_calendar/calendar.html',\n {'css_classes': css_classes}\n )", "def calendar_view_link(calendar):\n linkdef = {\n \"label\": calendar.name, \"modal\": True,\n \"title\": _(\"View calendar detail\")\n }\n if calendar.__class__.__name__ == \"UserCalendar\":\n linkdef[\"url\"] = reverse(\n \"modoboa_radicale:user_calendar_detail\", args=[calendar.pk]\n )\n else:\n linkdef[\"url\"] = reverse(\n \"modoboa_radicale:shared_calendar_detail\", args=[calendar.pk]\n )\n return render_link(linkdef)", "def calendar(request, year=None, month=None):\n today = datetime.date.today()\n year = int(year) if year else today.year\n month = int(month) if month else today.month\n try:\n first_of_month = datetime.date(year, month, 1)\n except ValueError: # Not a valid year and month\n raise Http404\n\n events = Event.objects.filter(event_start__year=year, event_start__month=month)\n cal = EventCalendar(events, year, month).formatmonth(year, month)\n\n user = request.user\n future_attending_events = attending_events(user, today)\n\n months = year * 12 + month - 1 # months since epoch (Christ)\n month_list = [\n datetime.date(m // 12, m % 12 + 1, 1) for m in range(months - 5, months + 7)\n ]\n\n # Get some random dates in the current, next, and previous month.\n # These dates are used load the calendar for that month.\n # * prev is some day in the previous month\n # * this is some day in this month\n # * next is some day in the next month\n context = {\n \"calendar\": mark_safe(cal),\n \"prev\": first_of_month - datetime.timedelta(27),\n \"this\": first_of_month,\n \"next\": first_of_month + datetime.timedelta(32),\n \"future_attending_events\": future_attending_events,\n \"month_list\": month_list,\n }\n\n return render(request, \"events/event_list.html\", context)", "def index():\n # return render_template('index.html', events=get_calendar_events_today(CALENDAR_URL))\n return render_template('index.html', events=get_calendar_events_limit(CALENDAR_URL), events_sorted=True)", "def refresh_calendar():\n manage.refresh_calendar()", "def display_calendar(daze, month, year):\n log = daze.dateDict\n if not year:\n year = date.today().year # defaults to this year\n if month:\n first = date(year, month, 1)\n last = max([day for day in cal.itermonthdates(year, month) if day.month == month])\n s, ndates, firstdate, lastdate = daze.summarize(firstdate=first, lastdate=last)\n else:\n s, ndates, firstdate, lastdate = daze.summarize()\n places = sorted(s, key=s.get, reverse=True)\n colors = ['green', 'magenta', 'white', 'cyan', 'blue', 'red', 'yellow']\n months = calendar.month_name[1:]\n dates = [firstdate + timedelta(days=i) for i in range((lastdate - firstdate).days + 1)]\n\n matches = {p: c for (p, c) in zip(places, colors)}\n\n for (p, c) in matches.items():\n click.secho(\" %s \" % p, bg=c, fg='black', bold=True)\n\n for _date in dates:\n if _date.day == 1 or _date == firstdate:\n 
click.echo('')\n click.echo(\"\\n\" + months[_date.month - 1])\n if (_date.isoweekday() != 7):\n click.echo(\" \" * 3 * _date.isoweekday(), nl=False)\n if _date in log:\n p = log[_date]\n click.secho(\"%s\" % str(_date.day).rjust(3),\n fg='black',\n bg=matches[p],\n nl=(_date.isoweekday() == 6))\n else:\n click.secho(\"%s\" % str(_date.day).rjust(3),\n fg='black', nl=(_date.isoweekday() == 6))\n\n click.echo('\\n\\n\\n')", "def calendar(request, pYear, pMonth):\n lYear = int(pYear)\n lMonth = int(pMonth)\n lCalendarFromMonth = datetime.date(lYear, lMonth, 1)\n lCalendarToMonth = datetime.date(lYear, lMonth, monthrange(lYear, lMonth)[1])\n lComics = Comic.objects.filter(published=True, date__gte=lCalendarFromMonth, date__lte=lCalendarToMonth).order_by('date')\n lCalendar = ArchiveCalendar(lComics).formatmonth(lYear, lMonth)\n lPreviousYear = lYear\n lPreviousMonth = lMonth - 1\n if lPreviousMonth == 0:\n lPreviousMonth = 12\n lPreviousYear = lYear - 1\n lNextYear = lYear\n lNextMonth = lMonth + 1\n if lNextMonth == 13:\n lNextMonth = 1\n lNextYear = lYear + 1\n pmn = named_month(lPreviousMonth)\n nmn = named_month(lNextMonth)\n \n # now for something fun:\n # if we have the first or last comics in a collection, we DON'T want to paginate this!\n fComic = lComics[0]\n lComic = lComics.reverse()[0]\n aComic = fComic.get_first()\n bComic = fComic.get_latest()\n \n \n if aComic is None or fComic.id == aComic.id:\n lPreviousYear = 0\n lPreviousMonth = 0\n if bComic is None or lComic.id == bComic.id:\n lNextYear = 0\n lNextMonth = 0\n \n\n return render(request, 'archive/archive_cal.html', {'Calendar' : mark_safe(lCalendar),\n 'Month' : str(lMonth),\n 'MonthName' : named_month(lMonth),\n 'Year' : str(lYear),\n 'PreviousMonth' : str(lPreviousMonth),\n 'PreviousMonthName' : pmn,\n 'PreviousYear' : str(lPreviousYear),\n 'NextMonth' : str(lNextMonth),\n 'NextMonthName' : nmn,\n 'NextYear' : str(lNextYear),\n })", "def calendar(self, calendar_id):\r\n return c.Calendar(self, calendar_id)", "def calendar(self, calendar_id):\r\n return c.Calendar(self, calendar_id)", "def get_tradingview_ecocal(width, height, show_copyright):\n return_data = ''\n theme = get_sa_theme()\n tradingview_copyright = ''\n\n if str(width) == '0':\n width = '\"100%\"'\n if str(height) == '0':\n height = '\"100%\"'\n\n if str(show_copyright) == '1':\n tradingview_copyright = ''+\\\n '<div class=\"tradingview-widget-copyright\">'+\\\n '<a href=\"https://www.tradingview.com/markets/currencies/economic-calendar/\" rel=\"noopener\" target=\"_blank\">'+\\\n '<span class=\"blue-text\">Economic Calendar</span></a> by TradingView'+\\\n '</div>'\n\n return_data = '' +\\\n '<div class=\"tradingview-widget-container\">'+\\\n ' <div class=\"tradingview-widget-container__widget\"></div>'+\\\n tradingview_copyright+\\\n ' <script type=\"text/javascript\" '+\\\n 'src=\"https://s3.tradingview.com/external-embedding/embed-widget-events.js\" async>'+\\\n ' {'+\\\n ' \"colorTheme\": \"'+ theme +'\",'+\\\n ' \"isTransparent\": true,'+\\\n ' \"width\": '+ width +','+\\\n ' \"height\": '+ height +','+\\\n ' \"locale\": \"en\",'+\\\n ' \"importanceFilter\": \"-1,0,1\"'+\\\n '}'+\\\n ' </script>'+\\\n '</div>'\n return return_data", "def command_show(calendar):\n cal = {k: v for k, v in sorted(calendar.items(), key=lambda item: (\n item[0][0:4], item[0][5:7], item[0][8:]))}\n cal = {k: v for k, v in sorted(\n calendar.items(), key=lambda item: item[1][0][\"start\"])}\n\n cal_str = \"\\n\"\n for key in cal.keys():\n cal_str += f\"{key} : \\n\"\n\n for 
event in cal[key]:\n for sub_key in event.keys():\n if sub_key in (\"start\", \"end\"):\n cal_str += f\" {sub_key} : {str(event[sub_key]).zfill(2)}:00,\\n\"\n else:\n cal_str += f\" {sub_key} : {event[sub_key]}\\n\"\n if len(cal[key]) > 1:\n cal_str += \"\\n\"\n cal_str = cal_str.rstrip()\n cal_str += \"\\n\"\n return cal_str.rstrip()", "def bootstrap_calendar_js(*args, **kwargs):\n\n options = {}\n\n try:\n options[\"language\"] = kwargs[\"language\"]\n except KeyError:\n pass\n\n try:\n options[\"events_url\"] = kwargs[\"events_url\"]\n except KeyError:\n options[\"events_url\"] = '/calendar/json/'\n\n try:\n options[\"view\"] = kwargs[\"view\"]\n except KeyError:\n options[\"view\"] = 'month'\n\n try:\n options[\"language\"] = kwargs[\"language\"]\n except KeyError:\n options[\"language\"] = 'en'\n\n try:\n options[\"first_day\"] = kwargs[\"first_day\"]\n except KeyError:\n options[\"first_day\"] = 2\n\n try:\n options[\"width\"] = kwargs[\"width\"]\n except KeyError:\n options[\"width\"] = '100%'\n\n try:\n options[\"time_start\"] = kwargs[\"time_start\"]\n except KeyError:\n options[\"time_start\"] = '00:00'\n\n try:\n options[\"time_end\"] = kwargs[\"time_end\"]\n except KeyError:\n options[\"time_end\"] = '24:00'\n\n return render_to_string(\n 'django_bootstrap_calendar/calendar_js.html',\n options\n )", "def calendar_events(self):\r\n return CalendarEvents(self)", "def calendar_for_event_description(ed):\n return icemac.ab.calendar.interfaces.ICalendar(ed.context)", "def __calender_events(self):\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('calendar', 'v3', http=http)\n\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\n pt=\"Getting the upcoming latest events\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % pt)\n self.speech.synthesize_text(pt)\n eventsResult = service.events().list(\n calendarId='primary', timeMin=now, maxResults=1, singleEvents=True,\n orderBy='startTime').execute()\n events = eventsResult.get('items', [])\n\n if not events:\n pq=\"No upcoming events found.\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % pt)\n self.speech.synthesize_text(pq)\n for event in events:\n start = event['start'].get('dateTime', event['start'].get('date'))\n #start1=''.join(start)\n summary=event['summary']\n print start,summary\n requests.get(\"http://localhost:8080/statement?text=\"+start+\" \"+summary)", "def cal():\n this_cal = Kalendar()\n to_display = \"\"\n\n for elements in this_cal.get_all_elements():\n to_display += elements[\"key\"] + \":<BR>\"\n for element in elements[\"value\"]:\n to_display += \"&nbsp;&nbsp;&nbsp;&nbsp;\" + str(element) + \"<BR>\"\n\n return to_display", "def mkcalendar(self, url, body=\"\", dummy=None):\n return self.request(url, \"MKCALENDAR\", body)", "def display_calendar_redo(daze, year, month):\n log = daze.dateDict\n\n # Set first and last dates\n if year is None:\n year = date.today().year\n if month is None:\n first = date(year, 1, 1)\n if year == date.today().year:\n last = date.today()\n else:\n last = date(year, 12, 31)\n else:\n first = date(year, month, 1)\n last = date(2016, month, calendar.monthrange(2016, month)[1])\n\n # Get summarized data\n s, ndates, firstdate, lastdate = daze.summarize()\n places = sorted(s, key=s.get, reverse=True)\n colors = ['green', 'magenta', 'white', 'cyan', 'blue', 'red', 'yellow']", "def on_btnCalendar_clicked(self, widget):\n try:\n variables.semaforo = 1\n 
variables.vencalendar.connect('delete-event', lambda w, e: w.hide() or True)\n variables.vencalendar.show()\n\n except:\n print('error abrir calendario')", "def show_events(usrservice,calservice):\r\n print(args.action, args.inuser, 'celendar events')", "def ical(self) -> Calendar:\n cal = Calendar()\n event = IEvent()\n event.add(\"summary\", \"Video Chat\")\n event.add(\"dtstart\", self.start)\n cal.add_component(event)\n return cal.to_ical()", "def calendar_view(self):\n if \"calendarView\" in self._prop_dict:\n return CalendarViewCollectionPage(self._prop_dict[\"calendarView\"])\n else:\n return None", "def create_month_scr(self, month, toogle_today=False):\n\n scr = Screen()\n m = self.month_names_eng[self.active_date[1] - 1]\n scr.name = \"%s-%s\" % (m, self.active_date[2]) # like march-2015\n\n # Grid for days\n grid_layout = GridLayout(cols=7, rows=7, size_hint=(1, 1), pos_hint={\"top\": 1})\n scr.add_widget(grid_layout)\n\n # Days abbrs\n for i in range(7):\n if i >= 5: # weekends\n l = Label(text=self.days_abrs[i], color=(1, 0, 0, 1))\n else: # work days\n l = Label(text=self.days_abrs[i], text_size=(self.size[0], None), halign=\"center\")\n\n grid_layout.add_widget(l)\n\n global holiday, halfday\n\n # Buttons with days numbers\n for week in month:\n for day in week:\n if day[1] >= 6: # weekends\n self.tbtn = ToggleBtn(text=str(day[0]), color=(0, 0, 0, 1))\n else:\n self.tbtn = ToggleBtn(text=str(day[0]), color=(0, 0, 0, 1))\n for i in range(len(holiday)):\n if self.active_date[2] == holiday[i][2]:\n if self.active_date[1] == holiday[i][1]:\n if day[0] == holiday[i][0]:\n self.tbtn.background_color=(128, 0, 128, 1)\n for i in range(len(halfday)):\n if self.active_date[2] == halfday[i][2]:\n if self.active_date[1] == halfday[i][1]:\n if day[0] == halfday[i][0]:\n self.tbtn.background_color=(0, 255, 255, 0.5)\n\n self.tbtn.bind(on_press=self.get_btn_value)\n\n if toogle_today:\n # Down today button\n if day[0] == self.active_date[0] and day[2] == 1:\n self.tbtn.state = \"down\"\n # Disable buttons with days from other months\n if day[2] == 0:\n self.tbtn.text = \" \"\n self.tbtn.disabled = True\n self.tbtn.background_color = (0, 0, 0, 0.1)\n\n grid_layout.add_widget(self.tbtn)\n\n self.sm.add_widget(scr)", "def schedule(request):\r\n\r\n return render(request, 'editorial/schedule.html', {})", "def schedule(request):\n return render(request, 'vaxcharts/schedule.html')", "def ical_event(request, event_id):\n\n event = Event.objects.get(id=event_id)\n\n # Use the same template for both Event and BedPres.\n template = loader.get_template(\"events/event_icalendar.ics\")\n context = {\n \"event_list\": (event,),\n }\n response = HttpResponse(template.render(context), content_type=\"text/calendar\")\n response[\"Content-Disposition\"] = \"attachment; filename=Nabla_%s.ics\" % event.slug\n return response", "def getCalendar(self):\n return aq_parent(aq_inner(self))", "def timesheet(request):\r\n return render(\r\n request,\r\n 'timesheet/timesheet.html'\r\n )", "def main():\n# year = int(input(\"Enter year for calendar: \"))\n# first_day = first_day_of_year(year)\n\n # Loop through months 1 through 12\n # for month in range(1, NUM_MONTHS + 1):\n# first_day = print_month(first_day, month, year)\n\n canvas = make_canvas(CANVAS_WIDTH, CANVAS_HEIGHT, 'Calendar')\n # present the header, today's date\n\n top_rows(canvas)\n # present two buttons: weekly display and monthly display\n weekly_display_type = True\n date_to_present = date.today()\n 
#button_weekly(canvas,weekly_display_type,date_to_present)\n    #button_monthly(canvas, weekly_display_type, date_to_present)\n    # present weekly display\n    canvas.update()\n    canvas.mainloop()", "def main(request, year=None):\n\tif year: year = int(year)\n\telse: year = time.localtime()[0]\n\n\tnowYear, nowMonth = time.localtime()[:2]\n\tlst = []\n\n\tfor y in [year, year+1, year+2]:\n\t\tmonthLst = []\n\t\tfor n, month in enumerate(MONTH_NAMES):\n\t\t\tentry\t= current = False\n\t\t\tentries\t= entry.objects.filter(date__year=y, date__month=n+1)\n\n\t\t\tif entries:\n\t\t\t\tentry = True\n\t\t\tif y == nowYear and n+1 == nowMonth:\n\t\t\t\tcurrent = True\n\t\t\tmonthLst.append(dict(n=n+1, name=month, entry=entry, current=current))\n\t\tlst.append((y, monthLst))\n\n\treturn render_to_response(\"cal/\", dict(years=lst, user=request.user, year=year, reminders=reminders(request)))", "def show_events(request):\n    event_list = Event.objects.order_by('-date')\n\n    event_form = EventForm()\n\n    context = {'events': event_list, 'form': event_form}\n    return render(request, 'metro_app/events_view.html', context)", "def calendars(self):\r\n        return c.Calendars(self)", "def calendars(self):\r\n        return c.Calendars(self)", "def response(self):\n        response = HttpResponse(self.cal.to_ical(), content_type='text/calendar')\n        response['Content-Type'] = 'text/calendar; charset=utf-8'\n        response['Content-Disposition'] = 'attachment; filename=' + self.filename + '.ics'\n        return response", "def response(self):\n        response = HttpResponse(self.cal.to_ical(), content_type='text/calendar')\n        response['Content-Type'] = 'text/calendar; charset=utf-8'\n        response['Content-Disposition'] = 'attachment; filename=' + self.filename + '.ics'\n        return response", "def calendar_lists(self):\r\n        return CalendarLists(self)", "def timeline(self, **kwargs):\n\n        def rtm(n, multiple=10):\n            \"\"\"Round to multiple.\"\"\"\n            return int(multiple * round(float(n) / multiple))\n\n        beginning_minutes = 7 * 60 + 20  # starting time is 7:20\n        end_minutes = 21 * 60  # ending time is 21:00\n\n        interval = 100  # 100 minutes for each period (90 + 10)\n\n        total_minutes = ((end_minutes - beginning_minutes) // interval + 1) * interval\n        number_of_intervals = total_minutes // interval\n\n        segments = total_minutes // 10\n        days = {i: [[' '] * segments + ['│']] for i in range(5)}\n\n        for course in self.get_sorted_courses(include_unscheduled=False):\n            i = (rtm(course.time.start) - beginning_minutes) // 10\n            width = (rtm(course.time.end) - rtm(course.time.start)) // 10\n\n            day = 0\n            for j in range(i, i + width):\n                if days[course.weekday()][day][j] != ' ':\n                    day += 1\n                    if len(days[course.weekday()]) == day:\n                        days[course.weekday()].append([' '] * segments + ['│'])\n\n            days[course.weekday()][day][i] = '{'\n            days[course.weekday()][day][i + width - 1] = '}'\n\n            space = width - 2  # width minus { and }\n\n            name = Ansi.color(\n                course.abbreviation\n                if len(course.abbreviation) <= space\n                else course.abbreviation[: space - 1] + \".\",\n                course_types[course.type].color,\n            )\n\n            # TODO: this doesn't center correctly, for some reason\n            name = Ansi.center(name, space)\n\n            days[course.weekday()][day][i + 1] = name\n            for j in range(i + 2, i + width - 1):\n                days[course.weekday()][day][j] = ''\n\n        # print the header\n        print(\n            (\" ╭\" + \"─\" * (total_minutes // 10) + \"╮\\n │\")\n            + \"\".join(\n                Ansi.bold(\n                    minutes_to_HHMM(beginning_minutes + interval * i)\n                    .strip()\n                    .ljust(10, \" \")\n                )\n                for i in range(number_of_intervals)\n            )\n            + \"│\\n╭────┼─\"\n            + \"\".join(\n                \"─\" * number_of_intervals\n                + (\"─\" if i != number_of_intervals - 1 else \"┤\")\n                for i in range(number_of_intervals)\n            )\n        )\n\n        for i in range(5):\n            x = f\"│ {WD_EN[i][:2].capitalize()} │\"\n\n            for j, day in enumerate(days[i]):\n                if j == 0:\n                    print(x, end=\"\")\n                else:\n                    print(\"│    │\", end=\"\")\n\n                print(\"\".join(day))\n\n        # print the very last line\n        print(\n            \"╰────┴─\"\n            + \"\".join(\n                \"─\" * number_of_intervals\n                + (\"─\" if i != number_of_intervals - 1 else \"╯\")\n                for i in range(number_of_intervals)\n            )\n        )", "def _PrintOwnCalendars(self):\n\n    feed = self.cal_client.GetOwnCalendarsFeed()\n    print 'Printing owncalendars: %s' % feed.title.text\n    for i, a_calendar in zip(xrange(len(feed.entry)), feed.entry):\n      print '\\t%s. %s' % (i, a_calendar.title.text,)", "def calendar(self, name=None, cal_id=None, cal_url=None):\n        if not cal_url:\n            return self.calendar_home_set.calendar(name, cal_id)\n        else:\n            return Calendar(self.client, url=self.client.url.join(cal_url))", "def showSelectedDate(self):\n        pass", "def calendar_view_basic(request, owner_type, owner_id):\n\n    # Like before, obtain the context for the user's request.\n    context = RequestContext(request)\n\n    user = request.user\n    profile = get_profile(user)\n    user_profile = profile[0]\n\n    if request.method == 'GET':\n        verified_obj = verified_calendar(context, owner_type, owner_id, user)\n        if not isinstance(verified_obj, HttpResponse):\n            calendar, edit_priv = verified_obj\n            events = calendar.event_set.all()\n        else:\n            return verified_obj\n\n        response_object = {'calendar' : calendar, 'events': events,\n                'edit_priv': edit_priv, 'owner_type': owner_type,\n                }\n\n        if owner_type == \"user\":\n\n            # send school calendar\n            profile_school = user_profile.getSchool()\n            response_object['school'] = profile_school\n            if profile_school:\n                response_object['school_events'] = profile_school.cal.event_set.all()\n\n            # send course calendars\n            if isinstance(user_profile, Instructor):\n                profile_courses = Course.objects.filter(creator=user.id)\n            else:\n                profile_courses = user_profile.courses.all()\n            course_calendars = []\n            for course in profile_courses:\n                course_calendars.append({'course': course, 'events': course.cal.event_set.all()})\n            response_object['course_calendars'] = course_calendars;\n        return render_to_response('scheduler/calendar_basic.html',\n                response_object, context)\n    else:\n        # No context variables to pass to the template system, hence the\n        # blank dictionary object...\n        return render_to_response('/login.html', {}, context)", "def bootstrap_calendar_css(*args):\n    return render_to_string(\n        'django_bootstrap_calendar/calendar_css.html'\n    )", "def getCalendar(self):\n        cal = BlankCalendar()\n        for datable in self.run_query():\n            cal.add_component(datable.getEvent())\n        \n        return cal", "def name(self):\n        return 'Trakt My Upcoming Calendar'", "def calendar_events(self):\r\n        return calendars.CalendarEvents(self)", "def _PrintUserCalendars(self):\n\n    feed = self.cal_client.GetAllCalendarsFeed()\n    print 'Printing allcalendars: %s' % feed.title.text\n    for i, a_calendar in zip(xrange(len(feed.entry)), feed.entry):\n      print '\\t%s. 
%s' % (i, a_calendar.title.text,)", "def index(http_request, year=datetime.datetime.now().strftime(\"%Y\"), month=datetime.datetime.now().strftime(\"%m\")):\n\t# make sure the year number and month number are ints\n\tyear = int(year)\n\tmonth = int(month)\n\ttimestamp = datetime.datetime(year, month, 1)\n\t\n\t#initialize container for dates to be stored\n\tdate_list = []\n\t\n\tevents = Event.objects.filter(edate__year=year).filter(edate__month=month)\n\tfor event in events:\n\t\tdate_list.append({'id':event.id, 'day':datetime.date(event.edate.year, event.edate.month, event.edate.day), 'title':event.title, 'class':'event'})\n\n\tprojects = Project.objects.filter(due__year=year).filter(due__month=month)\n\tfor project in projects:\n\t\tdate_list.append({'id':project.id, 'day':datetime.date(project.due.year, project.due.month, project.due.day), 'title':project.name, 'class':'projects'})\n\t\t\t\n\t# next month's timestamp\n\tif month == 12:\n\t\tnext_month = datetime.datetime(year+1, 1, 1)\n\telif month < 12:\n\t\tnext_month = datetime.datetime(year, month+1, 1)\n\t\n\tupcoming_projects = Project.objects.filter(due__year=next_month.year).filter(due__month=next_month.month)\n\t\n\t\n\treturn render_to_response('schedule_cal.html', \n\t\t\t\t {'date_list':date_list, \n\t\t\t\t 'date':timestamp, \n 'urlprefix': urlprefix (),\n\t\t\t\t 'upcoming_projects':upcoming_projects}, \n\t\t\t\t )", "def _calendar(self):\n schedule = self.account.schedule()\n calendar = self._calendars[config.outlook_calendar]\n\n return calendar", "def __init__(self, d, m, y):\n\n self.set_calendar(d, m, y)", "def calendars(self):\n return self.calendar_home_set.calendars()", "def calender(self, month, year):\n\n day = ['S', ' M', ' T', ' W', ' Th', 'F', ' S']\n\n days = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n\n values = 1\n d = 1\n\n m = month\n y = year\n y0 = y - (14 - m) // 12\n x = y0 + y0 // 4 - y0 // 100 + y0 // 400\n m0 = m + 12 * ((14 - m) // 12) - 2\n d0 = (d + x + 31 * m0 // 12) % 7\n\n if utility_obj.isleap_year(str(year)):\n days[1] = 29\n row = 6\n column = 7\n two_d_array = [[0 for j in range(column)] for i in range(row)]\n\n print('Your Calender is Ready\\n')\n\n for i in range(0, 6 + 1):\n print(day[i], end=' ')\n print()\n for i in range(row):\n\n for j in range(column):\n\n if values <= days[m - 1]:\n if i == 0 and j < d0:\n two_d_array[i][j] = ' '\n continue\n\n two_d_array[i][j] = values\n values += 1\n\n for i in range(row):\n\n for j in range(column):\n if two_d_array[i][j] != 0:\n x = two_d_array[i][j]\n x1 = str(x).ljust(2)\n print(x1, end=\" \")\n\n print()", "def test_calendar_view_list(self):\n response = self.client.get('/module/calendar/')\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'frontend/appointment/calendar/list.html')\n\n request = self.factory.get('/module/calendar/')\n request.user = self.user\n request.session = {}\n response = calendar_list(request)\n self.assertEqual(response.status_code, 200)", "def get_ivol_calendar_r(\n self,\n\n headers: t.Dict[str, str] = None,\n body: JSONEncodable = None,\n fields_data: t.Dict[str, str] = None,\n **kwargs\n ):\n r = self._do_call(\n method='GET',\n url=f'{self.API_BASE_URL}/ivol/calendar',\n headers=headers,\n body=body,\n fields=fields_data,\n **kwargs\n )\n return r", "def modify_cal(cal, convert_dic):\n new_cal = Calendar()\n for elm in cal.walk():\n if elm.name == \"VEVENT\":\n event = elm\n event[\"summary\"] = convert_dic[str(elm.get(\"summary\"))] \n new_cal.add_component(event)\n 
return new_cal", "def events(self, calendar=\"\", date=\"\"):\n command = list(CALENDAR_COMMAND)\n\n if calendar:\n command.extend([\"-f\",\n join(self.calendar_directory,\n \"calendar.{}\".format(calendar))])\n\n if date:\n command.extend([\"-t\", \"{}\".format(date)])\n\n calendar_output = subprocess.check_output(command).decode(\"utf-8\")\n # Split the lines and filter the empty lines.\n lines = [line for line in calendar_output.split(\"\\n\") if line]\n lines_copy = list(lines)\n index = 0\n for event in lines:\n if event.startswith(\"\\t\") or event.startswith(\" \"):\n # This line is a continuation of the previous one.\n lines_copy[index - 1] += event\n else:\n lines_copy[index] = event\n index += 1\n\n # Substitute multiple whitespaces by one space.\n events = [' '.join(event.split()) for event in lines_copy[:index]]\n\n # Replace '&' by 'and' because PicoTTS pronounces it as 'ampersand'.\n # See https://github.com/snipsco/snips-issues/issues/85\n events = [event.replace('&', 'and') for event in events]\n\n # Create a sentence with the date and a new sentence with the description.\n # Strip the asterisk (*) after a date. This means the date changes from year to year.\n return [event[:6] + '.' + event[6:].strip(\"*\") for event in events]", "def __str__(self):\n return str(self.GetCalendarString())", "def __str__(self):\n return str(self.GetCalendarString())", "def view_attendance(request):\n\n\tcontext_dict = {\n\t\t'title': 'All Attendance',\n\t}\n\treturn render(request, \"viewAttendance.html\", context_dict)", "def touragenda(request):\n active_events = TourAgendaModel.objects.order_by('number')\n friday_events = TourAgendaModel.objects.all().filter(day='FRIDAY')\n saturday_events = TourAgendaModel.objects.all().filter(day='SATURDAY')\n sunday_events = TourAgendaModel.objects.all().filter(day='SUNDAY')\n\n context = {\n 'active_events': active_events,\n 'friday_events': friday_events,\n 'saturday_events': saturday_events,\n 'sunday_events': sunday_events,\n }\n\n return render(request, 'tourAgenda.html', context=context)", "def create_calendar(actions, location_and_time_axes):\n calendar = ical.Calendar()\n calendar['PRODID'] = '{} {}'.format(ical.__name__, ical.__version__)\n calendar['VERSION'] = 2.0\n calendar['X-WR-CALNAME'] = 'PyCon.DE 2018'\n\n for location, date in actions.keys():\n meta_info = location_and_time_axes[(date.year, date.month, date.day)]\n time_axis = meta_info['time_axis']\n for action in actions[(location, date)]:\n if action['title'] == 'End':\n continue\n\n event = create_event(action, date, location, time_axis)\n\n calendar.add_component(event)\n\n return calendar", "def make_calendar(\n self, name=None, cal_id=None, supported_calendar_component_set=None\n ):\n return self.calendar_home_set.make_calendar(\n name,\n cal_id,\n supported_calendar_component_set=supported_calendar_component_set,\n )", "def on_btnCalendarResIn_clicked(self,widget):\n try:\n variables.semaforo = 2\n variables.vencalendar.connect('delete-event', lambda w, e: w.hide() or True)\n variables.vencalendar.show()\n except:\n print('error abrir calendario')", "def __addCalendar(self, store, element):\n \n calendar = self.__getStore(store, element.get(\"type\"))\n\n # Months Widths\n if element.find(\"months/monthContext/monthWidth\") is not None:\n months = self.__getStore(calendar, \"month\")\n for child in element.findall(\"months/monthContext/monthWidth\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\")\n if not format in months:\n months[format] = {}\n \n for month in 
child.findall(\"month\"):\n if not month.get(\"draft\"):\n name = month.get(\"type\").upper()\n if not name in months[format]:\n months[format][name] = month.text\n\n\n # Day Widths\n if element.find(\"days/dayContext/dayWidth\") is not None:\n days = self.__getStore(calendar, \"day\")\n for child in element.findall(\"days/dayContext/dayWidth\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\")\n if not format in days:\n days[format] = {}\n\n for day in child.findall(\"day\"):\n if not day.get(\"draft\"):\n name = day.get(\"type\").upper()\n if not name in days[format]:\n days[format][name] = day.text\n\n\n # Quarter Widths\n if element.find(\"quarters/quarterContext/quarterWidth\") is not None:\n quarters = self.__getStore(calendar, \"quarter\")\n for child in element.findall(\"quarters/quarterContext/quarterWidth\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\")\n if not format in quarters:\n quarters[format] = {}\n\n for quarter in child.findall(\"quarter\"):\n if not quarter.get(\"draft\"):\n name = quarter.get(\"type\").upper()\n if not name in quarters[format]:\n quarters[format][name] = quarter.text\n \n \n # Date Formats\n if element.find(\"dateFormats/dateFormatLength\") is not None:\n dateFormats = self.__getStore(calendar, \"date\")\n for child in element.findall(\"dateFormats/dateFormatLength\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\").upper()\n text = child.find(\"dateFormat/pattern\").text\n if not format in dateFormats:\n dateFormats[format] = text\n\n\n # Time Formats\n if element.find(\"timeFormats/timeFormatLength\") is not None:\n timeFormats = self.__getStore(calendar, \"time\")\n for child in element.findall(\"timeFormats/timeFormatLength\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\").upper()\n text = child.find(\"timeFormat/pattern\").text\n if not format in timeFormats:\n timeFormats[format] = text\n \n \n # DateTime Formats\n if element.find(\"dateTimeFormats/availableFormats\") is not None:\n datetime = self.__getStore(calendar, \"datetime\")\n for child in element.findall(\"dateTimeFormats/availableFormats/dateFormatItem\"):\n if not child.get(\"draft\"):\n # no uppercase here, because of intentianal camelcase\n format = child.get(\"id\")\n text = child.text\n if not format in datetime:\n datetime[format] = text\n \n \n # Fields\n if element.find(\"fields/field\") is not None:\n fields = self.__getStore(calendar, \"field\")\n for child in element.findall(\"fields/field\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\").upper()\n for nameChild in child.findall(\"displayName\"):\n if not nameChild.get(\"draft\"):\n text = nameChild.text\n if not format in fields:\n fields[format] = text\n break\n \n \n # Relative\n if element.find(\"fields/field\") is not None:\n relatives = self.__getStore(calendar, \"relative\")\n for child in element.findall(\"fields/field\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\")\n if child.findall(\"relative\"):\n relativeField = self.__getStore(relatives, format)\n for relChild in child.findall(\"relative\"):\n if not relChild.get(\"draft\"):\n pos = relChild.get(\"type\")\n text = relChild.text\n if not pos in relativeField:\n relativeField[pos] = text", "def _view_schedule(self):\n def plus_top_attach(f):\n\n def plus(*args, **kwargs):\n top_attach, left_attach = f(*args, **kwargs)\n return top_attach + 1, left_attach + 1\n\n return plus\n\n @plus_top_attach\n def create_label(text, left_attach, right_attach,\n top_attach, 
bottom_attach, align=None):\n label = gtk.Label('<span font=\"%s\">%s</span>' %\n (Params().get_default_font(), text))\n label.set_use_markup(True)\n if align == 'left':\n label.set_alignment(xalign=0.0, yalign=0.5)\n elif align == 'right':\n label.set_alignment(xalign=1.0, yalign=0.5)\n self.table.attach(label, left_attach, right_attach,\n top_attach, bottom_attach, xoptions=gtk.FILL, yoptions=False)\n label.show()\n return top_attach, left_attach\n\n @plus_top_attach\n def create_separator(left_attach, right_attach,\n top_attach, bottom_attach):\n separator = gtk.HSeparator()\n self.table.attach(separator, left_attach, right_attach,\n top_attach, bottom_attach, xoptions=gtk.FILL, yoptions=False)\n separator.show()\n return top_attach, left_attach\n\n tattach, tlen, view_sch = 0, 0, Params().get_view_sch()\n for i in view_sch:\n if i:\n tlen += 1\n for day in ['Monday', 'Tuesday', 'Wednesday',\n 'Thursday', 'Friday', 'Saturday']:\n tattach = create_label('<b><span color=\"%s\">%s</span></b>' %\n (Params().get_day_color(), day), 0, tlen,\n tattach, tattach + 1, 'left')[0]\n tattach = create_separator(0, tlen, tattach, tattach + 1)[0]\n\n schedule = Schedule().get_schedule(day,\n Schedule().get_current_week() - 1)\n for i in range(8):\n if not schedule[i][1] == '' and \\\n (schedule[i][0] == Schedule().get_subgroup() or\n schedule[i][0] == 2):\n if not schedule[i][2]:\n label_color = '%s' % str(Params().get_lecture_color())\n elif schedule[i][2] == 1:\n label_color = '%s' % \\\n str(Params().get_laboratory_color())\n elif schedule[i][2] == 2:\n label_color = '%s' % str(Params().get_practice_color())\n else:\n label_color = '%s' % str(Params().get_non_color())\n\n label_template = '<span color=\"%s\">%s</span>'\n lattach = 0\n if view_sch[0]:\n lattach = create_label('<span color=\"%s\">%d.</span>' %\n (label_color, i),\n lattach, lattach + 1, tattach, tattach + 1)[1]\n if view_sch[1]:\n lattach = create_label(label_template % (label_color,\n '-'.join(Schedule().get_lessons_time()[i])),\n lattach, lattach + 1, tattach, tattach + 1)[1]\n if view_sch[2]:\n lattach = create_label(label_template %\n (label_color, schedule[i][1]),\n lattach, lattach + 1,\n tattach, tattach + 1, 'left')[1]\n if view_sch[3]:\n lattach = create_label(label_template %\n (label_color, schedule[i][3]),\n lattach, lattach + 1, tattach, tattach + 1)[1]\n if view_sch[4]:\n create_label(label_template %\n (label_color, schedule[i][4]),\n lattach, lattach + 1,\n tattach, tattach + 1, 'right')\n tattach += 1", "def get_date_display(self, context):\n return '{year}/{month}/{day}'.format(\n year=self.get_year(),\n month=self.get_month().zfill(2),\n day=self.get_day().zfill(2))", "def _PrintAllEventsOnDefaultCalendar(self):\n\n feed = self.cal_client.GetCalendarEventFeed()\n print 'Events on Primary Calendar: %s' % (feed.title.text,)\n for i, an_event in zip(xrange(len(feed.entry)), feed.entry):\n print '\\t%s. %s' % (i, an_event.title.text,)\n for p, a_participant in zip(xrange(len(an_event.who)), an_event.who):\n print '\\t\\t%s. 
%s' % (p, a_participant.email,)\n print '\\t\\t\\t%s' % (a_participant.value,)\n if a_participant.attendee_status:\n print '\\t\\t\\t%s' % (a_participant.attendee_status.value,)", "def __init__(self, master=None, **kw):\r\n # remove custom options from kw before initializing ttk.Frame\r\n fwday = kw.pop('firstweekday', calendar.MONDAY)\r\n year = kw.pop('year', self.datetime.now().year)\r\n month = kw.pop('month', self.datetime.now().month)\r\n locale = kw.pop('locale', None)\r\n sel_bg = kw.pop('selectbackground', '#F2074E')\r\n sel_fg = kw.pop('selectforeground', '#05640e')\r\n\r\n self._date = self.datetime(year, month, 1)\r\n self._selection = None # no date selected\r\n\r\n ttk.Frame.__init__(self, master, **kw)\r\n\r\n self._cal = get_calendar(locale, fwday)\r\n\r\n self.__setup_styles() # creates custom styles\r\n self.__place_widgets() # pack/grid used widgets\r\n self.__config_calendar() # adjust calendar columns and setup tags\r\n # configure a canvas, and proper bindings, for selecting dates\r\n self.__setup_selection(sel_bg, sel_fg)\r\n\r\n # store items ids, used for insertion later\r\n self._items = [self._calendar.insert('', 'end', values='')\r\n for _ in range(6)]\r\n # insert dates in the currently empty calendar\r\n self._build_calendar()\r\n\r\n # set the minimal size for the widget\r\n self._calendar.bind('<Map>', self.__minsize)\r\n\r\n # start and stop dates\r\n self.startDate, self.stopDate = None, None", "def placeCalendarButton(data,row,target,path,alts,**kwargs):\n# printPretty(\"args: %s %s %s %s\" % (data,row,target,path))\n datebut = gtk.Button()\n datebut.show()\n image = gtk.Image()\n image.set_from_file(\"img/date.png\")\n datebut.set_image(image)\n datebut.unset_flags(gtk.CAN_FOCUS)\n datebut.connect(\"clicked\",dateChoose,target,data,path,alts,kwargs)\n datebut.set_tooltip_text(\"Click to choose date from calendar\")\n row.pack_start(datebut,0,0,2)", "def func_calendar_list():\r\n creds = None\r\n global page_token\r\n #global new_calendar_list=[]\r\n # The file token.pickle stores the user's access and refresh tokens, and is\r\n # created automatically when the authorization flow completes for the first\r\n # time.\r\n if os.path.exists('token.pickle'):\r\n with open('token.pickle', 'rb') as token:\r\n creds = pickle.load(token)\r\n # If there are no (valid) credentials available, let the user log in.\r\n if not creds or not creds.valid:\r\n if creds and creds.expired and creds.refresh_token:\r\n creds.refresh(Request())\r\n else:\r\n flow = InstalledAppFlow.from_client_secrets_file(\r\n 'credentials.json', SCOPES)\r\n creds = flow.run_local_server(port=0)\r\n # Save the credentials for the next run\r\n with open('token.pickle', 'wb') as token:\r\n pickle.dump(creds, token)\r\n\r\n service = build('calendar', 'v3', credentials=creds)\r\n\r\n calendar_list = service.calendarList().list(pageToken=page_token).execute()\r\n new_calendar_list = []\r\n for calendar_list_entry in calendar_list['items']:\r\n new_calendar_list.append(calendar_list_entry['summary'])\r\n page_token = calendar_list.get('nextPageToken')\r\n return (new_calendar_list)", "def icon(self):\n return 'mdi:calendar'", "def export_event(self):\n\n cal = Eve()\n cal.add('summary', str(self.categories))\n cal.add('description', self.label)\n cal.add('dtstart', vDatetime(self.start))\n cal.add('dtend', vDatetime(self.end))\n return cal.to_ical()", "def on_date_change(self):\n self.date = self.ui.calendarWidget.selectedDate()\n self.update_views()", "def 
_trading_calendar(bundle=DEFAULT_BUNDLE):\n bundle_data = bundles.load(bundle)\n return bundle_data.equity_daily_bar_reader.trading_calendar", "def calendar(self):\n if \"calendar\" in self._prop_dict:\n if isinstance(self._prop_dict[\"calendar\"], OneDriveObjectBase):\n return self._prop_dict[\"calendar\"]\n else :\n self._prop_dict[\"calendar\"] = Calendar(self._prop_dict[\"calendar\"])\n return self._prop_dict[\"calendar\"]\n\n return None", "def calendar(self):\n if \"calendar\" in self._prop_dict:\n if isinstance(self._prop_dict[\"calendar\"], OneDriveObjectBase):\n return self._prop_dict[\"calendar\"]\n else :\n self._prop_dict[\"calendar\"] = Calendar(self._prop_dict[\"calendar\"])\n return self._prop_dict[\"calendar\"]\n\n return None", "def agenda_ical(request, num=None, name=None, acronym=None, session_id=None):\n meeting = get_meeting(num, type_in=None)\n schedule = get_schedule(meeting, name)\n updated = meeting.updated()\n\n if schedule is None and acronym is None and session_id is None:\n raise Http404\n\n assignments = SchedTimeSessAssignment.objects.filter(\n schedule__in=[schedule, schedule.base],\n session__on_agenda=True,\n )\n assignments = preprocess_assignments_for_agenda(assignments, meeting)\n AgendaKeywordTagger(assignments=assignments).apply()\n\n try:\n filt_params = parse_agenda_filter_params(request.GET)\n except ValueError as e:\n return HttpResponseBadRequest(str(e))\n\n if filt_params is not None:\n # Apply the filter\n assignments = [a for a in assignments if should_include_assignment(filt_params, a)]\n\n if acronym:\n assignments = [ a for a in assignments if a.session.historic_group and a.session.historic_group.acronym == acronym ]\n elif session_id:\n assignments = [ a for a in assignments if a.session_id == int(session_id) ]\n\n for a in assignments:\n if a.session:\n a.session.ical_status = ical_session_status(a)\n\n return render(request, \"meeting/agenda.ics\", {\n \"schedule\": schedule,\n \"assignments\": assignments,\n \"updated\": updated\n }, content_type=\"text/calendar\")", "def create_events_calendar():\n service = get_calendar_service()\n if not service:\n return\n calendar = {\n 'summary': 'Ting som skjer i Telemarkgruppa',\n 'timeZone': 'Europe/Oslo',\n }\n cal_insert_response = service.calendars().insert(body=calendar).execute()\n public_acl = {\n 'role': 'reader',\n 'scope': {\n 'type': 'default'\n }\n }\n acl_insert_response = service.acl().insert(calendarId=cal_insert_response['id'], body=public_acl).execute()\n return acl_insert_response", "def _InsertCalendar(self, title='Little League Schedule',\n description='This calendar contains practice and game times',\n time_zone='America/Los_Angeles', hidden=False, location='Oakland',\n color='#2952A3'):\n print 'Creating new calendar with title \"%s\"' % title\n calendar = gdata.calendar.data.CalendarEntry()\n calendar.title = atom.data.Title(text=title)\n calendar.summary = atom.data.Summary(text=description)\n calendar.where.append(gdata.calendar.data.CalendarWhere(value=location))\n calendar.color = gdata.calendar.data.ColorProperty(value=color)\n calendar.timezone = gdata.calendar.data.TimeZoneProperty(value=time_zone)\n\n if hidden:\n calendar.hidden = gdata.calendar.data.HiddenProperty(value='true')\n else:\n calendar.hidden = gdata.calendar.data.HiddenProperty(value='false')\n\n new_calendar = self.cal_client.InsertCalendar(new_calendar=calendar)\n return new_calendar", "def nasaCalendar(self):\n return requests.get(self.nasaURL).text", "def getEvent(self):\n\n event = {\n 
\"summary\": \"H1 \" + self.summary,\n \"location\": LOCATION,\n # The H1 tag \"classifies\" event as a shift\n \"description\": self.description + \"\\n\\nH1 Communication arbetspass\",\n \"start\": {\n \"dateTime\": \"{0}T{1}{2}\".format(self.start.date(), self.start.time(), self.offset)\n },\n \"end\": {\n \"dateTime\": \"{0}T{1}{2}\".format(self.end.date(), self.end.time(), self.offset)\n },\n \"reminders\": {\n \"useDefault\": False,\n \"overrides\": [\n {\n \"method\": \"popup\",\n \"minutes\": 720 # 12 hours\n },\n {\n \"method\": \"popup\",\n \"minutes\": 5 # 12 hours\n }\n ]\n },\n \"colorId\": googleCalendar.EVENT_COLORIDS[\"yellow\"]\n\n }\n return event", "def calendar(self):\n from office365.outlook.calendar.calendar import Calendar\n return self.properties.get('calendar',\n Calendar(self.context, ResourcePath(\"calendar\", self.resource_path)))", "def display_dividend_calendar(\n date: Optional[str] = None,\n sortby: str = \"Dividend\",\n ascend: bool = False,\n limit: int = 10,\n export: str = \"\",\n sheet_name: Optional[str] = None,\n):\n\n if date is None:\n date = datetime.today().strftime(\"%Y-%m-%d\")\n\n div_map = {\n \"symbol\": \"Symbol\",\n \"companyName\": \"Name\",\n \"dividend_Ex_Date\": \"Ex-Dividend Date\",\n \"payment_Date\": \"Payment Date\",\n \"record_Date\": \"Record Date\",\n \"dividend_Rate\": \"Dividend\",\n \"indicated_Annual_Dividend\": \"Annual Dividend\",\n \"announcement_Date\": \"Announcement Date\",\n }\n calendar = nasdaq_model.get_dividend_cal(date)\n if calendar.empty:\n console.print(\n \"No data found. Check that the date provided is a market day. If it is then try this function\"\n \" again as the request may have not gone through.\\n\"\n )\n return\n calendar = calendar.drop(columns=[\"announcement_Date\"])\n calendar.columns = calendar.columns.map(div_map)\n calendar = calendar.sort_values(by=sortby, ascending=ascend)\n print_rich_table(\n calendar,\n headers=[x.title() for x in calendar.columns],\n title=f\"[bold]Dividend Calendar for {date}[/bold]\",\n export=bool(export),\n limit=limit,\n )\n export_data(\n export,\n os.path.dirname(os.path.abspath(__file__)),\n \"divcal\",\n calendar,\n sheet_name,\n )", "def general_timeline():\n return render_template('timeline.html', general=True, show_username=True)", "def generate_ics(events, config):\n\n # Create the Calendar\n calendar = icalendar.Calendar()\n calendar.add('prodid', config.calendar_prodid)\n calendar.add('version', '2.0')\n calendar.add('method', 'publish')\n\n for event_data in events:\n # Create the event\n event = icalendar.Event()\n\n # Populate the event\n event.add('summary', event_data['title'])\n event.add('description', get_description(event_data))\n event.add('uid', event_data['id'])\n event.add('location', event_data['place'])\n event.add('dtstart', get_datetime(event_data, 'when_start'))\n if event_data['when_end']:\n event.add('dtend', get_datetime(event_data, 'when_end'))\n event.add('dtstamp', datetime.datetime.now())\n\n # Add the event to the calendar\n calendar.add_component(event)\n\n return calendar.to_ical()", "def generate_ics(days: Sequence[dict], filename: Text) -> None:\n cal = Calendar()\n cal.add(\"X-WR-CALNAME\", \"ไธญๅ›ฝๆณ•ๅฎš่Š‚ๅ‡ๆ—ฅ\")\n cal.add(\"X-WR-CALDESC\", \"ไธญๅ›ฝๆณ•ๅฎš่Š‚ๅ‡ๆ—ฅๆ•ฐๆฎ๏ผŒ่‡ชๅŠจๆฏๆ—ฅๆŠ“ๅ–ๅ›ฝๅŠก้™ขๅ…ฌๅ‘Šใ€‚\")\n cal.add(\"VERSION\", \"2.0\")\n cal.add(\"METHOD\", \"PUBLISH\")\n cal.add(\"CLASS\", \"PUBLIC\")\n\n cal.add_component(_create_timezone())\n\n days = sorted(days, key=lambda x: x[\"date\"])\n\n for fr, to in 
_iter_date_ranges(days):\n start = _cast_date(fr[\"date\"])\n end = _cast_date(to[\"date\"]) + datetime.timedelta(days=1)\n\n name = fr[\"name\"] + \"ๅ‡ๆœŸ\"\n if not fr[\"isOffDay\"]:\n name = \"ไธŠ็ญ(่กฅ\" + name + \")\"\n cal.add_component(_create_event(name, start, end))\n\n with open(filename, \"wb\") as f:\n f.write(cal.to_ical())", "def appointment():\r\n return render_template(\r\n 'about.html',\r\n title='About',\r\n year=datetime.now().year,\r\n message='Your application description page.'\r\n )", "def get_date_list(self, queryset, date_type):\n date_field = self.get_date_field()\n dates_group = [list(qs.dates(date_field, date_type)) for qs in queryset]\n dates = [d.day for d in reduce(lambda a, b: a + b, dates_group, [])]\n\n calendar.setfirstweekday(6) # starts at sunday\n month = self.get_month()\n year = self.get_year()\n cal = calendar.monthcalendar(int(year), int(month))\n\n for i, week in enumerate(cal):\n for j, day in enumerate(week):\n state = models.DATE_STATES[dates.count(day)]\n cal[i][j] = {'day': day, 'state': state}\n return cal", "def save_calendar(calendar):\n with open(\"calendar.txt\", \"w\") as fl:\n ls_str = \"\"\n keys = list(calendar.keys())[::-1]\n for key in keys:\n ls_str += f\"{key}:\"\n for ev in calendar[key]:\n ls_str += f\"{str(ev['start']).zfill(2)}-{str(ev['end']).zfill(2)} {ev['title']}\\t\"\n ls_str = ls_str[:-1]\n ls_str += \"\\n\"\n fl.write(ls_str)\n return True", "def on_btnCalendarResOut_clicked(self, widget):\n try:\n variables.semaforo = 3\n variables.vencalendar.connect('delete-event', lambda w, e: w.hide() or True)\n variables.vencalendar.show()\n except:\n print('error abrir calendario')", "def _select_date_changed(self):\n self.model.edit_traits(view=View(\n UCustom('date'),\n buttons=['OK'],\n title=u'ๆ•ฐๆฎ็”Ÿๆˆๆ—ฅๆœŸ้€‰ๆ‹ฉ',\n kind='panel',\n ))", "def isocalendar(self, *args, **kwargs): # real signature unknown\r\n pass", "def calendar_list(self, calendar_id):\r\n return CalendarList(self, calendar_id)", "def main():\r\n credentials = get_credentials()\r\n http = credentials.authorize(httplib2.Http())\r\n service = discovery.build('calendar', 'v3', http=http)\r\n\r\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\r\n print('Getting the upcoming 10 events')\r\n eventsResult = service.events().list(\r\n calendarId='primary', timeMin=now, maxResults=10, singleEvents=True,\r\n orderBy='startTime').execute()\r\n events = eventsResult.get('items', [])\r\n\r\n if not events:\r\n print('No upcoming events found.')\r\n for event in events:\r\n start = event['start'].get('dateTime', event['start'].get('date'))\r\n print(start, event['summary'])", "def show_today():\n month_text = (\n \"JAN\",\n \"FEB\",\n \"MAR\",\n \"APR\",\n \"MAY\",\n \"JUN\",\n \"JUL\",\n \"AUG\",\n \"SEP\",\n \"OCT\",\n \"NOV\",\n \"DEC\",\n )[now.tm_mon - 1]\n day_text = \"{:2}\".format(now.tm_mday)\n\n date_label[0].text = month_text[0]\n date_label[1].text = month_text[1]\n date_label[2].text = month_text[2]\n date_label[3].text = day_text[0]\n date_label[4].text = day_text[1]", "def calendar(self, name=None, cal_id=None):\n if name and not cal_id:\n for calendar in self.calendars():\n display_name = calendar.get_display_name()\n if display_name == name:\n return calendar\n if name and not cal_id:\n raise error.NotFoundError(\n \"No calendar with name %s found under %s\" % (name, self.url)\n )\n if not cal_id and not name:\n return self.calendars()[0]\n\n if str(URL.objectify(cal_id).canonical()).startswith(\n 
str(self.client.url.canonical())\n ):\n url = self.client.url.join(cal_id)\n elif (\n isinstance(cal_id, URL)\n or cal_id.startswith(\"https://\")\n or cal_id.startswith(\"http://\")\n ):\n url = self.url.join(cal_id)\n else:\n url = self.url.join(quote(cal_id) + \"/\")\n\n return Calendar(self.client, name=name, parent=self, url=url, id=cal_id)", "def formatDay(self, themonth, date, num_weeks):\n if date.month == themonth:\n day_class = 'day'\n else:\n day_class = 'noday' # day outside month\n\n html = '<td class=\"%s' % day_class\n\n # if this is today then highlight it\n if date == self.today:\n html += ' today'\n today_text = 'Today '\n else:\n today_text = ''\n\n # if this is the selected date then tag it\n if date == self.selected_date or (self.selected_record\n and date == self.selected_record.start_date):\n html += ' selected'\n # if a filter range is set then tag it\n elif (self.filter_start_date and self.filter_finish_date\n and self.filter_start_date <= date\n and date <= self.filter_finish_date):\n html += ' filtered'\n\n html += ('\" style=\"height: %f%%\"><div class=\"%s_header\">'\n '<a class=\"block\" '\n 'href=\"?year=%d&month=%d&day=%d&clear_recording_id=1\">'\n '%s%d</a></div>' % (90.0 / num_weeks, day_class,\n date.year, date.month, date.day, today_text, date.day))\n\n if self._storage:\n for recording in self._storage.getRecordings(date,\n station=self.filter_station):\n extra_div_class = \"\"\n if (self.selected_record\n and recording.id == self.selected_record.id):\n extra_div_class += \" selected_entry\"\n if ((self.filter_title and self.filter_title\n != recording.title)\n or (self.filter_start_date and self.filter_start_date\n > recording.finish_time.date())\n or (self.filter_finish_date and self.filter_finish_date\n < recording.start_time.date())):\n extra_div_class += \" filtered_out\"\n html += ('<div class=\"day_entry%s\"><a class=\"block\" '\n 'href=\"?year=%d&month=%d&recording_id=%d'\n '&set_recording_id=1\">\\n'\n '<span class=\"recording_time\">%s</span>\\n'\n '<span class=\"recording_station\">%s</span>\\n'\n '<span class=\"recording_title\">%s</span>\\n'\n '</a></div>\\n' % (extra_div_class, date.year,\n date.month, recording.id,\n formatTimeUI(recording.start_time, compact=True),\n formatStationName(recording.station, compact=True),\n recording.title))\n\n return html + '</td>'", "def check_calender_api():\n cal = CalendarUtil()\n fromdate = datetime(2020, 5, 27, 19, 30, 0)\n todate = fromdate + timedelta(hours=0)\n event = cal.addToCalendar(\"avishekh.bharati@gmail.com\", fromdate, todate, \"this is summary...\")\n print(event)\n return jsonify({\"success\": True})" ]
[ "0.7174143", "0.6856298", "0.68165946", "0.65732276", "0.64761686", "0.64670926", "0.6392814", "0.6325244", "0.6318216", "0.61248857", "0.61215866", "0.61215866", "0.611085", "0.6099675", "0.6070785", "0.60701156", "0.60354185", "0.6012115", "0.6007833", "0.60075265", "0.6006172", "0.6004829", "0.59994155", "0.59919924", "0.5987288", "0.59668106", "0.5958764", "0.59361434", "0.58963656", "0.58963615", "0.5890954", "0.5887088", "0.5886194", "0.5871847", "0.5846905", "0.5846905", "0.58434445", "0.58434445", "0.5841854", "0.58362573", "0.58331704", "0.5828742", "0.5800675", "0.577982", "0.57592535", "0.5756108", "0.57434607", "0.5730003", "0.57052314", "0.5703794", "0.56995356", "0.5676567", "0.56762064", "0.56708246", "0.56659234", "0.56413364", "0.56267405", "0.56209284", "0.56190544", "0.56190544", "0.5612697", "0.56021935", "0.5591153", "0.5581803", "0.5581471", "0.5572066", "0.55484176", "0.5547151", "0.553628", "0.5518729", "0.5498815", "0.5498605", "0.5493496", "0.5472066", "0.54634356", "0.54627144", "0.5452088", "0.5452088", "0.542657", "0.5415339", "0.5410131", "0.5408929", "0.54089004", "0.5404918", "0.5404437", "0.5400213", "0.5396667", "0.5375816", "0.53664553", "0.5350859", "0.5341161", "0.53408754", "0.5339874", "0.53299886", "0.53268194", "0.53247046", "0.5315308", "0.52970946", "0.5296962", "0.52953345" ]
0.5767331
44
View function to handle our create appointment form
def save_appointment_details(request, calendar_id): def schedule_mail(reminder_date, appointment): # Configure our scheduler for reminder try: trigger = DateTrigger(run_date=reminder_date) scheduler.add_job(send_appointment_mail, args=[appointment], trigger=trigger) except Exception as exp: print(exp) def schedule_sms(reminder_date, appointment): # Configure our scheduler for reminder try: trigger = DateTrigger(run_date=reminder_date) scheduler.add_job(send_appointment_sms, args=[appointment], trigger=trigger) except Exception as exp: print(exp) start_time = request.GET['start_time'][:19] end_time = request.GET['end_time'][:19] start_time = datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S") end_time=datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S") calendar_obj = Calendar.objects.get(pk=calendar_id) # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = AppointmentForm(request.POST) # check whether it's valid and save it if form.is_valid(): # Save appointment details mobilephone = form.data['mobilephone'] email = form.data['email'] first_name = form.data['first_name'] last_name = form.data['last_name'] notes = form.data['notes'] appointment = Appointment(start_time=start_time, end_time=end_time, first_name=first_name, last_name=last_name, email=email, mobilephone=mobilephone, notes=notes) appointment.calendar = calendar_obj appointment.save() try: send_appointment_mail(appointment) # send appointment details email except Exception as exp: print(exp) try: send_appointment_sms(appointment) # send appointment details sms except Exception as exp: print(exp) # Calculate reminder schedule dates reminder1 = start_time - timedelta(hours=2) reminder2 = start_time - timedelta(hours=24) reminder3 = start_time - timedelta(days=7) # Schedule mails schedule_mail(reminder1, appointment) schedule_mail(reminder2, appointment) schedule_mail(reminder3, appointment) # Schedule sms schedule_sms(reminder1, appointment) schedule_sms(reminder2, appointment) schedule_sms(reminder3, appointment) return redirect(reverse('appointment:complete_appointment', args=[calendar_id])) # if a GET (or any other method) we'll create a blank form else: form = AppointmentForm() return render(request, 'appointment_form.html', {'form': form, 'start_time': start_time, 'end_time': end_time, 'office_location': calendar_obj.office_location})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_appointment():\n\n form = AppointmentForm()\n\n if form.validate_on_submit():\n\n appointment = Appointment(\n title = form.title.data,\n description = form.description.data,\n location = form.location.data,\n start = form.start.data,\n client = form.client.data,\n user = current_user\n )\n\n try:\n db.session.add(appointment)\n db.session.commit()\n\n flash('Successfully created the appointment.')\n\n return redirect(url_for('appointment.read_appointments'))\n except:\n flash('Error creating the appointment')\n\n return render_template('appointments/form.html.j2', form=form, title='Create appointment')", "def create_appointment(request):\n dates = get_dates()\n users = User.objects.all()\n\n if request.POST:\n new_appointment = create_appointment_form(request, request.POST)\n if new_appointment:\n messages.add_message(request, messages.SUCCESS, 'Your appointment as been created successfully.')\n else:\n messages.add_message(request, messages.ERROR, 'An error occurred. Your appointment could not be created.'\n 'If this error persists, try contacting our service desk at'\n '1-800-RIX-AJAZ')\n return redirect('view_appointments')\n\n return render(request, 'create_appointment.html', {'the_user': request.user,\n 'dates': dates,\n 'users': users,\n 'hours': range(1, 13),\n 'minutes': range(1, 60)})", "def create_appointment():\n\n msg = render_template('date')\n return question(msg)", "def create_appointment_form(request, post):\n # string_date = \"{0}-{1}-{2}\".format(year, month, day)\n # date = datetime.datetime.strptime(string_date, '%Y-%m-%d').date()\n new_appointment = None\n date_string = post.get(\"date\") + \"-\" + post.get(\"time\")\n date = datetime.datetime.strptime(date_string, '%Y-%m-%d-%H:%M')\n the_user = request.user\n notes = post.get(\"notes\")\n\n if the_user.userprofile.is_doctor():\n patient_id = int(post.get(\"patient\", the_user.pk))\n patient = User.objects.get(pk=patient_id)\n doctor = User.objects.get(pk=the_user.id)\n new_appointment = Appointment.objects.create(date=date, doctor=doctor, patient=patient, notes=notes)\n\n elif request.user.userprofile.is_patient():\n doctor_id = int(post.get(\"doctor\", the_user.pk))\n doctor = User.objects.get(pk=doctor_id)\n patient = User.objects.get(pk=the_user.id)\n new_appointment = Appointment.objects.create(date=date, doctor=doctor, patient=patient, notes=notes)\n\n return new_appointment", "def create_patient_appointment():\n if request.method == 'POST':\n patient_email = request.form['patient_email']\n doctor_email = request.form['doctor_email']\n date = request.form['date']\n time = request.form['time']\n\n response = requests.post(server_url + 'patient/create_appointment', json={\n 'patient_email': patient_email,\n 'doctor_email': doctor_email,\n 'date': date,\n 'time': time\n })\n\n response = response.json()\n\n if response.get('Status') == \"DOCTOR_HAS_AN_APPOINTMENT_SELECTED_TIME_SLOT\":\n return render_template('patients/appointment_failed.html')\n elif response.get('Status') == \"DOCTOR_IS_NOT_AVAILABLE_AT_THAT_TIME\":\n return render_template('patients/appointment_failed.html')\n elif response.get('Status') == \"INVALID_PATIENT_EMAIL\":\n return render_template('patients/appointment_failed.html')\n elif response.get('Status') == \"INVALID_DOCTOR_EMAIL\":\n return render_template('patients/appointment_failed.html')\n else:\n referer = request.referrer\n return redirect(referer, code=302)\n else:\n return render_template('patients/dashboard.html')", "def clerk_create_appointment():\n if request.method 
== 'POST':\n patient_email = request.form['patient_email']\n doctor_email = request.form['doctor_email']\n date = request.form['date']\n time = request.form['time']\n\n response_clerk_create_appointment = requests.post(server_url + 'medical_clerk/create_appointment', json={\n 'patient_email': patient_email,\n 'doctor_email': doctor_email,\n 'date': date,\n 'time': time\n })\n response_clerk_create_appointment = response_clerk_create_appointment.json()\n\n if response_clerk_create_appointment.get('Status') == \"INVALID_DOCTOR_EMAIL\":\n return render_template('clerks/clerk_appointment_failed.html')\n elif response_clerk_create_appointment.get('Status') == \"INVALID_PATIENT_EMAIL\":\n return render_template('clerks/clerk_appointment_failed.html')\n elif response_clerk_create_appointment.get('Status') == \"DOCTOR_IS_NOT_AVAILABLE_AT_THAT_TIME\":\n return render_template('clerks/clerk_appointment_failed.html')\n elif response_clerk_create_appointment.get('Status') == \"DOCTOR_HAS_AN_APPOINTMENT_SELECTED_TIME_SLOT\":\n return render_template('clerks/clerk_appointment_failed.html')\n else:\n referer = request.referrer\n return redirect(referer, code=302)\n else:\n return render_template('clerks/home.html')", "def create_calendar(request):\n if request.method == 'POST':\n\n form = CalendarForm(request.POST)\n \n if form.is_valid():\n calendar = form.save(commit=False) # prevent form from saving since we need to link company\n calendar.company = request.user.company\n calendar.save()\n return redirect('appointment:calendar_list')\n else:\n form = CalendarForm()\n return render(request, 'calendar_form.html', {'form': form})", "def appointment():\r\n return render_template(\r\n 'about.html',\r\n title='About',\r\n year=datetime.now().year,\r\n message='Your application description page.'\r\n )", "def add_event(request, owner_type, owner_id):\n\n # Like before, get the request's context.\n context = RequestContext(request)\n event_added = False\n\n user = request.user\n # If it's a HTTP POST, we're interested in processing form data.\n if request.method == 'POST':\n verified_obj = verified_calendar(context, owner_type, owner_id, user)\n if not isinstance(verified_obj, HttpResponse):\n calendar, edit_priv = verified_obj\n else:\n return verified_obj\n\n # Attempt to grab information from the raw form information.\n event_form = EventForm(data=request.POST)\n if event_form.is_valid():\n # Save the event's form data to the database.\n event = event_form.save(commit=False)\n event.cal = calendar\n event.creator = user\n\n event.save()\n\n event_added = True\n\n #notify the subscribers\n created_event.send(sender=None, owner_type=owner_type, owner_id=owner_id,\n event=event, user=user)\n\n\n # Invalid form or forms - mistakes or something else?\n # Print problems to the terminal.\n # They'll also be shown to the user.\n else:\n print event_form.errors\n\n # Not a HTTP POST, so we render our form using the EventForm.\n # These forms will be blank, ready for user input.\n else:\n event_form = EventForm()\n\n # Render the template depending on the context.\n return render_to_response(\n 'scheduler/add_event.html', {'event_form': event_form, 'user' : user,\n 'event_added': event_added},\n context)", "def create_event():\n event = None\n form = CreateEventForm()\n if form.validate_on_submit():\n venue = Venue.query.filter_by(address=form.address.data).first()\n if venue is None: # venue not already in db, need to add it\n venue_data = form.data\n 
venue_data[\"name\"] = venue_data[\"venue_name\"]\n 
venue_data[\"state\"] = CreateEventForm.convert_choice_to_value(form.state.data, \"STATES\")\n venue = Venue.create(**venue_data)\n event_type = EventType.query.get(form.event_type.data)\n event_category = EventCategory.query.get(form.category.data)\n start_time = CreateEventForm.convert_choice_to_value(form.start_time.data, \"TIMES\")\n end_time = CreateEventForm.convert_choice_to_value(form.end_time.data, \"TIMES\")\n event = Event(\n title=form.title.data,\n start_datetime=datetime.combine(form.start_date.data, start_time),\n end_datetime=datetime.combine(form.end_date.data, end_time),\n venue=venue,\n event_type=event_type,\n event_category=event_category,\n user=current_user._get_current_object(),\n )\n db.session.commit()\n return redirect(url_for(\"events.event_details\", id=event.id))\n return render_template(\"events/create_event.html\", form=form, event=event)", "def edit_appointment(request, id):\n users = User.objects.all()\n appointment = get_object_or_404(Appointment, pk=id)\n if request.POST:\n post = request.POST\n date_string = post.get(\"date\") + \"-\" + post.get(\"time\")\n try:\n date = datetime.datetime.strptime(date_string, '%Y-%m-%d-%H:%M')\n appointment.date = date\n except ValueError:\n pass\n the_user = request.user\n notes = post.get(\"notes\")\n appointment.notes = notes\n\n if the_user.userprofile.is_doctor():\n try:\n patient_id = int(post.get(\"patient\", the_user.pk))\n patient = User.objects.get(pk=patient_id)\n appointment.patient = patient\n except ValueError:\n pass\n\n elif request.user.userprofile.is_patient():\n try:\n doctor_id = int(post.get(\"doctor\", the_user.pk))\n doctor = User.objects.get(pk=doctor_id)\n appointment.doctor = doctor\n except ValueError:\n pass\n\n if appointment:\n messages.add_message(request, messages.SUCCESS, 'Your changes have been saved.')\n else:\n messages.add_message(request, messages.ERROR, 'An error occurred. 
Please contact an admin for assistance.')\n appointment.save()\n return redirect('view_appointments')\n return render(request, 'edit_appointment.html', {'appointment': appointment,\n 'the_user': request.user,\n 'users': users})", "def add_event():\n\n business = request.form.get('bus_name')\n name_evt = request.form.get('name_evt')\n\n start = request.form.get('start')\n end = request.form.get('end')\n description = request.form.get('description')\n\n #TODO might run into service option problems\n # service = request.form.get('service')\n\n #business = get bus_id from session?\n\n # new_evt = crud.create_event(name_evt, start, end, description, service, business)\n\n # return redirect('/')\n \n return render_template('add_evts.html')", "def timesheet_form(request):\r\n timsheetForm = TimeSheetForm()\r\n internalForm = InternalForm()\r\n\r\n return render(\r\n request,\r\n 'timesheet/forms/add.html',\r\n {\r\n 'timsheetForm':timsheetForm,\r\n 'internalForm':internalForm\r\n }\r\n )", "def create_venue_form():\n form = VenueForm()\n return render_template('forms/new_venue.html', form=form)", "def office_create(request, slug):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n #if the request is GET presents empty form\n if request.method == 'GET':\n\n office_form = OfficeForm()\n return render_to_response('office_form.html', {'form': office_form, 'company':company},\n context_instance=RequestContext(request))\n \n else:\n office_form = OfficeForm(request.POST)\n #if is POST Validates the form is well filled and save it redirecting to the company page\n if office_form.is_valid():\n of = office_form.save(commit=False)\n of.company = company\n of.save()\n return HttpResponseRedirect('/company/'+str(slug))\n\n #if not well filled redirect to the original create and display error\n else:\n return render_to_response('office_form.html', \n {'form': office_form, 'form_errors': office_form.errors, 'company':company},\n context_instance=RequestContext(request))", "def update_appointment(request,pk):\n appointment = AppointmentRequests.objects.get(id=pk)\n form = AppointmentUpdate(instance=appointment)\n if request.method == \"POST\":\n form = AppointmentUpdate(request.POST,instance=appointment)\n if form.is_valid():\n form.save()\n return redirect(\"dashboard\")\n else:\n messages.info(request,\"Invalid Data sent, Make sure you provided right data.\")\n return redirect(\"update_appointment\",pk=pk)\n else:\n return render(request,\"update_appointment.html\",{\"form\":form})", "def book(request):\n if request.method == \"POST\": # getting all fields\n first_name = request.POST.get(\"first_name\") \n last_name = request.POST.get(\"last_name\")\n email_address = request.POST.get(\"email_address\")\n phone_code = request.POST.get(\"phone_code\")\n phone_number = request.POST.get(\"phone_number\")\n countries = request.POST.getlist(\"countries\")\n company = request.POST.get(\"company\")\n objective = request.POST.get(\"objective\")\n details = request.POST.get(\"details\")\n print(first_name,last_name,email_address,phone_code,phone_number,countries,company,objective,details)\n # if all fields not None and have value\n if first_name and last_name and email_address and phone_code and phone_number and countries and company and objective and details:\n try: # to check that phone number is not text, try to convert it to integer\n phone_number = int(phone_number)\n except: # if failed to 
be converted to integer\n messages.info(request,\"Phone number field must be filled with numbers only.\") # display this message for user\n return redirect(\"book\") # reload the page\n mobile_number = phone_code + str(phone_number) # getting complete mobile number as string\n selected_countries = \", \".join(countries) # converting countries list to be saved as string\n print(selected_countries)\n if not AppointmentRequests.objects.filter(phone_number=mobile_number): # if a user tries to request an appointment with new info of mobile number and email address (not already exist in database)\n if not AppointmentRequests.objects.filter(email_address=email_address):\n\n AppointmentRequests.objects.create(first_name=first_name,last_name=last_name,email_address=email_address,phone_number=mobile_number,\n countries=selected_countries,company= company,objective=objective, details=details) # create an appointment\n\n\n # send email to user\n send_mail( \n subject=f\"Service Provider Appointment\",\n message=f\"\"\"\n Dear {first_name} {last_name},\n [+] Your Info provided:\n 1- First name: {first_name}.\n 2- Last name: {last_name}.\n 3- Email address: {email_address}.\n 4- Phone number: {mobile_number}.\n 5- Countries: {selected_countries}.\n 6- Company: {company}.\n 7- Objective: {objective}.\n 8- Details:\n {details}\n \\n\n We will communicate with you as soon as possible.\n \"\"\",\n recipient_list=[email_address,],from_email=\"todotasks4000@gmail.com\",fail_silently=False,\n )\n # send email to service provider agent\n send_mail(\n subject=f\"A new requested Appointment by {first_name} {last_name}\",\n message=f\"\"\"\n [+] Info provided:\n 1- First name: {first_name}.\n 2- Last name: {last_name}.\n 3- Email address: {email_address}.\n 4- Phone number: {mobile_number}.\n 5- Countries: {selected_countries}.\n 6- Company: {company}.\n 7- Objective: {objective}.\n 8- Details:\n {details}\n \"\"\",\n recipient_list=[\"todotasks4000@gmail.com\",],from_email=\"todotasks4000@gmail.com\",fail_silently=False,\n )\n return redirect(\"confirm\")\n\n else:\n messages.info(request,\"You have already sent a request, we will communicate you as soon as possible, we will handle any changes you want (if exist) when contact.\")\n return redirect(\"book\") # reload the page\n\n else: # if user tries to request a new appointment using same mobile number\n messages.info(request,\"You have already sent a request, we will communicate you as soon as possible, we will handle any changes you want (if exist) when contact.\")\n return redirect(\"book\") # reload the page\n \n\n\n else: # if any field is empty or None\n messages.info(request,\"Please, fill empty fields\")\n return redirect(\"book\") # reload the page\n \n return render(request,\"book_appointment.html\")", "def createForm(request):\n if request.method == 'POST':\n form = QuestionFormForm(request.POST)\n if form.is_valid():\n #return the uuid so the organization can use that link in the post to connect to the questionform\n formID = form.save().UUID\n #send them the url for the form\n messages.success(request, 'You have made your question form accessible at: ' + request.build_absolute_uri('/post/') + f'apply/{formID}')\n context = {'form': form}\n return render(request, 'scholarship.html', context=context)\n form = QuestionFormForm()\n context = {'form': form}\n return render(request, 'scholarship.html', context=context)", "def app_form():\n\n return render_template(\"application-form.html\")", "def complete_appointment(request, calendar_id):\n calendar = 
Calendar.objects.get(pk=calendar_id)\n return render(request, 'complete_appointment.html', {'calendar': calendar})", "def application_form():\n\n\treturn render_template(\"application-form.html\")", "def create_event(request):\n form = EventForm(request.POST, request.FILES)\n number_people = form.data.get('number_people')\n arrange_time = form.data.get('arrange_time')\n if request.method == 'POST':\n if form.is_valid():\n if int(number_people) >= 10:\n try:\n if datetime.datetime.strptime(arrange_time,'%Y-%m-%d %H:%M').date() > timezone.now().date():\n photo = form.cleaned_data.get('photo') \n event_name = form.data.get('event_name')\n location = form.data.get('location')\n short_description = form.data.get('short_description')\n long_description = form.data.get('long_description')\n event = Event(event_name = event_name, location=location, short_description = short_description, long_description = long_description, arrange_time = arrange_time, number_people = number_people,full=False, photo=photo, user=request.user)\n event.save()\n messages.success(request, f\"You've created the {event_name} event!\")\n return HttpResponseRedirect(reverse('index'))\n else:\n messages.warning(request, \"Arrangement date must be in the future!\")\n except:\n messages.warning(request, f\"You should input the date and time as format!\")\n return render(request, 'Kvent/create-event-page.html', {'form': form})\n else :\n messages.warning(request, \"Number of participants must be 10 or more\")\n else:\n messages.warning(request, f\"You should input the date and time as format!\")\n return render(request, 'Kvent/create-event-page.html', {'form': form})", "def get_app_form():\n\t\n\treturn render_template(\"application-form.html\")", "def award_create(request, slug):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n #if the request is GET presents empty form\n if request.method == 'GET':\n\n award_form = AwardForm()\n return render_to_response('award_form.html', {'form': award_form, 'company':company},\n context_instance=RequestContext(request))\n \n else:\n award_form = AwardForm(request.POST)\n #if is POST Validates the form is well filled and save it redirecting to the company page\n if award_form.is_valid():\n af = award_form.save(commit=False)\n af.company = company\n af.save()\n return HttpResponseRedirect('/company/'+str(slug))\n\n #if not well filled redirect to the original create and display error\n else:\n return render_to_response('award_form.html', \n {'form': award_form, 'form_errors': award_form.errors, 'company':company},\n context_instance=RequestContext(request))", "def todos_create_page():\n todo = Todo()\n if todo.form_submit():\n todo.update(mongo.db)\n print('Created new TODO: {text}'.format(**todo.doc))\n return redirect('/')\n else:\n return render_template(\n template_name_or_list='todo.html',\n todo=todo,\n handle='Create')", "def make_form(self):", "def form_event(request, event_id):\n event = get_object_or_404(Event ,pk=event_id)\n if request.method == \"POST\":\n form = EventForm(request.POST, instance=event)\n if form.is_valid():\n form.save()\n else:\n form = EventForm(instance=event)\n\n context = {'event': event,\n 'event_form': form }\n\n return render_to_response('event_form.html',\n context,\n context_instance=RequestContext(request))", "def eventnew(request, id=None):\n context = {}\n # get instance if id is passed in\n if id:\n instance = 
get_object_or_404(BaseEvent, pk=id)\n context['new'] = False\n perms = ['events.view_events']\n if not (request.user.has_perms(perms) or\n request.user.has_perms(perms, instance)):\n raise PermissionDenied\n is_event2019 = isinstance(instance, Event2019)\n else:\n instance = None\n context['new'] = True\n is_event2019 = True\n perms = ['events.add_raw_event']\n if not request.user.has_perms(perms):\n raise PermissionDenied\n if is_event2019:\n mk_serviceinstance_formset = inlineformset_factory(BaseEvent, ServiceInstance, extra=3, exclude=[])\n mk_serviceinstance_formset.form = curry_class(ServiceInstanceForm, event=instance)\n context['is_event2019'] = is_event2019\n\n if request.method == 'POST':\n if instance:\n # calculate whether an email should be sent based on the event information *before* saving the form.\n should_send_email = not instance.test_event\n if should_send_email:\n bcc = [settings.EMAIL_TARGET_VP]\n if instance.has_projection:\n bcc.append(settings.EMAIL_TARGET_HP)\n\n if is_event2019:\n if instance:\n form = InternalEventForm2019(data=request.POST, request_user=request.user, instance=instance)\n else:\n form = InternalEventForm2019(data=request.POST, request_user=request.user)\n else:\n form = InternalEventForm(data=request.POST, request_user=request.user, instance=instance)\n if is_event2019:\n services_formset = mk_serviceinstance_formset(request.POST, request.FILES, instance=instance)\n\n if form.is_valid() and (not is_event2019 or services_formset.is_valid()):\n if instance:\n set_revision_comment('Edited', form)\n obj = form.save()\n if is_event2019:\n services_formset.save()\n if should_send_email:\n # BCC the crew chiefs\n for ccinstance in obj.ccinstances.all():\n if ccinstance.crew_chief.email:\n bcc.append(ccinstance.crew_chief.email)\n if obj.reviewed:\n subject = \"Reviewed Event Edited\"\n email_body = \"The following event was edited by %s after the event was reviewed for billing.\" \\\n % request.user.get_full_name()\n bcc.append(settings.EMAIL_TARGET_T)\n elif obj.approved:\n subject = \"Approved Event Edited\"\n email_body = \"The following event was edited by %s after the event was approved.\" % \\\n request.user.get_full_name()\n else:\n subject = \"Event Edited\"\n email_body = \"The following event was edited by %s.\" % request.user.get_full_name()\n # Add list of changed fields to the email\n if len(form.changed_data) > 0:\n email_body += \"\\nFields changed: \"\n for field_name in form.changed_data:\n email_body += field_name + \", \"\n email_body = email_body[:-2]\n # add HP to the email if projection was just added to the event\n if obj.has_projection and settings.EMAIL_TARGET_HP not in bcc:\n bcc.append(settings.EMAIL_TARGET_HP)\n to_emails = []\n if request.user.email:\n to_emails.append(request.user.email)\n email = EventEmailGenerator(event=obj, subject=subject, to_emails=to_emails, body=email_body, bcc=bcc)\n email.send()\n else:\n set_revision_comment('Created event', None)\n obj = form.save(commit=False)\n obj.submitted_by = request.user\n obj.submitted_ip = request.META.get('REMOTE_ADDR')\n obj.save()\n form.save_m2m()\n if is_event2019:\n mk_serviceinstance_formset.form = curry_class(ServiceInstanceForm, event=obj)\n services_formset = mk_serviceinstance_formset(request.POST, request.FILES, instance=instance)\n services_formset.is_valid()\n services_formset.save()\n return HttpResponseRedirect(reverse('events:detail', args=(obj.id,)))\n else:\n context['e'] = form.errors\n context['form'] = form\n if not services_formset.is_valid() and 
is_event2019:\n messages.add_message(request, messages.ERROR, \"Whoops! There was an error updating the services \"\n \"for this event.\")\n if is_event2019:\n context['services_formset'] = services_formset\n else:\n if is_event2019:\n context['form'] = InternalEventForm2019(request_user=request.user, instance=instance)\n context['services_formset'] = mk_serviceinstance_formset(instance=instance)\n else:\n context['form'] = InternalEventForm(request_user=request.user, instance=instance)\n if instance:\n context['msg'] = \"Edit Event\"\n else:\n context['msg'] = \"New Event\"\n\n return render(request, 'form_crispy_event.html', context)", "def render_creation_form(request: Request):\n return templates.TemplateResponse(\"creation_form.html\",{'request': request})", "def management_create(request, slug):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n\n #verifies the person has access to the company or is an incubator employee\n edit = validate_user_company_access_or_redirect(request,company)\n\n #if the request is GET presents empty form\n if request.method == 'GET':\n\n management_form = ManagementForm()\n return render_to_response('management_form.html', {'form': management_form, 'company':company},\n context_instance=RequestContext(request))\n \n else:\n management_form = ManagementForm(request.POST)\n #if is POST Validates the form is well filled and save it redirecting to the company page\n if management_form.is_valid():\n mf = management_form.save(commit=False)\n mf.company = company\n mf.save()\n return HttpResponseRedirect('/company/'+str(slug))\n\n #if not well filled redirect to the original create and display error\n else:\n return render_to_response('management_form.html', \n {'form': management_form, 'form_errors': management_form.errors, 'company':company},\n context_instance=RequestContext(request))", "def add_app(request):\n context = {}\n title = \"New Application\"\n if not request.user.has_perm('devices.manage_apps'):\n title = \"Request Application\"\n\n if request.method == 'POST':\n form = NewAppForm(data=request.POST, title=title, request_user=request.user)\n if form.is_valid():\n form.save()\n if title == \"Request Application\":\n message = request.user.name + \" has requested that you add \" + request.POST['name'] + \\\n \" to the list of available applications in the MDM Managed Software Library.<br><br>\" \\\n \"Log into the <a href='https://lnl.wpi.edu\" + reverse(\"mdm:list\") + \"'>MDM Console</a> to \" \\\n \"view or deny the request.\"\n email = GenericEmailGenerator(subject=\"New MacBook Software Request\", to_emails=settings.EMAIL_TARGET_W,\n body=message)\n email.send()\n messages.success(request, \"Your request has been submitted. 
The Webmaster will review it shortly.\")\n return HttpResponseRedirect(reverse(\"home\"))\n messages.success(request, \"Application added successfully!\")\n return HttpResponseRedirect(reverse(\"mdm:apps\"))\n else:\n form = NewAppForm(title=title, request_user=request.user)\n context['form'] = form\n context['msg'] = title\n return render(request, 'form_crispy.html', context)", "def acquisition_create(request, slug):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n #if the request is GET presents empty form\n if request.method == 'GET':\n\n acquisition_form = AcquisitionForm()\n return render_to_response('acquisition_form.html', {'form': acquisition_form, 'company':company},\n context_instance=RequestContext(request))\n \n else:\n acquisition_form = AcquisitionForm(request.POST)\n #if is POST Validates the form is well filled and save it redirecting to the company page\n if acquisition_form.is_valid():\n aqf = acquisition_form.save(commit=False)\n aqf.company = company\n aqf.save()\n return HttpResponseRedirect('/company/'+str(slug))\n\n #if not well filled redirect to the original create and display error\n else:\n return render_to_response('acquisition_form.html', \n {'form': acquisition_form, 'form_errors': acquisition_form.errors, 'company':company},\n context_instance=RequestContext(request))", "def post(self, request):\n\n try:\n eventoid = request.POST.get('id', '')\n correo = request.POST.get('correo', '')\n AsigStaff.objects.create(id_Evento = eventoid, email_staff = correo)\n print(\"Exito en la asignaciรณn de staff\")\n except:\n print(\"Error en la asignacion de staff\")\n\n \n return render(request, self.template, self.context)\n #return render(request, self.template, self.context)", "def create_task(request):\n all_task_list = Todo.objects.all()\n form = TaskForm()\n if request.method == 'POST':\n form = TaskForm(request.POST)\n if form.is_valid():\n # create default todolist\n user = request.user if request.user.is_authenticated else None\n task = Todo(\n description=request.POST['description'],\n content= request.POST['content'],\n tesk_medium= request.POST['tesk_medium'],\n creator=user\n )\n task.save()\n return redirect('lists:alllist')\n else:\n return render(request, 'lists/index.html', {'form': form})\n\n context = {\n 'form': form, \n 'taskli':all_task_list\n }\n return render(request, 'lists/create_task.html',context )", "def todos_add(request, event_ident):\n try:\n event = Event.get_by_ident(event_ident)\n except Event.DoesNotExist:\n raise Http404('Event matching query does not exist.')\n\n dt = datetime.datetime\n timedelta = datetime.timedelta\n\n initial = []\n base = dt.now()\n if event.start and event.end:\n extra = 9\n else:\n extra = 10\n initial = [\n {\n 'title': 'Set date with host',\n 'due': dt.now() + timedelta(days=30),\n 'event': event,\n },\n ]\n\n TodoFormSet = modelformset_factory(TodoItem, form=SimpleTodoForm,\n extra=extra)\n\n formset = TodoFormSet(queryset=TodoItem.objects.none(), initial=initial + [\n {\n 'title': 'Set up a workshop website',\n 'due': base + timedelta(days=7),\n 'event': event,\n },\n {\n 'title': 'Find instructor #1',\n 'due': base + timedelta(days=14),\n 'event': event,\n },\n {\n 'title': 'Find instructor #2',\n 'due': base + timedelta(days=14),\n 'event': event,\n },\n {\n 'title': 'Follow up that instructors have booked travel',\n 'due': base + timedelta(days=21),\n 'event': event,\n },\n {\n 
'title': 'Set up pre-workshop survey',\n 'due': event.start - timedelta(days=7) if event.start else '',\n 'event': event,\n },\n {\n 'title': 'Make sure instructors are set with materials',\n 'due': event.start - timedelta(days=1) if event.start else '',\n 'event': event,\n },\n {\n 'title': 'Submit invoice',\n 'due': event.end + timedelta(days=2) if event.end else '',\n 'event': event,\n },\n {\n 'title': 'Make sure instructors are reimbursed',\n 'due': event.end + timedelta(days=7) if event.end else '',\n 'event': event,\n },\n {\n 'title': 'Get attendee list',\n 'due': event.end + timedelta(days=7) if event.end else '',\n 'event': event,\n },\n ])\n\n if request.method == 'POST':\n formset = TodoFormSet(request.POST)\n if formset.is_valid():\n formset.save()\n messages.success(request, 'Successfully added a bunch of TODOs.',\n extra_tags='todos')\n return redirect(reverse(event_details, args=(event.get_ident(), )))\n else:\n messages.error(request, 'Fix errors below.')\n\n context = {\n 'title': 'Add standard TODOs to the event',\n 'formset': formset,\n 'helper': bootstrap_helper_inline_formsets,\n 'event': event,\n }\n return render(request, 'workshops/todos_add.html', context)", "def appointments(request):\n now = timezone.localtime(timezone.now())\n data = {}\n tables = {}\n rows = []\n seen = Appointment.objects.filter(seen_time__isnull=False).filter(\n checkin_date__iexact=now.date())\n # Today's COMPLETE patients\n complete = seen.filter(finish_time__isnull=False)\n for a in complete:\n d = {}\n d['id'] = a.id\n d['name'] = a.first_name + ' ' + a.last_name\n h, m, s = to_hms(get_waiting_time(a, now.time()))\n wait_time = \"\" + str(h) + \":\" + str(m) + \":\" + str(s)\n d['wait_time'] = wait_time\n rows.append(d)\n tables['Completed'] = rows\n rows = []\n # Today's IN_SESSION patients\n in_session = seen.filter(finish_time__isnull=True)\n for a in in_session:\n d = {}\n d['id'] = a.id\n d['name'] = a.first_name + ' ' + a.last_name\n h, m, s = to_hms(get_waiting_time(a, now.time()))\n wait_time = \"\" + str(h) + \":\" + str(m) + \":\" + str(s)\n d['wait_time'] = wait_time\n rows.append(d)\n tables['In Session'] = rows\n data['tables'] = tables\n return render(request, 'doctor/appointments.html', data)", "def create_view(request, title, modelform, **kwargs):\n instance_form = modelform(request.POST or None)\n if instance_form.is_valid():\n instance = instance_form.save(commit=False)\n for default in kwargs.keys():\n setattr(instance, default, kwargs[default])\n instance.save()\n messages.success(request, _(\"%s was created.\") % instance)\n return redirect(instance.get_absolute_url())\n return form(\n {**kwargs, \"form\": instance_form, \"action_name\": _(\"Create\"), \"title\": title},\n \"deployments/form.html\",\n request,\n )", "def __init__(self, *args, **kwargs):\n user = kwargs.pop('user')\n super(ChooseAppointmentForm, self).__init__(*args, **kwargs)\n if(user.first_name=='patient'):\n self.appointments = user.patient_appointment.all()\n appointment_partner = 'doctor' # patient is partnered with a doctor and vice versa\n else:\n self.appointments = user.doctor_appointment.all()\n appointment_partner = 'patient'\n choices = []\n\n for i, appointment in enumerate(self.appointments):\n partner_first_name = appointment.associated_patient.patient_user_profile.first_name if (appointment_partner=='patient') else appointment.associated_doctor.doctor_user_profile.first_name\n partner_last_name = appointment.associated_patient.patient_user_profile.last_name if 
(appointment_partner=='patient') else appointment.associated_doctor.doctor_user_profile.last_name\n choices.append((appointment, 'Appointment: {}, on {}, at {} with {} {}'\n .format(appointment.title, appointment.date, appointment.time, partner_first_name, partner_last_name)))\n\n self.fields['appointments'] = forms.ChoiceField(label=\"\", choices=choices, widget=forms.RadioSelect)", "def create():\n if request.method == 'POST':\n if request.form.get('title') and request.form.get('content'):\n entry = Entry.create(\n title = request.form.get('title'),\n content = request.form.get('content'),\n published = request.form.get('published') or False)\n flash('Entry created successfully!', 'success')\n if entry.published:\n return redirect(url_for('detail', slug=entry.slug))\n else:\n return redirect(url_for('edit', slug=entry.slug))\n else:\n flash('Title and Content are required!', 'danger')\n return render_template('create.html')", "def present_view(self, confirmation=False, error=None):\n if confirmation:\n input(\"The entry has been added. Press Enter to continue\")\n return\n if error:\n print(\n \"\\n** ERROR **\\n{}\\n\\nPlease try again\".format(\n \"\\n\".join(f\"{k}: {' '.join(v)}\" for k, v in error.messages.items())\n )\n )\n print(self._layout)\n task = {\n \"date\": input(\"Enter date (DD/MM/YYYY): \"),\n \"title\": input(\"Task Title: \"),\n \"time_spent\": input(\"Time spent (rounded minutes): \"),\n \"notes\": input(\"Notes (Optional): \"),\n }\n return task", "def community_post_create_view(request):\n task = \"Create New\"\n form = AddEditPostForm() # An unbound form\n\n if request.method == 'POST': # If the form has been submitted...\n form = AddEditPostForm(request.POST, request.FILES) # A form bound to the POST data\n if form.is_valid(): # All validation rules pass\n post = form.save(commit=False) # Create a new object from the form, but don't save it to the database\n post.author = request.user # Set the author to the current user\n post.save() # Save the object to the database\n slug_str = \"%s %s\" % (post.title, post.date_posted) # Create a slug from the title and date\n post.slug = slugify(slug_str) # Create the slug\n post.save() # Save the object to the database\n return redirect('community-home') # Redirect to the home page\n\n context = { # Pass the variables to the template\n 'task': task,\n 'form': form,\n }\n return render(request,\n 'pages/patient-community/community-create-update-post.html',\n context) # render the patient community create post page", "def action_makeMeeting(self, cr, uid, ids, context=None):\n opportunity = self.browse(cr, uid, ids[0], context)\n res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'base_calendar', 'action_crm_meeting', context)\n res['context'] = {\n 'default_opportunity_id': opportunity.id,\n 'default_partner_id': opportunity.partner_id and opportunity.partner_id.id or False,\n 'default_partner_ids' : opportunity.partner_id and [opportunity.partner_id.id] or False,\n 'default_user_id': uid,\n 'default_section_id': opportunity.section_id and opportunity.section_id.id or False,\n 'default_email_from': opportunity.email_from,\n 'default_state': 'open',\n 'default_name': opportunity.name,\n }\n return res", "def add_view(self, request):\r\n instance_form = self.get_minimal_add_form()\r\n form = instance_form(request.POST, request.FILES, prefix=self.base_url())\r\n\r\n new_instance = None\r\n if form.is_valid():\r\n new_instance = form.save()\r\n template = select_template(self.item_add_template)\r\n context = 
RequestContext(request)\r\n context.update({\r\n \"insert\": self,\r\n \"form\": form,\r\n \"object\": new_instance\r\n })\r\n response = HttpResponse(template.render(context))\r\n response.status_code = 201\r\n return response\r\n response = HttpResponse(form.errors)\r\n response.status_code = 400\r\n return response", "def test_meeting_create(self):\n pass", "def get_project_add_form():\n\n return render_template(\"project_add.html\")", "def get_student_add_form():\n\n return render_template(\"student_add.html\")", "def add_event():\n check_admin()\n\n add_event = True\n\n form = EventForm()\n if form.validate_on_submit():\n event = Event(name=form.name.data, timeD = form.timeD.data, date = form.date.data, location = form.location.data,\n description=form.description.data, menus = 'menus/'+form.menu.data)\n try:\n # add event to the database\n db.session.add(event)\n db.session.commit()\n flash('You have successfully added a new event.')\n except:\n # in case event name already exists\n flash('Error: event name already exists.')\n\n # redirect to events page\n return redirect(url_for('admin.list_events'))\n\n # load event template\n return render_template('admin/events/event.html', action=\"Add\",\n add_event=add_event, form=form,\n title=\"Add Event\")", "def create(request):\n if request.method == \"POST\":\n form = InitialInvoice(data=request.POST)\n if form.is_valid():\n data = form.cleaned_data\n return render(request,\n \"invoice/invoice_create.html\",\n {\n \"form\": ItemForm(),\n \"stage\": \"2\",\n \"initial_data\": data\n })\n\n return render(request,\n \"invoice/invoice_create.html\",\n {\n \"form\": InitialInvoice(),\n \"stage\": \"1\"\n })", "def create_venue_submission():\n # parse POSTed form:\n venue_created = convert_form_dict_to_dict(request.form)\n # parse venue name:\n venue_name = venue_created[\"name\"]\n\n try:\n venue = Venue(**venue_created)\n db.session.add(venue)\n db.session.commit()\n # on successful db insert, flash success\n flash('Venue ' + venue_name + ' was successfully listed!')\n except:\n db.session.rollback()\n # on unsuccessful db insert, flash an error instead.\n flash('An error occurred. 
Venue ' + venue_name + ' could not be listed.')\n finally:\n db.session.close()\n \n return render_template('pages/home.html')", "def get_add_project_form():\n\n return render_template(\"project_add.html\")", "def appointment_date(begin_date):\n\n session.attributes['begin_date'] = str(begin_date)\n qs = render_template('time')\n return question(qs)", "def get(self, request, *args, **kwargs):\n organization_form = organization.forms.OrganizationForm()\n user_form = organization.forms.UserForm()\n # print(pet_form, pet_video_form)\n context = {'organization_form': organization_form,'user_form': user_form}\n context.update(django.core.context_processors.csrf(request))\n return django.shortcuts.render_to_response('organization/organization_insert.html', context)", "def _create_form(date, place, userid, invited, instance=None):\n print \"invited:{0}\".format(invited)\n matchdict = { 'place': place , 'date': date, 'creator': userid,\n 'invited': invited}\n return", "def add_bus():\n\n return render_template('bus-add-form.html')", "def follow_workoutplan(request, pk):\n return render(request, 'workouts/starting_date_form.html')", "def get_add_student_form():\n\n return render_template(\"student_add.html\")", "def submit_app_form():\n\n firstname = request.form.get(\"fstname\")\n lastname = request.form.get(\"lstname\")\n salary = request.form.get(\"salaryreq\")\n position = request.form.get(\"job\")\n\n return render_template(\"application-response.html\",\n fstname=firstname,\n lstname=lastname,\n salaryreq=salary,\n job=position,\n )", "def create_venue_submission():\n form = VenueForm(request.form)\n\n try:\n new_venue = Venue(\n name=form.name.data,\n city=form.city.data,\n state=form.state.data,\n address=form.address.data,\n phone=form.phone.data,\n genres=form.genres.data,\n facebook_link=form.facebook_link.data,\n image_link=form.image_link.data,\n website=form.website.data,\n seeking_talent=form.seeking_talent.data,\n seeking_description=form.seeking_description.data\n )\n\n db.session.add(new_venue)\n db.session.commit()\n\n flash('Venue ' + request.form['name'] + ' was successfully listed!', 'info')\n\n except Exception as ex:\n db.session.rollback()\n flash('Error occurred. Venue ' + request.form['name'] + ' could not be listed. 
' + str(ex), 'danger')\n finally:\n db.session.close()\n\n return redirect(url_for('index'))", "def event(id):\n form = ContactForm()\n event = Event.query.get_or_404(id)\n other_media = {\"video\": event.video, \"misc_image_paths\": event.misc_images()}\n packages = event.packages.all()\n # commented out because the fake data generated for the demo of\n # this app by the Faker package may inadvertently contain real email addresses\n if form.validate_on_submit():\n # send_email(\n # organizer.email,\n # f\"Event Inquiry - {form.subject.data}\",\n # \"events/email/contact_organizer\",\n # organizer=organizer,\n # form=form,\n # event=event,\n # )\n flash(\"Your email was sent to the event organizer.\", \"success\")\n return redirect(url_for(\"events.event\", id=id))\n return render_template(\n \"events/event.html\",\n event=event,\n venue=event.venue,\n organizer=event.user,\n packages=packages,\n form=form,\n date_format=\"%m/%d/%Y\",\n main_image=event.main_image(),\n time_format=\"%I:%M %p\",\n other_media=other_media,\n )", "def display_form():\n\n return render_template('add_new_student.html')", "def post(self):\n global meetups\n user = users.get_current_user()\n if not self.request.get('meetid') in meetups:\n meeting = CanvasSheet()\n meeting.presenter = user\n meetups[self.request.get('meetid')] = meeting\n else:\n self.error(404)\n self.response.out.write('sorry no meeting found \"%s\"' % meetid)\n return", "def index(request):\n\tif request.method == 'POST':\n\t form = PatientRecordForm(request.POST)\n\t if form.is_valid():\n\t form.save()\n\t status = simplejson.dumps({'status':'success'})\n\t return HttpResponse(status, mimetype='application/json')\n\tform = PatientRecordForm(auto_id='%s')\n\treturn render_to_response('index.html', {'form': form})", "def __ui_add_new_activity(self):\n activity_id = int(input(\"Activity ID: \"))\n existing_persons_ids = self.__person_service.get_existing_persons_ids()\n string_of_participants_ids = input(\n f\"Participants' IDs (you can choose from the list: {existing_persons_ids})\\n > \")\n list_of_participants_ids = self.__ui_convert_ids_string_to_list(string_of_participants_ids)\n activity_description = input(\"Describe the activity: \")\n activity_date = {\n \"year\": int(input(\"Year: \")),\n \"month\": int(input(\"Month: \")),\n \"day\": int(input(\"Day: \"))\n }\n activity_time = int(input(\"Time: \"))\n\n self.__activity_service.service_add_activity(activity_id,\n list_of_participants_ids,\n activity_date,\n activity_time,\n activity_description)\n print(\"Activity successfully added to your agenda!\\n\")", "def add_hospital(request):\n if request.POST:\n post = request.POST\n name = post.get(\"name\")\n address = post.get(\"address\")\n city = post.get(\"city\")\n state = post.get(\"state\")\n zip = post.get(\"zip\")\n hospital = Hospital.objects.create(\n name=name,\n address=address,\n city=city,\n state=state,\n zip=zip\n )\n\n if hospital:\n return redirect('add_hospital')\n\n return render(request, 'add_hospital.html')", "def RegisterPopupForm(request, program_key, activity_key, users=None,\n notify='1', force_status='0'):\n # Get the schedules.\n schedules_query = models.Activity.SchedulesQueryFromActivityKey(activity_key)\n schedules_query.order('start_time')\n\n # Get the access point to load and make a list of schedules.\n schedules_list = []\n access_point_keys = set()\n access_points_secondary_keys = set()\n\n common_access_points = set()\n same_access_points = True\n\n for schedule in schedules_query:\n all_access_points = 
schedule.GetAllAccessPoints()\n if same_access_points:\n if not common_access_points:\n # We populate the set for the first time\n common_access_points.update(all_access_points)\n elif common_access_points != all_access_points:\n # Access points are different\n same_access_points = False\n\n schedules_list.append(schedule)\n access_point_keys.update(schedule.access_points)\n access_points_secondary_keys.update(schedule.access_points_secondary)\n\n access_point_keys.update(access_points_secondary_keys)\n # Load all the access points that are of interest.\n access_points = db.get(list(access_point_keys))\n assert None not in access_points\n access_points = dict(zip(access_point_keys, access_points))\n\n user = request.user\n schedule_info_list = []\n for schedule in schedules_list:\n schedule_info = {}\n\n # Format session times to display.\n schedule_info['key'] = str(schedule.key())\n schedule_info['start_time_local'] = user.GetLocalTime(schedule.start_time)\n\n # Add the access points that are available for each schedule.\n access_point_list = []\n for access_point_key in schedule.GetAllAccessPoints():\n access_point_display = str(access_points[access_point_key])\n if access_point_key in access_points_secondary_keys:\n access_point_display += ' (P)'\n access_point_list.append({'key': str(access_point_key),\n 'display': access_point_display})\n\n # sort access points by name\n schedule_info['access_point_list'] = sorted(access_point_list,\n key=lambda x: x['display'])\n\n # Add the schedule info to the list\n schedule_info_list.append(schedule_info)\n\n data = {'schedule_list': schedule_info_list,\n 'activity_key': activity_key,\n 'program_key': program_key,\n 'notify': notify,\n 'force_status': force_status}\n\n if same_access_points:\n data['common_access_points'] = schedule_info_list[0]['access_point_list']\n\n if users:\n data['users_count'] = len(users.split(','))\n data['users'] = users\n return data", "def schedule(request):\r\n\r\n return render(request, 'editorial/schedule.html', {})", "def appointment_list(self, request, **dict):\n\t\tdata = self.get_serializer(self.get_queryset(), many=True).data\n\t\treturn Response(data, status.HTTP_200_OK)", "def show_add_student_form():\n\n return render_template(\"add_student_form.html\")", "def reservs(request):\n a = request.GET\n print(a)\n if request.method == 'POST':\n # create a form\n form = NewReservationsOfficesForm(data=request.POST)\n if form.is_valid():\n form.save()\n return redirect('coworkings:index')\n else:\n form = NewReservationsOfficesForm()\n\n context = {\"form\": form}\n return render(request, 'coworkings/reservs.html', context)", "def save_view_form(\n request: HttpRequest,\n form: ViewAddForm,\n template_name: str,\n) -> JsonResponse:\n if request.method == 'POST' and form.is_valid():\n\n if not form.has_changed():\n return JsonResponse({'html_redirect': None})\n\n # Correct POST submission\n view = form.save(commit=False)\n view.workflow = form.workflow\n\n # Type of event to be recorded (before object is saved and ID is set)\n if form.instance.id:\n event_type = Log.VIEW_EDIT\n else:\n event_type = Log.VIEW_CREATE\n\n view.save()\n form.save_m2m() # Needed to propagate the save effect to M2M relations\n\n # Log the event\n Log.objects.register(\n request.user,\n event_type,\n view.workflow,\n {\n 'id': view.id,\n 'name': view.name,\n 'workflow_name': view.workflow.name,\n 'workflow_id': view.workflow.id})\n\n return JsonResponse({'html_redirect': ''})\n\n return JsonResponse({\n 'html_form': render_to_string(\n 
template_name,\n {'form': form, 'id': form.instance.id},\n request=request),\n })", "def application():\n\n return render_template(\"application-form.html\")", "def add_doctor(request):\n\n template = 'doctor_adding_form.html'\n #Check this in case url was typed directly in browser\n if request.user.is_staff and request.user.is_superuser:\n if request.method == \"POST\":\n form = DoctorForm(request.POST)\n if form.is_valid():\n form.save()\n else:\n return TemplateResponse(request, template, {'form': form})\n else:\n form = DoctorForm()\n return TemplateResponse(request, template, {'form': form})\n return HttpResponseRedirect(reverse('index'))", "def add_objective(request, teacher_email, teacher_class_id, date):\n teacher = Teacher.objects.get(email=teacher_email)\n if teacher.user != request.user:\n # weird mistake or evil to manipulate another person's data? start over\n return redirect('top.index')\n\n teacher_class = TeacherClass.objects.get(id=teacher_class_id)\n\n if request.POST:\n form = EntryForm(request.POST)\n if form.is_valid():\n try:\n with transaction.atomic():\n entry = form.save(commit=False)\n entry.teacher = teacher\n entry.teacher_class = teacher_class\n entry.date = datetime.datetime.strptime(date, '%Y-%m-%d')\n entry.save()\n start_of_week_datetime = entry.date - datetime.timedelta(days=entry.date.weekday())\n start_of_week = datetime.date(start_of_week_datetime.year, start_of_week_datetime.month,\n start_of_week_datetime.day)\n return redirect('teachers.views.dashboard', teacher_email=teacher_email,\n teacher_class_id=teacher_class_id, start_of_week=start_of_week)\n except IntegrityError:\n # bad bad bad; I guess the EntryForm has to be initialized\n # with the date and teacher so that its clean() method can look at it.\n form._errors['objective'] = ['This objective is already on the calendar for this day.']\n pass\n else:\n objectives = objectives_for_course(teacher_class.course_id, teacher_class.repo_provider)\n if not objectives:\n # XXX fail with an error message\n pass\n # like EntryForm() above, but dynamically created to use a selection\n # of objectives specific to this course\n form = create_entry_form(objectives)\n\n args = {'teacher_email': teacher_email,\n 'teacher_class_id': teacher_class_id,\n 'dashboard_emails': get_dashboard_emails(request),\n 'date': date}\n args.update(csrf(request))\n args['form'] = form\n return render(request, 'teachers/add_objective.html', args)", "def application_page():\n return render_template(\"application-form.html\")", "def event_add(request,event=None, error='', message=''):\n categories = Category.objects.all()\n error_fields=[]\n\n try:\n widget = bool(request.POST['widget'])\n except:\n widget = False\n if widget == True:\n url = request.POST['url']\n detail = request.POST['description']\n title = request.POST['title']\n try:\n timestamp = request.POST['timestamp']\n datetimestamp = parse(timestamp)\n except:\n datetimestamp = datetime.datetime.now()\n debug.onscreen('WARNING: Widget returned datetime we couldn\\'t process. Defaulting to today.')\n debug.onscreen('Autocompleting form from widget... 
' + url + str(timestamp) + title)\n default_event = Event(date=datetimestamp.date(), title=title, detail=detail, category=Category.objects.filter(description='Found on the internet')[0], user_email='')\n else:\n default_event = Event(date=datetime.date.today(), title='', detail='', category=Category.objects.filter(description='Events')[0], user_email='')\n\n try:\n added = bool(request.POST['add'])\n except:\n added = False\n try:\n action = request.POST['action']\n except:\n action = 'add'\n\n if added == True:\n try:\n new_date = parse(request.POST['date'], dayfirst=True)\n except:\n error += ' Date invalid or not specified.'\n\n try:\n new_detail = request.POST['detail']\n if new_detail == '':\n error += ' Event text is blank.'\n except:\n error += ' No event text provided.'\n\n try:\n new_title = request.POST['title']\n if new_title == '':\n error += ' Title is blank.'\n except:\n error += ' No event title provided.'\n\n try:\n new_category = Category.objects.filter(pk=int(request.POST['category_id']))[0] #The [0] is OK since the fact that category_id is a primary key ensures that the array has only length 1.\n except:\n error += ' Category invalid or nonexistent.'\n\n try:\n new_user_email = request.POST['user_email']\n if new_user_email == '':\n error += ' You haven\\'t provided your e-mail address.'\n except:\n error += ' No user e-mail address provided.'\n\n if error == '':\n new_event = Event(date=new_date, title=new_title, detail=new_detail, category=new_category, user_email=new_user_email)\n try:\n new_event.full_clean()\n try:\n new_event.save()\n message += 'Your event was added to the database.'\n except:\n error += 'Failed to access the database.'\n except ValidationError as ve:\n for k in ve.message_dict.keys():\n error_fields.append(k)\n for m in ve.message_dict[k]:\n error += m + ' '\n default_event = new_event\n\n if action == 'saveandaddanother' or action == 'add' or error != '':\n return render_to_response('feedback/event_add.html',\n {'categories': categories,\n 'error': error,\n 'added': added,\n 'message': message,\n 'error_fields': error_fields,\n 'event': default_event},\n context_instance=RequestContext(request))\n elif action == 'save':\n return index(request, error=error, message=message)\n else:\n error += 'Invalid submit action requested.'\n return render_to_response('feedback/event_add.html',\n {'categories': categories,\n 'error': error,\n 'added': added,\n 'message': message,\n 'error_fields': error_fields,\n 'event': default_event},\n context_instance=RequestContext(request))", "def create_or_edit_event(request, calendar_id=None, event_id=None, redirect=None):\n instance = None\n if event_id:\n instance = get_object_or_404(Event, id=event_id)\n calendar = None\n if calendar_id is not None:\n calendar = get_object_or_404(Calendar, id=calendar_id)\n form = EventForm(data=request.POST or None, instance=instance, hour24=True)\n if form.is_valid():\n event = form.save(commit=False)\n if instance is None:\n event.creator = request.user\n event.save()\n if calendar is not None and instance is None:\n calendar.events.add(event)\n next = redirect or reverse('s_event', args=[event.id])\n if 'next' in request.GET:\n next = _check_next_url(request.GET['next']) or next\n return HttpResponseRedirect(next)\n return render_to_response('schedule/create_event.html', {\n \"form\": form,\n \"calendar\": calendar\n }, context_instance=RequestContext(request))", "def issueCreate(request):\n args = { 'statusForm' : forms.itemStatusForm(), }\n return 
render_to_string('issueCreate.html', args,\n context_instance=RequestContext(request))", "def create_challenge(request):\n\tif request.method == \"POST\":\n\t\tselected_schedule_pk = request.POST[\"schedule-result-selected\"]\n\t\t\n\t\tselected_schedule = ReadingSchedule.objects.get(pk = selected_schedule_pk)\n\t\t\n\t\tnew_challenge = Challenge()\n\t\tnew_challenge.name = request.POST[\"challenge-name\"]\n\t\tnew_challenge.schedule = selected_schedule\n\t\tnew_challenge.schedule_name = selected_schedule.title\n\t\tif(\"challenge-is-private\" in request.POST):\n\t\t\tnew_challenge.invite_only = request.POST[\"challenge-is-private\"]\n\t\telse:\n\t\t\tnew_challenge.invite_only = False\n\t\tnew_challenge.save()\n\t\tnew_challenge.admin.add(request.user)\n\t\t\n\t\t\n\t\t\n\t\tmessages.success(request, \"Successfully created a challenge\")\n\t\treturn redirect(\"/challenge\")\n\t\t\n\telse:\n\t\tall_schedules = ReadingSchedule.objects.filter(start_date__gte = datetime.datetime.today())\n\t\t#turn into JSON for selector\n\t\tlist_of_sched = []\n\t\tfor schedule in all_schedules:\n\t\t\tlist_of_sched.append({ 'name' : schedule.title, 'date' : parse_date_to_string(schedule.start_date), 'pk' : schedule.pk })\n\t\t\n\t\tprint(json.dumps(list_of_sched))\n\t\t\n\t\tcontext = RequestContext(request, {\"all_schedule_json\" : json.dumps(list_of_sched)})\n\t\treturn render_to_response(\"encourage/create_challenge.html\", context)", "def create_appointments(\n data: AppointmentCreate,\n background_tasks: BackgroundTasks, \n user: User = Depends(deps.get_user),\n db: Session = Depends(deps.get_db),\n rdc: RedisCache = Depends(deps.get_redis)\n) -> Any:\n db_provider = crud_user.get_user_by_id(db, str(data.provider_id))\n if not db_provider:\n raise HTTPException(\n status_code=404, \n detail=\"Cabeleireiro nรฃo encontrado\"\n )\n\n current_date = datetime.now()\n compare_date = data.date.replace(tzinfo=None)\n if compare_date < current_date:\n raise HTTPException(\n status_code=400, \n detail=\"Vocรช nรฃo pode marcar agendamento em datas passadas\"\n )\n \n if data.date.hour < 8 or data.date.hour > 17:\n raise HTTPException(\n status_code=400, \n detail=\"Vocรช sรณ pode cria agendamentos entre 8:00 e 17:00\"\n )\n\n if data.provider_id == user.id:\n raise HTTPException(\n status_code=400, \n detail=\"Vocรช nรฃo pode marca agendamento consigo mesmo\"\n )\n\n validate_date = crud_appointment.get_appointment_by_date(db, data.provider_id, data.date)\n if validate_date:\n raise HTTPException(status_code=400, detail=\"Este horario jรก esta agendado\")\n\n appointment = crud_appointment.create(db, data, user)\n msg = f\"Novo agendamento de {user.name} {user.surname} para o {date.format_date(data.date)}\"\n background_tasks.add_task(crud_notification.create, str(data.provider_id), msg)\n date_time = data.date\n rdc.invalidate_cache(\n f\"providers-appointments:{data.provider_id}:{date_time.year}:{date_time.month}:{date_time.day}\"\n )\n rdc.invalidate_cache(f\"user-appointments:{user.id}\")\n\n return appointment", "def test_admin_calendar_admin_add(self):\n response = self.client.get(\"/admin/appointment/calendar/add/\")\n self.assertEqual(response.status_code, 200)", "def make_petition(request):\n form_class = SongPetitionForm\n template_name = 'petition/make_petition'\n\n if request.method == 'POST':\n petition = SongPetition(song_title=request.user)\n form = SongPetitionForm(request.POST, instance=petition)\n # If all the data has been filled in correctly it saves the petition into the DB\n if 
form.is_valid():\n form.save()\n # Feedback message telling that the petition has been created\n messages.success(request, \"Petition succesfully created\")\n return redirect ('petition:list')\n\n else:\n form = SongPetitionForm()\n return render(request, 'petition/make_petition.html', {'form': form})", "def create_venue_submission():\n # TODO: insert form data as a new Venue record in the db, instead (DONE)\n # TODO: modify data to be the data object returned from db insertion\n\n try:\n name = request.form.get(\"name\")\n city = request.form.get(\"city\")\n state = request.form.get(\"state\")\n address = request.form.get(\"address\")\n phone = request.form.get(\"phone\")\n imageLink = request.form.get(\"image_link\")\n genres = request.form.getlist(\"genres\")\n facebookLink = request.form.get(\"facebook_link\")\n website = request.form.get(\"website\")\n seeking_talent = request.form.get(\"facebook_link\")\n seeking_description = request.form.get(\"facebook_link\")\n\n venue_to_add = Venue(\n name=name,\n city=city,\n state=state,\n address=address,\n phone=phone,\n image_link=imageLink,\n genres=genres,\n facebook_link=facebookLink,\n website=website,\n seeking_talent=seeking_talent,\n seeking_description=seeking_description\n )\n\n db.session.add(venue_to_add)\n db.session.commit()\n\n # on successful db insert, flash success\n flash(\"Venue \" + request.form[\"name\"] + \" was successfully listed!\")\n\n # TODO: on unsuccessful db insert, flash an error instead.\n # e.g., flash('An error occurred. Venue ' + data.name + ' could not be listed.')\n except:\n flash(\"An error occurred. Venue \" + name + \" could not be listed.\")\n db.session.rollback()\n finally:\n db.session.close()\n # see: http://flask.pocoo.org/docs/1.0/patterns/flashing/\n return render_template(\"pages/home.html\")", "def matchScheduling(request): \n registered = False\n if request.method == 'POST':\n scheduling_form = MatchSchedulingForm(data=request.POST)\n if scheduling_form.is_valid():\n scheduling = scheduling_form.save()\n registered = True\n else:\n print(scheduling_form.errors)\n else:\n scheduling_form = MatchSchedulingForm()\n matches_obj = MatchScheduling.objects.all()\n return render(request,'footBallApp/match_scheduling.html',\n {'scheduling_form':scheduling_form,\n 'registered':registered, 'matches_obj':matches_obj})", "def create_employee(request, company_id):\n\n company = Company.objects.get(pk=company_id)\n current_employee = Employee.objects.get(user__pk=request.user.pk)\n if not current_employee.isEnsoUser() and current_employee.company.pk != company.pk:\n logUnauthorizedAccess(\"User tried to create_employee\", request)\n raise PermissionDenied()\n form = EmployeeForm(request, initial=dict(company=company))\n form.fields['manager'].queryset = Employee.objects.filter(is_manager=True, company=company)\n # form.fields['development_plan_type'].queryset = DevelopmentPlanType.objects.filter(\n # Q(company=company) | Q(company__isnull=True))\n # data = {\n # 'employee_form': form.cleaned_data,\n # 'company': company.cleaned_data[\"name\"]\n # }\n\n return TemplateResponse(\n request,\n 'mus/create_employee_form.html',\n {\n 'employee_form': form,\n }\n )\n # data = {\n # 'employee_form': form.cleaned_data,\n # 'company': company.cleaned_data[\"name\"]\n # }\n # return JsonResponse(status=200, data=data)", "def add_form():\r\n\r\n if request.args['collection'] == 'recipe':\r\n # initializes page title and header\r\n page_title = 'Add recipe'\r\n page_header = 'Add a new recipe:'\r\n\r\n # returns the 
add recipe template\r\n return render_template(\r\n 'add_form.html',\r\n collection=mongo.db.recipe_categories.find().sort('name'),\r\n categories=mongo.db.appliance_categories.find().sort('name'),\r\n page_title=page_title,\r\n page_header=page_header\r\n )\r\n\r\n elif request.args['collection'] == 'category':\r\n # initializes page title and header\r\n page_title = 'Add recipe category'\r\n page_header = 'Add a new recipe category:'\r\n\r\n # returns the add recipe category template\r\n return render_template(\r\n 'add_form.html',\r\n page_title=page_title,\r\n page_header=page_header\r\n )\r\n\r\n else:\r\n # returns an error message on incorrect argument\r\n return render_template(\r\n 'error.html',\r\n msg='Bad argument error! (/add_form)'\r\n )", "def post(self):\n\n try:\n\n controller = self.controller()\n kwargs = controller.date_time_parser(request.json)\n schema = self.schema(many=False)\n raw_data = controller.create(**kwargs)\n data = schema.dump(raw_data)\n\n return ResponseHandler.render_response(data=data)\n\n except Exception as ex:\n\n return ResponseHandler.render_response(status=ERR, message=traceback.format_exc())", "def customer_create(request, slug):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n #if the request is GET presents empty form\n if request.method == 'GET':\n\n customer_form = CustomerForm()\n return render_to_response('customer_form.html', {'form': customer_form, 'company':company},\n context_instance=RequestContext(request))\n \n else:\n customer_form = CustomerForm(request.POST)\n #if is POST Validates the form is well filled and save it redirecting to the company page\n if customer_form.is_valid():\n of = customer_form.save(commit=False)\n of.company = company\n of.save()\n return HttpResponseRedirect('/company/'+str(slug))\n\n #if not well filled redirect to the original create and display error\n else:\n return render_to_response('customer_form.html', \n {'form': customer_form, 'form_errors': customer_form.errors, 'company':company},\n context_instance=RequestContext(request))", "def meeting(request, meeting_id):\n meeting = get_object_or_404(Meeting, pk=meeting_id)\n context = {'meeting': meeting}\n return render(request, 'sacms/meeting.html', context)", "def view_appointments(request):\n\n appointments = Appointment.objects.all().order_by('date')\n\n if request.user.userprofile.is_patient():\n appointments = Appointment.objects.filter(patient=request.user.id).order_by('date')\n\n elif request.user.userprofile.is_doctor():\n appointments = Appointment.objects.filter(doctor=request.user.id).order_by('date')\n\n return render(request, 'view_appointments.html', {'appointments': appointments,\n 'the_user': request.user})", "def populate_create_event_form(form, venue, event):\n # Venue info\n venue = event.venue\n form.venue_name.data = venue.name\n form.address.data = venue.address\n form.city.data = venue.city\n form.state.data = CreateEventForm.convert_choice_to_id(venue.state, \"STATES\")\n form.zip_code.data = venue.zip_code\n\n # Event Info\n form.title.data = event.title\n form.event_type.data = event.event_type.id\n form.category.data = event.event_category.id\n form.start_date.data = event.start_date()\n form.end_date.data = event.end_date()\n form.start_time.data = CreateEventForm.convert_choice_to_id(\n event.start_time(), \"TIMES\"\n )\n form.end_time.data = CreateEventForm.convert_choice_to_id(event.end_time(), 
\"TIMES\")", "def get(self, request):\n self.context[\"form\"] = AddUserForm()\n return render(request, \"dbkeeper/add.html\", self.context)", "def practices_create():\n practice = Practice()\n form = PracticeCreateForm()\n if form.validate_on_submit():\n\n form.populate_obj(practice)\n db.session.add(practice)\n db.session.commit()\n return redirect(url_for('practices.home'))\n return render_template('practices/create.html', form=form)", "def create_new_availability():\n if request.method == 'POST':\n doctor_email = request.form['doctor_email']\n date = request.form['date']\n start_time = request.form['start_time']\n end_time = request.form['end_time']\n\n response_add_availability = requests.post(server_url + 'doctor/add_availability', json={\n 'doctor_email': doctor_email,\n 'date': date,\n 'start_time': start_time,\n 'end_time': end_time\n })\n response_add_availability = response_add_availability.json()\n\n if response_add_availability.get('Status') == \"ALREADY_AVAILABILITY_SET\":\n return render_template('doctors/availability_failed.html')\n else:\n referer = request.referrer\n return render_template('doctors/availability_success.html', referer=referer)\n else:\n return render_template('doctors/dashboard.html')", "def entertips(request):\n\n #Add views\n if request.method == 'POST':\n form = SportsTippingForm(request.POST)\n if form.is_valid():\n post = form.save(commit=False)\n post.save()\n return redirect('tipresults')\n\n else:\n form = SportsTippingForm()\n\n context = {\n 'form': form,\n }\n\n return render(request, 'enterSportsTips.html', context=context)", "def create_todo_list_view(request: HttpRequest) -> Union[HttpResponse, HttpResponseRedirect]:\n if request.method == 'GET':\n form = TodoListForm()\n\n return render(request, 'todo/create_todo_list.html', {'form': form})\n elif request.method == 'POST':\n form = TodoListForm(data=deepcopy(request.POST))\n\n if form.is_valid():\n todo_list = form.save()\n\n return redirect(todo_list.get_absolute_url())\n else:\n return render(request, 'todo/create_todo_list.html', {'form': form})", "def application_submission():\n # firstname = firstname\n firstname = request.form.get(\"firstname\")\n lastname = request.form.get(\"lastname\")\n jobappliedfor = request.form.get(\"jobappliedfor\")\n salaryrequirement = request.form.get(\"salaryrequirement\")\n # trying to create a workaround to handle people accessing the\n # application page directly (meaning no parameters are passed)\n # ideas on lines 33/41/42, and 43/44\n # if firstname == \"firstname\":\n # return redirect(\"/application-form\")\n # if not request.form.get(\"firstname\")\n # return redirect(\"/application-form\")\n return render_template(\"application-response.html\", firstname=firstname,\n lastname=lastname, jobappliedfor=jobappliedfor,\n salaryrequirement=salaryrequirement)", "def submit_show(request):\n\n\tunknown_dj = BaseUser.objects.get(first_name__iexact='Unknown', last_name__iexact='Dj')\n\n\tfirst_name = request.POST.get('first_name').strip()\n\tlast_name = request.POST.get('last_name').strip()\n\temail = request.POST.get('email').strip()\n\tshow_name = request.POST.get('show_name').strip()\n\tgenre = request.POST.get('genre').strip()\n\ttagline = request.POST.get('tagline').strip()\n\tfirst_choice_day = request.POST.get('first_choice_day')\n\tfirst_choice_time = request.POST.get('first_choice_time')\n\tsecond_choice_day = request.POST.get('second_choice_day')\n\tsecond_choice_time = request.POST.get('second_choice_time')\n\tthird_choice_day = 
request.POST.get('third_choice_day')\n\tthird_choice_time = request.POST.get('third_choice_time')\n\tco_dj = request.POST.get('co_dj')\n\n\tfull_name = first_name + \" \" + last_name\n\n\tif not first_choice_time == \"\":\n\t\tfirst_choice_time = datetime.strptime(first_choice_time, '%I:%M %p').time()\n\t\tif not time_is_valid(request, first_choice_day, first_choice_time, full_name):\n\t\t\treturn render(request, 'invalid_times.html', {})\n\n\n\tif not second_choice_time == \"\":\n\t\tsecond_choice_time = datetime.strptime(second_choice_time, '%I:%M %p').time()\n\t\tif not time_is_valid(request, second_choice_day, second_choice_time, full_name):\n\t\t\treturn render(request, 'invalid_times.html', {})\n\n\tif not third_choice_time == \"\":\n\t\tthird_choice_time = datetime.strptime(third_choice_time, '%I:%M %p').time()\n\t\tif not time_is_valid(request, third_choice_day, third_choice_time, full_name):\n\t\t\treturn render(request, 'invalid_times.html', {})\n\n\t# Finds the DJ in the BaseUser database and saves their email\n\tdj = BaseUser.objects.filter(first_name__iexact=first_name, last_name__iexact=last_name).first()\n\n\tif not dj:\n\t\tfirst_name_matches = BaseUser.objects.filter(first_name__iexact=first_name)\n\t\tlast_name_matches = BaseUser.objects.filter(last_name__iexact=last_name)\n\t\t# merge the two together into unique query set\n\t\tpotential_results = first_name_matches | last_name_matches\n\t\t# takes user to a page asking if any of the names given is theirs if their name cannot be found\n\t\t# in BaseUser database\n\t\treturn render(request, 'not_in_database.html', {\n\t\t\t'potential_results': potential_results\n\t\t})\n\n\tdj.email = email\n\tdj.save()\n\n\n\t# Adds co-dj to show if co-dj exists\n\tif co_dj == '':\n\t\tco_dj = None\n\telse:\n\t\tco_dj_full_name = co_dj.lower().split()\n\t\tfname = co_dj_full_name[0]\n\t\tlname = co_dj_full_name[1]\n\t\tco_dj = BaseUser.objects.filter(first_name__iexact=fname, last_name__iexact=lname).first()\n\n\t\t# take into consideration if Co DJ doesn't exist\n\t\tif not co_dj:\n\t\t\t# If any unknown_djs are in the database, check to see why this user doesn't exist\n\t\t\tco_dj = unknown_dj\n\n\n\t# Create Show for dj if show doesn't exist\n\tshow = Show.objects.filter(dj=dj).first()\n\n\tif not show:\n\t\tshow = Show(show_name=show_name, dj=dj, co_dj=co_dj, genre=genre, tagline=tagline)\n\t\tshow.save()\n\n\n\t# Saves dj's choices in case they get bumped by someone with higher credits in the Credits database\n\tfirst_choice = Choice(show=show, choice_num=0, day=first_choice_day,\n\t\t\t\t\t\t time=first_choice_time)\n\tfirst_choice.save()\n\n\tchoices = [first_choice]\n\n\tif second_choice_day and second_choice_time:\n\t\tsecond_choice = Choice(show=show, choice_num=1, day=second_choice_day,\n\t\t\t\t\t\t\t time=second_choice_time)\n\t\tsecond_choice.save()\n\n\t\tchoices.append(second_choice)\n\n\tif third_choice_day and third_choice_time:\n\t\tthird_choice = Choice(show=show, choice_num=2, day=third_choice_day,\n\t\t\t\t\t\t\t time=third_choice_time)\n\t\tthird_choice.save()\n\n\t\tchoices.append(third_choice)\n\n\n\tformat = '%H:%M %p'\n\n\t# This variable checks what choice we are on while looping through the choices given by the dj\n\ti = 0\n\n\t# Check if any of those choices are already taken\n\tfor choice in choices:\n\t\ti += 1\n\t\tdjs_with_this_choice = Show.objects.filter(day=choice.day, time=choice.time).values_list('dj', flat=True)\n\t\tdj_with_time = 
BaseUser.objects.filter(id__in=djs_with_this_choice).order_by('credits').first()\n\n\t\texisting_show = Show.objects.filter(dj=dj_with_time).first()\n\n\t\t# Compares the number of credits our current user submitting show has against the other user's\n\t\t# which currently has this showtime.\n\t\tif existing_show and (existing_show.dj != dj):\n\t\t\tother_dj_credits = dj_with_time.credits\n\n\t\t\tif other_dj_credits < dj.credits:\n\n\t\t\t\t# Current user steals the other dj's time\n\t\t\t\tshow.day = choice.day\n\t\t\t\tshow.time = choice.time\n\t\t\t\tshow.save()\n\n\t\t\t\texisting_show.day = None\n\t\t\t\texisting_show.time = None\n\t\t\t\texisting_show.save()\n\n\t\t\t\t# Marks this show time as unavailable for anybody else who has this choice\n\t\t\t\tother_dj_choices = Choice.objects.filter(day=choice.day, time=choice.time)\n\t\t\t\tother_dj_choices.update(not_available=True)\n\n\t\t\t\treturn render(request, 'thank_for_submissions.html', {})\n\t\t\telse:\n\t\t\t\tchoice.not_available = True\n\t\t\t\tchoice.save()\n\n\t\t\t\t# All choices are taken at this point, so redirect user to\n\t\t\t\t# a page that asks for more choices\n\t\t\t\tif i == 3:\n\t\t\t\t\treturn render(request, 'additional_times.html', {\n\t\t\t\t\t\t'dj': dj,\n\t\t\t\t\t\t'choices': choices,\n\t\t\t\t\t})\n\n\t\t# Time is free and can be assigned to user's show\n\t\telse:\n\t\t\tshow.day = choice.day\n\t\t\tshow.time = choice.time\n\t\t\tshow.save()\n\n\t\t\tother_dj_choices = Choice.objects.filter(day=choice.day, time=choice.time)\n\t\t\tother_dj_choices.update(not_available=True)\n\n\t\t\treturn render(request, 'thank_for_submissions.html', {})\n\n\treturn render(request, 'additional_times.html', {\n\t\t'dj': dj,\n\t\t'choices': choices,\n\t})", "def look_vacant_offices(request):\n if request.GET:\n if request.GET['datetime_from'] and request.GET['datetime_to']:\n offices = NumberOffice.objects.all()\n reservations = Reservation.objects.all()\n post_from = request.GET['datetime_from']\n post_to = request.GET['datetime_to']\n filteroffice = reservations.all().filter(\n datetime_from__gte=post_from, datetime_to__lte=post_to\n )\n reservednumberoffice = set()\n # set reserved office for corect time\n for i in filteroffice:\n reservednumberoffice.add(i.number_office)\n context = {'offices': offices, \"reservednumberoffice\": reservednumberoffice}\n return render(request, 'coworkings/vacant_offices.html', context)\n else:\n text = 'Enter the correct data or fill in all fields.'\n context = {'text': text}\n return render(request, 'coworkings/look_vacant_offices.html', context)\n else:\n return render(request, 'coworkings/look_vacant_offices.html')", "def post(self, request):\n form = EventoForm(request.POST)\n print(form)\n if form.is_valid():\n print(\"creado\") \n\n\n self.context['form'] = form\n\n return redirect(\"Home:home\")", "def certification_create(request, slug):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n #if the request is GET presents empty form\n if request.method == 'GET':\n\n certification_form = CertificationForm()\n return render_to_response('certification_form.html', {'form': certification_form, 'company':company},\n context_instance=RequestContext(request))\n \n else:\n certification_form = CertificationForm(request.POST)\n #if is POST Validates the form is well filled and save it redirecting to the company page\n if certification_form.is_valid():\n of = 
certification_form.save(commit=False)\n of.company = company\n of.save()\n return HttpResponseRedirect('/company/'+str(slug))\n\n #if not well filled redirect to the original create and display error\n else:\n return render_to_response('certification_form.html', \n {'form': certification_form, 'form_errors': certification_form.errors, 'company':company},\n context_instance=RequestContext(request))" ]
[ "0.7959271", "0.7628927", "0.7594305", "0.75070536", "0.6968844", "0.6689058", "0.65166914", "0.6325348", "0.62968016", "0.6250767", "0.6221335", "0.6216348", "0.62076926", "0.6130234", "0.6045082", "0.59900767", "0.59859955", "0.5985113", "0.5973313", "0.5966656", "0.5902615", "0.5900501", "0.58078206", "0.5755915", "0.57498676", "0.57422084", "0.5738293", "0.5736338", "0.57353824", "0.57277113", "0.57175505", "0.57119286", "0.5709236", "0.5707384", "0.5687987", "0.56865466", "0.56738883", "0.5667575", "0.5655502", "0.5647307", "0.5644421", "0.56440127", "0.5635356", "0.56258243", "0.55860174", "0.5579241", "0.5574762", "0.5570795", "0.55639124", "0.55611897", "0.5550077", "0.55467254", "0.5544887", "0.5521491", "0.5516069", "0.5510722", "0.55079544", "0.5501982", "0.5491643", "0.5483642", "0.54708135", "0.5467935", "0.54661846", "0.54587966", "0.54479325", "0.5446152", "0.5430533", "0.5422766", "0.5413438", "0.5411042", "0.54088897", "0.54028296", "0.5401175", "0.5396223", "0.53940135", "0.53900063", "0.53876334", "0.53872675", "0.5381616", "0.53785014", "0.53735", "0.53728193", "0.5370257", "0.5367196", "0.53594273", "0.5354823", "0.53401196", "0.53360975", "0.53348875", "0.5323581", "0.5319187", "0.5317167", "0.5314671", "0.53127575", "0.5311917", "0.53082585", "0.53062576", "0.52948284", "0.5290948", "0.52884793" ]
0.5967904
19
Delete an appointment from the appointment table
def delete_appointment(request, appointment_id):
    if not appointment_id:
        return HttpResponse("Please provide an appointment Id", status=406)
    try:
        appointment = Appointment.objects.get(id=int(appointment_id))
    except (ValueError, Appointment.DoesNotExist):
        return HttpResponse("No appointment with that ID exists", status=404)
    appointment.delete()
    return HttpResponse("Successfully Deleted")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_patient_appointment():\n if request.method == 'POST':\n appointment_id = request.form['appointment_id']\n response_delete_patient_appointment = requests.post(server_url + 'patient/delete_appointment', json={\n 'appointment_id': appointment_id\n })\n response_delete_patient_appointment = response_delete_patient_appointment.json()\n if response_delete_patient_appointment.get('Status') == 'SUCCESS':\n referer = request.referrer\n return redirect(referer, code=302)\n else:\n return \"An error occurred deleting the appointment\"", "def remove_appointments():\n appointments = Appointment.objects.all()\n now = timezone.now()\n for appointment in appointments:\n if appointment.date < now:\n appointment.delete()", "def delete(self, application_id):", "def clerk_delete_appointment():\n if request.method == 'POST':\n appointment_id = request.form['appointment_id']\n\n response_clerk_delete_appointment = requests.post(server_url + 'medical_clerk/delete_appointment', json={\n 'appointment_id': appointment_id\n })\n response_clerk_delete_appointment = response_clerk_delete_appointment.json()\n\n if response_clerk_delete_appointment.get('Status') == \"SUCCESS\":\n referer = request.referrer\n return redirect(referer, code=302)\n else:\n return \"An error occurred deleting the appointment\"", "def cancel_appointment(request, id):\n appointment = get_object_or_404(Appointment, pk=id)\n\n if request.POST:\n appointment.delete()\n messages.add_message(request, messages.SUCCESS, 'The appointment has been canceled successfully.')\n return redirect('view_appointments')\n\n return render(request, 'cancel_appointment.html', {'appointment': appointment})", "def remove_by_person_id(self, p_id):\r\n self.__repo.remove_appointment_by_person_id(p_id)", "def remove_by_activity_id(self, a_id):\r\n self.__repo.remove_appointment_by_activity_id(a_id)", "def delete(id_patient: str):\n database = get_connection()\n col = database.patients\n query = {\"patient_data.id\": id_patient}\n col.delete_one(query)", "def del_apt(cal, c_id, apt_id):\n\n cal.events().delete( # pylint: disable=maybe-no-member\n calendarId=c_id,\n eventId=apt_id\n ).execute()", "def delete(self, _id):", "def delete_alarm():\r\n name = request.args.get('alarm_item')\r\n logging.info(\"Alarm deleted in delete_alarm(): \" + name)\r\n for alarm in alarms:\r\n if alarm['title'] == name:\r\n alarms.remove(alarm)", "def test_meeting_delete(self):\n pass", "def delete_medical_record_for_patient():\n if request.method == 'POST':\n medical_record_id = request.form['medical_record_id']\n\n response_delete_medical_record = requests.post(server_url + 'doctor/delete_medical_record', json={\n 'medical_record_id': medical_record_id\n })\n response_delete_medical_record = response_delete_medical_record.json()\n if response_delete_medical_record.get('Status') == \"SUCCESS\":\n referer = request.referrer\n return redirect(referer, code=302)\n else:\n return \"An error occurred deleting the appointment\"", "def delete_event(event):\n with session_scope(DBSession) as session:\n event_to_delete = session.query(Event).filter(Event.user_id == event.user_id,\n Event.calendar_id == event.calendar_id,\n Event.id == event.id).first()\n session.delete(event_to_delete)", "def deleteAgenda():\n data = request.json\n if \"agenda_id\" in data:\n connectMongo.deleteAgenda(data.get(\"agenda_id\"))\n return jsonify(response=200, msg=\"Agenda has been deleted\")\n else:\n return jsonify(response=400, msg=\"you didn't sent all the necessary information\")", "def _delete 
(self):\n self._exec ('delete from table_name where id=%(id)s')", "def delete_app(AppId=None):\n pass", "def delete_podcast(_id):\r\n Podcast.query.filter_by(id=_id).delete()\r\n # filter podcast by id and delete\r\n db.session.commit() # commiting the new change to our database\r", "def delete_event(event_id):\n connection = get_connection()\n cursor = connection.cursor()\n sql_string = \"DELETE FROM Event WHERE eventID =\"+str(event_id)\n cursor.execute(sql_string)\n connection.commit()", "def delete_event(id):\n event = Event.query.get(id)\n if not current_user.is_organizer(event) and not current_user.is_administrator():\n return redirect(url_for(\"main.index\"))\n db.session.delete(event)\n db.session.commit()\n return jsonify({\"message\": \"Your event has been successfully deleted.\"})", "def delete_meetup(meetup_id):\n query = \"DELETE FROM meetups WHERE meetup_id= '{}';\".format(meetup_id)\n cur.execute(query)\n db.conn.commit()", "def delete():", "def delete_event(eid):\n event = EventModel.query.get_or_404(eid)\n db.session.delete(event)\n db.session.commit()\n return ''", "def _DeleteEvent(self, event):\n\n self.cal_client.Delete(event.GetEditLink().href)", "def delete_activity():\n pass", "def delete(self):\n\n if not self.context.model.is_editable():\n raise Unauthorized(\"Editing is not allowed\")\n\n # the agenda_item is ad hoc if it has a document but no proposal\n if self.agenda_item.has_document and not self.agenda_item.has_proposal:\n document = self.agenda_item.resolve_document()\n trasher = ITrashable(document)\n trasher.trash()\n\n self.agenda_item.remove()\n\n return JSONResponse(self.request).info(\n _(u'agenda_item_deleted',\n default=u'Agenda Item Successfully deleted')).dump()", "def delete_entry(self, scenario_info):\n sql = self.delete(\"id\")\n self.cur.execute(sql, (scenario_info[\"id\"],))", "def booking_delete(id):\n booking = Booking.query.get(id)\n payment = Payment.query.filter_by(booking_id=id).first()\n if not booking:\n return \"DELETED\"\n db.session.delete(booking)\n db.session.delete(payment)\n db.session.commit()\n return redirect(url_for('bookings.booking_index'))", "def delete(self):\n query = \"DELETE FROM \" + self.table + \" WHERE \" + self.idfield + \"=%s\"\n dbh = dbstuff.getRW(self.dbh_key)\n try:\n c = dbh.cursor()\n c.execute(query, self.id)\n c.close()\n dbh.commit()\n finally:\n dbstuff.release(dbh,self.dbh_key)", "def delete_event(self, event):\n self._delete('events', self._build_params(uuid=event))", "def delete(self):\n\t\tself.eventStatus = 0\n\t\tself.save()\n\n\t\t# DEBUG\n\t\tcontext = {\n\t\t\t'call_stack': ''.join(traceback.format_stack()),\n\t\t\t'event': self,\n\t\t\t'server_addr': settings.SERVER_ADDRESS,\n\t\t}\n\t\tbody = render_to_string('MHLCallGroups/Scheduler/email_delete_event.txt', context)\n\t\tmail_admins(_('Event Deletion Attempt!'), body)", "def emeventdelete(request):\n if(request.GET):\n eid=request.GET.get(\"id\")\n s=\"delete from tbleventprograms where pId='\"+str(eid)+\"'\"\n try:\n c.execute(s)\n db.commit()\n except:\n pass\n else:\n return HttpResponseRedirect(\"/emevent\")\n return render(request,\"emevent.html\")", "def delete(self, request, pk):\n employee = EmployeeDetail.objects.get(pk=pk)\n employee.delete()\n return Response(\n data=' Entry deleted',\n status=status.HTTP_400_BAD_REQUEST\n )", "def delete_meal():", "def delete_task(task_id):\n db.task.delete_many({'_id': ObjectId(task_id)})", "def delete_business(yelp_id, conn):\n return conn.execute(Business.delete().where(Business.c.yelp_id == 
yelp_id))", "def delete(self)->None:\n database.cursor.execute(\n \"DELETE FROM {} WHERE id={}\".format(self.table_name, self.id))\n database.connection.commit()", "def delete_incident(self, id):\n sql = f\"DELETE FROM incidences WHERE incidences.id ={id}\"\n conn = Db().con\n curr = conn.cursor()\n curr.execute(sql)\n conn.commit()", "def delete(ident):\n con = mdb.connect(constants.sql_.IP, constants.sql_.USER, constants.sql_.PASS,\n constants.sql_.DB)\n with con:\n cur = con.cursor()\n sql = 'DELETE FROM '+constants.sql_tables.cron.name+' WHERE id = '+ str(ident)\n cur.execute(sql)\n con.close()", "def delete(self)->None:\n database.cursor.execute(\n \"DELETE FROM {} WHERE id = %s\".format(self.table_name), (self.id))\n database.connection.commit()", "def delete_presentation(self, talk_id):\r\n QtSql.QSqlQuery('''DELETE FROM presentations WHERE Id=\"%s\"''' % talk_id)\r\n log.info(\"Talk %s deleted.\" % talk_id)", "async def delete(self, pk):\n\n try:\n return await self.db.delete(pk)\n except IntegrityError:\n raise ConflictException(\n f\"That {self.db_model_name} cannot be deleted since it is actively used\"\n )", "def delete(self, id):\n empleadoeliminar = EmployeeModel.query.filter_by(employee_id=id).first()\n if empleadoeliminar:\n db.session.delete(empleadoeliminar)\n db.session.commit()\n return 201\n api.abort(404)", "def delete_question(request, question_id):\n raise NotImplementedError", "def delete_anime(utoa):\n db.session.query(UserToAnime)\\\n .filter(UserToAnime.userId == utoa.userId, UserToAnime.malId == utoa.malId)\\\n .delete()\n db.session.commit()", "def test_delete_patient(self):\n response = self.client.delete(\n reverse('patient:patient-detail', kwargs={'pk': Patient.objects.get().id}))\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(Patient.objects.count(), 0)", "def delete(self):\r\n self.domain.delete_item(self)", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self, request, pk, pk_reminder, format=None):\n reminder = self.get_reminder(pk=pk_reminder)\n reminder.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)", "def delete(self, request, pk, pk_reminder, format=None):\n reminder = self.get_reminder(pk=pk_reminder)\n reminder.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)", "def delete(self, request, pk, pk_reminder, format=None):\n reminder = self.get_reminder(pk=pk_reminder)\n reminder.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)", "def delete(self):\n ...", "def delete_conference(request, pk):\n conference = ConferenceLog.objects.get(pk=pk)\n conference.delete()\n return redirect('readapi:index')", "def delete_data(self):\n conn = self._connect_DB()\n cur = conn.cursor()\n cur.execute(\"DELETE FROM movie_table;\")\n self._close_connection(conn)", "def delete_entry(self, scenario_id):\n sql = self.delete(\"id\")\n self.cur.execute(sql, (scenario_id,))", "def delete_calendar(calendar):\n with session_scope(DBSession) as session:\n calendar_to_delete = session.query(Calendar).filter(\n Calendar.user_id == calendar.user_id, Calendar.id == calendar.id).first()\n session.delete(calendar_to_delete)", "def delete(self, *args, **kwargs):\n pass", "def delete(self, *args, **kwargs):\n pass", "def delete_record():\n global data_base, table, choice, res, confirmation, output1, place_for_enter, choice_row\n choice_row = choice.get()\n sqlite3_simple_delete_record(data_base, table, choice_row, res)\n 
output1.delete(1.0, END)\n confirmation.after(1, confirmation.destroy)\n place_for_enter.delete(0, END)", "def delete_entry_from_db(entry):\n db.session.delete(entry)\n db.session.commit()", "def delete_song_petition(request, id):\n # instance gets the id from the Song Petition selected\n instance = get_object_or_404(SongPetition, id=id)\n # delete method deletes the instance from the database\n instance.delete()\n # Feedbacj message telling that the petition was deleted \n messages.success(request, \"Petition succesfully deleted\")\n return redirect(\"petition:list\")", "def event_delete(req):\n event_id = req.match_dict['event_id']\n try:\n db_conn.event_delete(event_id)\n json = {'deleted': True}\n except Exception as e:\n json = {'errors': [str(e)]}\n return req.Response(json=json)", "def delete(self, id=None):\n if id:\n boat = test4ValidEntity(id)\n if boat == None:\n self.response.set_status(404)\n else:\n if boat.at_sea == False:\n query = Slip.query(Slip.current_boat == boat.id)\n result = query.fetch(limit = 1)\n for match in result:\n match.current_boat = None\n match.arrival_date = None\n match.put()\n boat.key.delete()\n self.response.write(\"Boat has been deleted!\") \n else:\n boat.key.delete()\n self.response.write(\"Boat has been deleted!\")", "def command_delete(date, start_time, calendar):\n if date in calendar.keys():\n ind = None\n for i, ev in enumerate(calendar[date]):\n if ev[\"start\"] == start_time:\n ind = i\n break\n if not ind == None:\n calendar[date].pop(ind)\n else:\n return f\"There is no event with start time of {start_time} on date {date} in the calendar\"\n\n if calendar[date] == []:\n del calendar[date]\n\n return True\n else:\n return f\"{date} is not a date in the calendar\"", "def delete_exam(request, exam_id):\n\n\temp = models.Employee.objects.get(user=request.user)\n\tif not emp.exam_permit:\n\t\traise Http404\n\texam = models.ExamName.objects.filter(\n\t\tpk=exam_id, soft_delete=False\n\t).first()\n\tif not exam:\n\t\traise Http404\n\texam.soft_delete = True\n\tactivity = 'Deleted Exam' + str(exam) + '.\\n'\n\texam.save(update_fields=['soft_delete'])\n\thistory = models.History(\n\t\t\t\tuser=emp,\n\t\t\t\tactivity=activity,\n\t\t\t\tactivity_type=\"delete exam\"\n\t\t\t)\n\thistory.save()\n\treturn HttpResponseRedirect('/view-exams')", "def service_delete(service):\n db = model.Session()\n service = _must_find(db, model.Service, service)\n db.delete(service)\n db.commit()\n\n\n # API Code #\n ############", "def delete_app(self, name):\n raise NotImplementedError", "def delete_task(self):\n tasks = self.session.query(self.Table).order_by(self.Table.deadline).all()\n if tasks:\n print('Chose the number of the task you want to delete:')\n for n, task in enumerate(tasks, 1):\n print(f'{n}. {task.task}. 
{task.deadline.strftime(\"%d %b\")}')\n self.session.query(self.Table).filter(self.Table.id == tasks[int(input())-1].id).delete()\n self.session.commit()\n else:\n print('Nothing to delete!')\n print()", "def event_delete(request, event_ident):\n try:\n event = Event.get_by_ident(event_ident)\n event.delete()\n\n messages.success(request,\n 'Event and its tasks were deleted successfully.')\n return redirect(reverse('all_events'))\n except ObjectDoesNotExist:\n raise Http404(\"No event found matching the query.\")\n except ProtectedError as e:\n return _failed_to_delete(request, event, e.protected_objects)", "def delete_event(event_id):\n try:\n events_coll.delete_one({\"_id\": ObjectId(event_id)})\n except Exception as e:\n print(e)", "def delete(self, _id):\n self._db[_id].delete()", "def delete(self, db):\n # Delete id if given\n if self._id is not None:\n self.collection(db).delete_one(\n filter={'_id': ObjectId(self._id)})", "def delete(self):\r\n db.session.delete(self)\r\n db.session.commit()", "def delete(self, sender, instance):\n adapter = self.adapters[sender]\n feed = adapter.get_feed_url(instance) or self.feed\n if adapter.can_delete(instance):\n client = self.get_client()\n event_id = CalendarEvent.objects.get_event_id(instance, feed)\n if event_id:\n client.events().delete(calendarId=feed, eventId=event_id).execute()\n CalendarEvent.objects.delete_event_id(instance, feed)", "def delete_item(self, table_name: str, primary_key: dict):\n table = self.resource.Table(table_name)\n table.delete_item(Key=primary_key)", "def deletePlayers():\n #deletes the contents of table players\n DB().execute(\"DELETE FROM players\", True)", "def delete_event(id):\n check_admin()\n\n event = Event.query.get_or_404(id)\n guests = GuestList.query.filter_by(event_id=event.id).all()\n\n for guest in guests:\n if guest.event_id == event.id:\n db.session.delete(guest)\n db.session.commit()\n\n db.session.delete(event)\n db.session.commit()\n flash('You have successfully deleted the event.')\n\n # redirect to the events page\n return redirect(url_for('admin.list_events'))\n\n return render_template(title=\"Delete Event\")", "async def delete_one(self, where):\n\n pass", "def delete_event_by_company(company):\n connection = get_connection()\n cursor = connection.cursor()\n sql_string = \"DELETE FROM Event WHERE company='\"+company+\"'\"\n cursor.execute(sql_string)\n connection.commit()", "def delete_events(usrservice,calservice):\r\n print(args.action, args.inuser, 'celendar events')", "def delete_point(self, key, table):\n if table == 'checkpoints':\n column = 'checkpoint_id'\n elif table == 'traversed':\n column = 'traversed_id'\n else:\n table = 'waypoints'\n key = self.__compound_key(key)\n column = 'waypoint_id'\n\n sql = f'DELETE FROM {table} WHERE {column}=?'\n connection = self.__create_connection()\n cur = connection.cursor()\n cur.execute(sql, (key,))\n connection.commit()", "def delete_table(self, name: str) -> None:", "def delete_ip(ip):\n sql = sqlite3.connect('data.db')\n cursor = sql.cursor()\n\n # Deleting single record now\n sql_delete_query = \"\"\"DELETE from Status where ip = ?\"\"\"\n\n cursor.execute(sql_delete_query, [ip])\n sql.commit()\n\n logging.debug(\"Record deleted successfully \")\n\n cursor.close()\n sql.close()", "def delete_record(records):\n delete_record()", "def taskdetail_delete(td):\n return IMPL.taskdetail_delete(td)", "def delete(anime_viewed_id):\n if isinstance(anime_viewed_id, int):\n anime_viewed = AnimeViewed.query.filter_by(id=anime_viewed_id).first()\n\n 
if not anime_viewed:\n abort(Response(f'The anime viewed with the ID {anime_viewed_id} was not found.', 404))\n\n anime_viewed.delete()\n\n return make_response(jsonify({}), 200)\n else:\n abort(Response(f'The specified anime viewed ID is invalid. Is not a number.', 400))", "def delete_record(uuid):\n\n collection[uuid].delete()\n return redirect('/')", "def delete_teams_by_date(self, datetime):\n self.execute(TABELLE['teams']['delete']['by_date'], (datetime,))", "def deletePod(peaPodInstance,**kwargs):\n\treturnToHome=True\n\treturnToHome=kwargs.get(\"returnToHome\",returnToHome)\n\n\tchoice=messagebox.askokcancel(\"Sure\",\"Are you sure you want to delete this pod?\")\n\tif choice:\n\t\tcurrentMaster=masterPod.currentMasterPod\n\t\tfor pea in currentMaster.peas:\n\t\t\tcurrentPeaPod=currentMaster.peas[pea]\n\t\t\tif currentPeaPod == peaPodInstance:\n\t\t\t\tdel currentMaster.peas[pea]\n\t\t\t\tbreak\n\t\t#Save\n\t\tcurrentMaster.save()\n\n\t\tif returnToHome:\n\t\t\tgoHome()", "def delete(self, **kwargs):\n self.dbdel('note', kwargs)", "def delete_item(self):\n\n\t\tdb.session.delete(self)\n\t\tdb.session.commit()", "def test_delete_podcast_episode(mocker):\n patched_delete_task = mocker.patch(\"search.search_index_helpers.deindex_document\")\n episode = PodcastEpisodeFactory.create()\n deindex_podcast_episode(episode)\n assert patched_delete_task.called is True\n assert patched_delete_task.call_args[0] == (\n gen_podcast_episode_id(episode),\n PODCAST_EPISODE_TYPE,\n )", "def _delete_from_db(self, instance: DBModelInstance) -> None:\n self.db.session.delete(instance)\n self.db.session.commit()", "def cmd_delete_employee():\r\n id = request.form.get('id', \"\")\r\n confirm = request.form.get(\"confirm\", \"\")\r\n if confirm != \"DELETE\":\r\n flash(f\"Contact '{id}' NOT deleted. Please enter DELETE in the confirm field.\")\r\n return redirect(url_for('main.jobs'))\r\n \r\n index = get_employee_by_id(id)\r\n User.query.filter(User.id == id).delete()\r\n db.session.commit()\r\n\r\n\r\n if index != None:\r\n flash(f\"Employee '{id}' was succesfully deleted!\")\r\n return redirect(url_for('main.employees'))\r\n else:\r\n flash(f\"Employee '{id}' was not found\")\r\n return redirect(url_for('main.employees'))", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()" ]
[ "0.7246865", "0.69911546", "0.6967758", "0.69243217", "0.6683708", "0.6667006", "0.64580506", "0.6360693", "0.63219357", "0.6229985", "0.6189006", "0.6186252", "0.6182702", "0.615862", "0.61367667", "0.6119257", "0.6077346", "0.60747683", "0.60690594", "0.6059356", "0.605511", "0.6051097", "0.6006715", "0.5988738", "0.5973764", "0.5956932", "0.59503645", "0.59375435", "0.59332514", "0.5920803", "0.5883893", "0.5865359", "0.5858888", "0.5845066", "0.5828199", "0.58260274", "0.5820496", "0.58183974", "0.5790538", "0.5784759", "0.5780109", "0.5779657", "0.5777921", "0.5776401", "0.5773597", "0.57731676", "0.5769195", "0.5762357", "0.5762357", "0.5762357", "0.5762357", "0.5758934", "0.5758934", "0.5758934", "0.5747747", "0.57445586", "0.5739648", "0.5730439", "0.57256484", "0.57226425", "0.57226425", "0.57191086", "0.5718013", "0.57015955", "0.5699559", "0.5695908", "0.56866443", "0.568607", "0.5684647", "0.5682271", "0.567768", "0.567376", "0.56710345", "0.5668885", "0.56649745", "0.5663537", "0.56599057", "0.5646026", "0.5639562", "0.5631176", "0.5622644", "0.5622057", "0.5616503", "0.56064045", "0.5606232", "0.5604415", "0.56026506", "0.55900615", "0.5590027", "0.55891806", "0.55883706", "0.5580623", "0.55798775", "0.55758846", "0.5571151", "0.5571033", "0.5569558", "0.556594", "0.556594", "0.556594" ]
0.7938065
0
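A minimal sketch of how a delete view like the one in the record above is typically wired and exercised in a Django project; the app layout, route, and test data below are illustrative assumptions rather than details taken from the dataset:

# urls.py -- hypothetical wiring for a delete_appointment-style view
from django.urls import path

from . import views  # assumes the view lives in this app's views.py

urlpatterns = [
    path(
        "appointments/<int:appointment_id>/delete/",
        views.delete_appointment,
        name="delete_appointment",
    ),
]

# Quick smoke test with Django's test client (assumes an Appointment with pk=1 exists):
#   from django.test import Client
#   response = Client().post("/appointments/1/delete/")
#   assert response.status_code == 200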
Creates a customisable calendar
def create_calendar(request):
    if request.method == 'POST':
        form = CalendarForm(request.POST)
        if form.is_valid():
            # prevent the form from saving immediately, since we still need to link the company
            calendar = form.save(commit=False)
            calendar.company = request.user.company
            calendar.save()
            return redirect('appointment:calendar_list')
    else:
        form = CalendarForm()
    return render(request, 'calendar_form.html', {'form': form})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_calendar(\n self, name=None, cal_id=None, supported_calendar_component_set=None\n ):\n return self.calendar_home_set.make_calendar(\n name,\n cal_id,\n supported_calendar_component_set=supported_calendar_component_set,\n )", "def create_events_calendar():\n service = get_calendar_service()\n if not service:\n return\n calendar = {\n 'summary': 'Ting som skjer i Telemarkgruppa',\n 'timeZone': 'Europe/Oslo',\n }\n cal_insert_response = service.calendars().insert(body=calendar).execute()\n public_acl = {\n 'role': 'reader',\n 'scope': {\n 'type': 'default'\n }\n }\n acl_insert_response = service.acl().insert(calendarId=cal_insert_response['id'], body=public_acl).execute()\n return acl_insert_response", "def make_calendar(\n self, name=None, cal_id=None, supported_calendar_component_set=None\n ):\n return Calendar(\n self.client,\n name=name,\n parent=self,\n id=cal_id,\n supported_calendar_component_set=supported_calendar_component_set,\n ).save()", "def create_calendar(actions, location_and_time_axes):\n calendar = ical.Calendar()\n calendar['PRODID'] = '{} {}'.format(ical.__name__, ical.__version__)\n calendar['VERSION'] = 2.0\n calendar['X-WR-CALNAME'] = 'PyCon.DE 2018'\n\n for location, date in actions.keys():\n meta_info = location_and_time_axes[(date.year, date.month, date.day)]\n time_axis = meta_info['time_axis']\n for action in actions[(location, date)]:\n if action['title'] == 'End':\n continue\n\n event = create_event(action, date, location, time_axis)\n\n calendar.add_component(event)\n\n return calendar", "def mkcalendar(self, url, body=\"\", dummy=None):\n return self.request(url, \"MKCALENDAR\", body)", "def calendar_for_event_description(ed):\n return icemac.ab.calendar.interfaces.ICalendar(ed.context)", "def _InsertCalendar(self, title='Little League Schedule',\n description='This calendar contains practice and game times',\n time_zone='America/Los_Angeles', hidden=False, location='Oakland',\n color='#2952A3'):\n print 'Creating new calendar with title \"%s\"' % title\n calendar = gdata.calendar.data.CalendarEntry()\n calendar.title = atom.data.Title(text=title)\n calendar.summary = atom.data.Summary(text=description)\n calendar.where.append(gdata.calendar.data.CalendarWhere(value=location))\n calendar.color = gdata.calendar.data.ColorProperty(value=color)\n calendar.timezone = gdata.calendar.data.TimeZoneProperty(value=time_zone)\n\n if hidden:\n calendar.hidden = gdata.calendar.data.HiddenProperty(value='true')\n else:\n calendar.hidden = gdata.calendar.data.HiddenProperty(value='false')\n\n new_calendar = self.cal_client.InsertCalendar(new_calendar=calendar)\n return new_calendar", "def create_calendar():\n calendar = Calendar()\n index = 0\n parameters = read_input.get_args()\n with open(parameters.filename, newline='') as csvfile:\n csv_opener = DictReader(csvfile, delimiter=',')\n for index, row in enumerate(csv_opener, start=1):\n event = CalendarEvent()\n event.name = row[parameters.title]\n event.begin = row[parameters.date]\n event.description = row[\"description\"]\n calendar.events.add(event)\n print(f\"Created {index} events.\")\n export_calendar(calendar, parameters.output)", "def createEvents(self):\n # If modifying these scopes, delete the file token.pickle.\n SCOPES = ['https://www.googleapis.com/auth/calendar']\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with 
open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n # create the service variable\n service = build('calendar', 'v3', credentials=creds)\n\n # Call the Calendar API\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\n\n # adds self.mainEvent to calendar\n event = service.events().insert(calendarId='primary', body=self.mainEvent).execute()\n print('Event created: %s' % (event.get('htmlLink')))\n\n # adds all values in self.importantDates to calendar\n for i in self.importantDates:\n event = service.events().insert(calendarId='primary', body=i).execute()\n print('Event created: %s' % (event.get('htmlLink')))", "def calendar(self, calendar_id):\r\n return c.Calendar(self, calendar_id)", "def calendar(self, calendar_id):\r\n return c.Calendar(self, calendar_id)", "def getCalendar(self):\n cal = BlankCalendar()\n for datable in self.run_query():\n cal.add_component(datable.getEvent())\n \n return cal", "def _calendar(self):\n schedule = self.account.schedule()\n calendar = self._calendars[config.outlook_calendar]\n\n return calendar", "def do_create(service,summary,description,startday,\\\n starttime,endtime,username,email):\n event = {\n 'summary': 'Code Clinic: {}'.format(summary),\n 'description': '{}.'.format(description),\n 'start': {\n 'dateTime': '{}T{}:00'.format(startday, starttime),\n 'timeZone': 'GMT+02',\n },\n 'end': {\n 'dateTime': '{}T{}:00'.format(startday,endtime),\n 'timeZone': 'GMT+02',\n },\n 'recurrence': [\n 'RRULE:FREQ=DAILY;COUNT=1'\n ],\n 'attendees': [\n {\n 'displayName': username,\n 'email': email,\n 'optional': True,\n 'comment': 'Creator',\n 'responseStatus': 'accepted',\n },\n ],\n 'anyoneCanAddSelf': True,\n\n 'reminders': {\n 'useDefault': False,\n 'overrides': [\n {'method': 'email', 'minutes': 24 * 60},\n {'method': 'popup', 'minutes': 10},\n ],\n },\n }\n\n event = service.events().insert(calendarId='primary', body=event,\\\n sendUpdates='all').execute()\n\n return event", "def generate_ics(events, config):\n\n # Create the Calendar\n calendar = icalendar.Calendar()\n calendar.add('prodid', config.calendar_prodid)\n calendar.add('version', '2.0')\n calendar.add('method', 'publish')\n\n for event_data in events:\n # Create the event\n event = icalendar.Event()\n\n # Populate the event\n event.add('summary', event_data['title'])\n event.add('description', get_description(event_data))\n event.add('uid', event_data['id'])\n event.add('location', event_data['place'])\n event.add('dtstart', get_datetime(event_data, 'when_start'))\n if event_data['when_end']:\n event.add('dtend', get_datetime(event_data, 'when_end'))\n event.add('dtstamp', datetime.datetime.now())\n\n # Add the event to the calendar\n calendar.add_component(event)\n\n return calendar.to_ical()", "def get_custom_date(self):\n _hol_center = None\n _config = __CFG__.Config()\n if _config.add_section(__DEF__.CONFIG_SECTION_CALENDAR):\n _cal = self.calendar.lower()\n if _cal in _config:\n try:\n _file = os.path.join(__DEF__.ROOT_DATA, _config[_cal])\n with open(_file, 'r') as _txt:\n _week_mask = \"Mon Tue Wed Thu Fri\"\n _hol_mask = 
[_lgn.pop() for _lgn in csv.reader(_txt)]\n _hol_center = CustomBusinessDay(holidays=_hol_mask,\n weekmask=_week_mask)\n except IOError:\n __LOG__.critical(\"Failing %s\", _config[_cal], exc_info=True)\n else:\n __LOG__.warning(\"Missing %s\", _cal, exc_info=True)\n else:\n __LOG__.error(\"Reading %s\", __DEF__.CONFIG_SECTION_CALENDAR, exc_info=True)\n # Output get_custom_date\n return _hol_center", "def calendar(self, name=None, cal_id=None, cal_url=None):\n if not cal_url:\n return self.calendar_home_set.calendar(name, cal_id)\n else:\n return Calendar(self.client, url=self.client.url.join(cal_url))", "def calendar_events(self):\r\n return CalendarEvents(self)", "def __init__(self, d, m, y):\n\n self.set_calendar(d, m, y)", "def _create(self, name=None, id=None, supported_calendar_component_set=None):\n if id is None:\n id = str(uuid.uuid1())\n self.id = id\n\n path = self.parent.url.join(id + \"/\")\n self.url = path\n\n # TODO: mkcalendar seems to ignore the body on most servers?\n # at least the name doesn't get set this way.\n # zimbra gives 500 (!) if body is omitted ...\n\n prop = dav.Prop()\n if name:\n display_name = dav.DisplayName(name)\n prop += [\n display_name,\n ]\n if supported_calendar_component_set:\n sccs = cdav.SupportedCalendarComponentSet()\n for scc in supported_calendar_component_set:\n sccs += cdav.Comp(scc)\n prop += sccs\n set = dav.Set() + prop\n\n mkcol = cdav.Mkcalendar() + set\n\n r = self._query(\n root=mkcol, query_method=\"mkcalendar\", url=path, expected_return_value=201\n )\n\n # COMPATIBILITY ISSUE\n # name should already be set, but we've seen caldav servers failing\n # on setting the DisplayName on calendar creation\n # (DAViCal, Zimbra, ...). Doing an attempt on explicitly setting the\n # display name using PROPPATCH.\n if name:\n try:\n self.set_properties([display_name])\n except:\n ## TODO: investigate. Those asserts break.\n error.assert_(False)\n try:\n current_display_name = self.get_display_name()\n error.assert_(current_display_name == name)\n except:\n log.warning(\n \"calendar server does not support display name on calendar? 
Ignoring\",\n exc_info=True,\n )\n error.assert_(False)", "def calendar(self):\r\n self.cal = QCalendarWidget()\r\n self.cal.setWindowTitle(\"Get Birthday\")\r\n self.cal.show()\r\n self.cal.clicked.connect(self.dateB)", "def buildAPICal(self, credentials):\n from googleapiclient.discovery import build\n return build('calendar', 'v3', credentials=self.creds)", "def add_to_calender(service, username): \n colors = service.colors().get().execute()\n d_and_t = df.get_add_to_calender_input(argv[1], argv[2])\n now = datetime.datetime.now()\n if d_and_t == None:\n return\n event_request_body = {\n 'start': {\n 'dateTime': df.convert_to_RFC_datetime(d_and_t[0], d_and_t[1], d_and_t[2], d_and_t[3][0]-2, d_and_t[3][1]),\n 'timeZone': 'Africa/Johannesburg'\n },\n 'end': {\n 'dateTime': df.convert_to_RFC_datetime(d_and_t[0], d_and_t[1], d_and_t[2], d_and_t[4][0]-2, d_and_t[4][1]),\n 'timeZone': 'Africa/Johannesburg'\n },\n 'summary': f\"{username} - Code Clinic\",\n 'description': 'empty',\n 'status': 'confirmed',\n 'transparency': 'opaque',\n 'visibility': 'public',\n 'location': 'WTC',\n 'guestsCanModify': True,\n 'attendees': [\n { \n 'displayName': username,\n 'organizer': True,\n 'email': f'{username}@student.wethinkcode.co.za',\n 'optional': True,\n 'responseStatus': 'accepted'\n }\n ]\n }\n start = event_request_body['start']['dateTime']\n end = event_request_body['end']['dateTime']\n\n overlaps = check_if_slots_overlap(start, end, service, username)\n if overlaps == False:\n response = service.events().insert(calendarId=get_events.calendar_id, sendUpdates='all', body=event_request_body).execute()\n print(\"\\nYour slot has been created...\")\n else:\n print(\"\\nYou already have an event scheduled for this time. Please choose another time...\")\n events, count = get_events.get_events_for_next_7_days_to_delete(username, service)\n if count == 0:\n print(\"\\nYou currently don't have any slots created.\")\n return", "def new_calendar(self, calendar_name):\n if not calendar_name:\n return None\n\n url = self.build_url(self._endpoints.get('root_calendars'))\n\n response = self.con.post(url, data={self._cc('name'): calendar_name})\n if not response:\n return None\n\n data = response.json()\n\n # Everything received from cloud must be passed as self._cloud_data_key\n return self.calendar_constructor(parent=self,\n **{self._cloud_data_key: data})", "def ical(self) -> Calendar:\n cal = Calendar()\n event = IEvent()\n event.add(\"summary\", \"Video Chat\")\n event.add(\"dtstart\", self.start)\n cal.add_component(event)\n return cal.to_ical()", "def bootstrap_calendar_js(*args, **kwargs):\n\n options = {}\n\n try:\n options[\"language\"] = kwargs[\"language\"]\n except KeyError:\n pass\n\n try:\n options[\"events_url\"] = kwargs[\"events_url\"]\n except KeyError:\n options[\"events_url\"] = '/calendar/json/'\n\n try:\n options[\"view\"] = kwargs[\"view\"]\n except KeyError:\n options[\"view\"] = 'month'\n\n try:\n options[\"language\"] = kwargs[\"language\"]\n except KeyError:\n options[\"language\"] = 'en'\n\n try:\n options[\"first_day\"] = kwargs[\"first_day\"]\n except KeyError:\n options[\"first_day\"] = 2\n\n try:\n options[\"width\"] = kwargs[\"width\"]\n except KeyError:\n options[\"width\"] = '100%'\n\n try:\n options[\"time_start\"] = kwargs[\"time_start\"]\n except KeyError:\n options[\"time_start\"] = '00:00'\n\n try:\n options[\"time_end\"] = kwargs[\"time_end\"]\n except KeyError:\n options[\"time_end\"] = '24:00'\n\n return render_to_string(\n 
'django_bootstrap_calendar/calendar_js.html',\n options\n )", "def modify_cal(cal, convert_dic):\n new_cal = Calendar()\n for elm in cal.walk():\n if elm.name == \"VEVENT\":\n event = elm\n event[\"summary\"] = convert_dic[str(elm.get(\"summary\"))] \n new_cal.add_component(event)\n return new_cal", "def abrirCalendar():\n try:\n var.dlgcalendar.show()\n except Exception as error:\n print('Error: %s ' % str(error))", "def bootstrap_calendar(css_classes):\n return render_to_string(\n 'django_bootstrap_calendar/calendar.html',\n {'css_classes': css_classes}\n )", "def show_cal(request, year=None, month=None):\n if year == None:\n # get the current comic as a starting point\n lToday = Comic.objects.filter(published=True).order_by('-date')[0].date\n year = lToday.year\n month = lToday.month\n\n return calendar(request, year, month)", "def calendar_choices(self):\n if not self._calendars:\n if self.authenticated:\n default = self.account.schedule().get_default_calendar()\n # {\n # \"default\" : <DEFAULT_CALENDAR>,\n # \"<CALENDAR_NAME>: <CALENDAR>,\n # ...\n # }\n self._calendars = {\n DEFAULT_CALENDAR: default,\n **{\n c.name: c\n for c in self.account.schedule().list_calendars() if c.name != default.name\n }\n }\n\n return self._calendars", "def create_event(user, cls, date):\n\n name = cls['name']\n day_of_week = cls['day']\n time = cls['time']\n location = cls['location']\n\n start_time, end_time = [extract_time(t.strip()) for t in time.split('-')]\n\n # Get datetime for next occurrence of day of week, starting from today\n end_date = datetime(date.year, date.month, date.day) + timedelta(weeks=10) - timedelta(minutes=1)\n start = datetime(date.year, date.month, date.day, hour=start_time[0], minute=start_time[1])\n end = datetime(date.year, date.month, date.day, hour=end_time[0], minute=end_time[1])\n\n # Prevents creation of already-imported events through hashing of class info\n event, created = Event.objects.get_or_create(\n calendar=user.calendars.get(title='Default'),\n import_hash=hash(frozenset(cls.items()))\n )\n\n event.title = name\n event.start = start\n event.end = end\n event.location = location\n event.description = \"Imported by TritonSync\"\n event.save()\n\n weekdays = ['0'] * 7\n weekdays[DAYS_OF_WEEK[day_of_week]] = '1'\n\n if created:\n WeeklyRecurrence.objects.create(\n days_of_week=''.join(weekdays),\n frequency=1,\n last_event_end=end_date,\n event=event\n )\n else:\n recurrence = WeeklyRecurrence.objects.get(event=event)\n recurrence.last_event_end = end_date\n recurrence.save()\n\n return event", "def getCalendar(self):\n return aq_parent(aq_inner(self))", "def __init__(self):\n self._calendars = {}\n self._scopes = SCOPES\n self._client_id = CLIENT_ID\n self._auth_type = AUTH_TYPE\n self._credentials = (\n self._client_id,\n )\n self.account = Account(\n credentials=self._credentials,\n auth_flow_type=self._auth_type,\n token_backend=WorkdayTokenBackend(),\n )\n self.itinerary_map = {\n MULTIPLE_EVENTS: self.generate_itinerary_multiple_events,\n SINGLE_EVENT: self.generate_itinerary_single_event,\n }", "def test_public_calendar_date(self):\n today = timezone.now().date()\n b4 = today + relativedelta(days=-1)\n one = today + relativedelta(days=7)\n two = today + relativedelta(days=14)\n year = today + relativedelta(years=1)\n public = PermissionFactory(slug=Permission.PUBLIC)\n publish = StatusFactory(publish=True)\n start = timezone.now().time()\n EventFactory(\n description='a', start_date=one, start_time=start, status=publish,\n permission=public,\n )\n 
EventFactory(\n description='b', start_date=two, start_time=start, status=publish,\n permission=public,\n )\n # do NOT include this one because it is older than two months\n EventFactory(\n description='c', start_date=year, start_time=start, status=publish,\n permission=public,\n )\n # do NOT include this one because it for yesterday\n EventFactory(\n description='d', start_date=b4, start_time=start, status=publish,\n permission=public,\n )\n events = Event.objects.public_calendar()\n self.assertEquals(\n ['a', 'b'],\n [e.description for e in events]\n )", "def __addCalendar(self, store, element):\n \n calendar = self.__getStore(store, element.get(\"type\"))\n\n # Months Widths\n if element.find(\"months/monthContext/monthWidth\") is not None:\n months = self.__getStore(calendar, \"month\")\n for child in element.findall(\"months/monthContext/monthWidth\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\")\n if not format in months:\n months[format] = {}\n \n for month in child.findall(\"month\"):\n if not month.get(\"draft\"):\n name = month.get(\"type\").upper()\n if not name in months[format]:\n months[format][name] = month.text\n\n\n # Day Widths\n if element.find(\"days/dayContext/dayWidth\") is not None:\n days = self.__getStore(calendar, \"day\")\n for child in element.findall(\"days/dayContext/dayWidth\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\")\n if not format in days:\n days[format] = {}\n\n for day in child.findall(\"day\"):\n if not day.get(\"draft\"):\n name = day.get(\"type\").upper()\n if not name in days[format]:\n days[format][name] = day.text\n\n\n # Quarter Widths\n if element.find(\"quarters/quarterContext/quarterWidth\") is not None:\n quarters = self.__getStore(calendar, \"quarter\")\n for child in element.findall(\"quarters/quarterContext/quarterWidth\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\")\n if not format in quarters:\n quarters[format] = {}\n\n for quarter in child.findall(\"quarter\"):\n if not quarter.get(\"draft\"):\n name = quarter.get(\"type\").upper()\n if not name in quarters[format]:\n quarters[format][name] = quarter.text\n \n \n # Date Formats\n if element.find(\"dateFormats/dateFormatLength\") is not None:\n dateFormats = self.__getStore(calendar, \"date\")\n for child in element.findall(\"dateFormats/dateFormatLength\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\").upper()\n text = child.find(\"dateFormat/pattern\").text\n if not format in dateFormats:\n dateFormats[format] = text\n\n\n # Time Formats\n if element.find(\"timeFormats/timeFormatLength\") is not None:\n timeFormats = self.__getStore(calendar, \"time\")\n for child in element.findall(\"timeFormats/timeFormatLength\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\").upper()\n text = child.find(\"timeFormat/pattern\").text\n if not format in timeFormats:\n timeFormats[format] = text\n \n \n # DateTime Formats\n if element.find(\"dateTimeFormats/availableFormats\") is not None:\n datetime = self.__getStore(calendar, \"datetime\")\n for child in element.findall(\"dateTimeFormats/availableFormats/dateFormatItem\"):\n if not child.get(\"draft\"):\n # no uppercase here, because of intentianal camelcase\n format = child.get(\"id\")\n text = child.text\n if not format in datetime:\n datetime[format] = text\n \n \n # Fields\n if element.find(\"fields/field\") is not None:\n fields = self.__getStore(calendar, \"field\")\n for child in element.findall(\"fields/field\"):\n if not child.get(\"draft\"):\n format = 
child.get(\"type\").upper()\n for nameChild in child.findall(\"displayName\"):\n if not nameChild.get(\"draft\"):\n text = nameChild.text\n if not format in fields:\n fields[format] = text\n break\n \n \n # Relative\n if element.find(\"fields/field\") is not None:\n relatives = self.__getStore(calendar, \"relative\")\n for child in element.findall(\"fields/field\"):\n if not child.get(\"draft\"):\n format = child.get(\"type\")\n if child.findall(\"relative\"):\n relativeField = self.__getStore(relatives, format)\n for relChild in child.findall(\"relative\"):\n if not relChild.get(\"draft\"):\n pos = relChild.get(\"type\")\n text = relChild.text\n if not pos in relativeField:\n relativeField[pos] = text", "def calendar(self):\n if \"calendar\" in self._prop_dict:\n if isinstance(self._prop_dict[\"calendar\"], OneDriveObjectBase):\n return self._prop_dict[\"calendar\"]\n else :\n self._prop_dict[\"calendar\"] = Calendar(self._prop_dict[\"calendar\"])\n return self._prop_dict[\"calendar\"]\n\n return None", "def calendar(self):\n if \"calendar\" in self._prop_dict:\n if isinstance(self._prop_dict[\"calendar\"], OneDriveObjectBase):\n return self._prop_dict[\"calendar\"]\n else :\n self._prop_dict[\"calendar\"] = Calendar(self._prop_dict[\"calendar\"])\n return self._prop_dict[\"calendar\"]\n\n return None", "def generate_ics(days: Sequence[dict], filename: Text) -> None:\n cal = Calendar()\n cal.add(\"X-WR-CALNAME\", \"ไธญๅ›ฝๆณ•ๅฎš่Š‚ๅ‡ๆ—ฅ\")\n cal.add(\"X-WR-CALDESC\", \"ไธญๅ›ฝๆณ•ๅฎš่Š‚ๅ‡ๆ—ฅๆ•ฐๆฎ๏ผŒ่‡ชๅŠจๆฏๆ—ฅๆŠ“ๅ–ๅ›ฝๅŠก้™ขๅ…ฌๅ‘Šใ€‚\")\n cal.add(\"VERSION\", \"2.0\")\n cal.add(\"METHOD\", \"PUBLISH\")\n cal.add(\"CLASS\", \"PUBLIC\")\n\n cal.add_component(_create_timezone())\n\n days = sorted(days, key=lambda x: x[\"date\"])\n\n for fr, to in _iter_date_ranges(days):\n start = _cast_date(fr[\"date\"])\n end = _cast_date(to[\"date\"]) + datetime.timedelta(days=1)\n\n name = fr[\"name\"] + \"ๅ‡ๆœŸ\"\n if not fr[\"isOffDay\"]:\n name = \"ไธŠ็ญ(่กฅ\" + name + \")\"\n cal.add_component(_create_event(name, start, end))\n\n with open(filename, \"wb\") as f:\n f.write(cal.to_ical())", "def calendar_lists(self):\r\n return CalendarLists(self)", "def main():\r\n creds = None\r\n # The file token.json stores the user's access and refresh tokens, and is\r\n # created automatically when the authorization flow completes for the first\r\n # time.\r\n if os.path.exists('cal_token.json'):\r\n creds = Credentials.from_authorized_user_file('cal_token.json', SCOPES)\r\n # If there are no (valid) credentials available, let the user log in.\r\n if not creds or not creds.valid:\r\n if creds and creds.expired and creds.refresh_token:\r\n creds.refresh(Request())\r\n else:\r\n flow = InstalledAppFlow.from_client_secrets_file(\r\n 'client_secret.json', SCOPES)\r\n creds = flow.run_local_server(port=0)\r\n # Save the credentials for the next run\r\n with open('cal_token.json', 'w') as token:\r\n token.write(creds.to_json())\r\n\r\n service = build('calendar', 'v3', credentials=creds)\r\n\r\n return service", "def create_month_scr(self, month, toogle_today=False):\n\n scr = Screen()\n m = self.month_names_eng[self.active_date[1] - 1]\n scr.name = \"%s-%s\" % (m, self.active_date[2]) # like march-2015\n\n # Grid for days\n grid_layout = GridLayout(cols=7, rows=7, size_hint=(1, 1), pos_hint={\"top\": 1})\n scr.add_widget(grid_layout)\n\n # Days abbrs\n for i in range(7):\n if i >= 5: # weekends\n l = Label(text=self.days_abrs[i], color=(1, 0, 0, 1))\n else: # work days\n l = Label(text=self.days_abrs[i], 
text_size=(self.size[0], None), halign=\"center\")\n\n grid_layout.add_widget(l)\n\n global holiday, halfday\n\n # Buttons with days numbers\n for week in month:\n for day in week:\n if day[1] >= 6: # weekends\n self.tbtn = ToggleBtn(text=str(day[0]), color=(0, 0, 0, 1))\n else:\n self.tbtn = ToggleBtn(text=str(day[0]), color=(0, 0, 0, 1))\n for i in range(len(holiday)):\n if self.active_date[2] == holiday[i][2]:\n if self.active_date[1] == holiday[i][1]:\n if day[0] == holiday[i][0]:\n self.tbtn.background_color=(128, 0, 128, 1)\n for i in range(len(halfday)):\n if self.active_date[2] == halfday[i][2]:\n if self.active_date[1] == halfday[i][1]:\n if day[0] == halfday[i][0]:\n self.tbtn.background_color=(0, 255, 255, 0.5)\n\n self.tbtn.bind(on_press=self.get_btn_value)\n\n if toogle_today:\n # Down today button\n if day[0] == self.active_date[0] and day[2] == 1:\n self.tbtn.state = \"down\"\n # Disable buttons with days from other months\n if day[2] == 0:\n self.tbtn.text = \" \"\n self.tbtn.disabled = True\n self.tbtn.background_color = (0, 0, 0, 0.1)\n\n grid_layout.add_widget(self.tbtn)\n\n self.sm.add_widget(scr)", "def create_event(username, title, start, end, description=\" \", show_me_as=\"busy\"):\n _validate_params(title, start, end, description, show_me_as)\n\n token = \"tokens/\" + username + \".pkl\"\n credentials = pickle.load(open(token, \"rb\"))\n service = build('calendar', 'v3', credentials=credentials)\n\n event = dict(\n summary=title,\n start={\"date\": start, \"timeZone\": \"Europe/Athens\"},\n end={\"date\": end, \"timeZone\": \"Europe/Athens\"},\n description=description,\n transparency=transparency_d.get(show_me_as),\n guestsCanInviteOthers=False,\n )\n return service.events().insert(calendarId=CALENDAR_ID, body=event).execute()", "def add_calendar(calendar):\n with session_scope(DBSession) as session:\n session.add(calendar)", "def _create_schedules(self):\n\n ''''''", "def _trading_calendar(bundle=DEFAULT_BUNDLE):\n bundle_data = bundles.load(bundle)\n return bundle_data.equity_daily_bar_reader.trading_calendar", "def create_ical_file(list_of_events, strasse, hausnummer):\n cal = Calendar()\n\n # Some properties are required to be compliant:\n cal.add('prodid', '-//My calendar product//mxm.dk//')\n cal.add('version', '2.0')\n\n global total_number_of_events\n total_number_of_events = len(list_of_events)\n\n all_ical_events = create_cal_events(list_of_events, strasse, hausnummer)\n for evnt in all_ical_events:\n # Add the event to the calendar:\n cal.add_component(evnt)\n\n cal_as_ical = cal.to_ical()\n create_folder_if_not_exists()\n # Write iCal file to disk\n return save_ical_file(cal_as_ical, get_filename(strasse, hausnummer))", "def calendars(self):\r\n return c.Calendars(self)", "def calendars(self):\r\n return c.Calendars(self)", "def calendar(self):\n from office365.outlook.calendar.calendar import Calendar\n return self.properties.get('calendar',\n Calendar(self.context, ResourcePath(\"calendar\", self.resource_path)))", "def command_add(date, start_time, end_time, title, calendar):\n if is_calendar_date(date) and all([(i in range(0, 24)) for i in (start_time, end_time)]) and start_time <= end_time and is_natural_number(str(start_time)) and is_natural_number(str(end_time)):\n event = {\n \"start\": start_time,\n \"end\": end_time,\n \"title\": title,\n }\n if calendar.get(date) is None:\n calendar[date] = [event]\n else:\n calendar[date].insert(0, event)\n # calendar[date].append(event)\n return True\n return False", "def check_calender_api():\n cal = 
CalendarUtil()\n fromdate = datetime(2020, 5, 27, 19, 30, 0)\n todate = fromdate + timedelta(hours=0)\n event = cal.addToCalendar(\"avishekh.bharati@gmail.com\", fromdate, todate, \"this is summary...\")\n print(event)\n return jsonify({\"success\": True})", "def createEvent(event):\n event = {\n 'summary': event.description,\n 'location': \"\",\n 'description': \"\",\n 'start': {\n 'dateTime': event.datetime_start,\n 'timeZone': \"America/Los_Angeles\"\n },\n 'end': {\n 'dateTime': event.datetime_end,\n 'timeZone': \"America/Los_Angeles\"\n },\n }\n\n event = service.events().insert(calendarId=SF_FUNCHEAP_CAL_ID, body=event).execute()", "def calendar(request, year=None, month=None):\n today = datetime.date.today()\n year = int(year) if year else today.year\n month = int(month) if month else today.month\n try:\n first_of_month = datetime.date(year, month, 1)\n except ValueError: # Not a valid year and month\n raise Http404\n\n events = Event.objects.filter(event_start__year=year, event_start__month=month)\n cal = EventCalendar(events, year, month).formatmonth(year, month)\n\n user = request.user\n future_attending_events = attending_events(user, today)\n\n months = year * 12 + month - 1 # months since epoch (Christ)\n month_list = [\n datetime.date(m // 12, m % 12 + 1, 1) for m in range(months - 5, months + 7)\n ]\n\n # Get some random dates in the current, next, and previous month.\n # These dates are used load the calendar for that month.\n # * prev is some day in the previous month\n # * this is some day in this month\n # * next is some day in the next month\n context = {\n \"calendar\": mark_safe(cal),\n \"prev\": first_of_month - datetime.timedelta(27),\n \"this\": first_of_month,\n \"next\": first_of_month + datetime.timedelta(32),\n \"future_attending_events\": future_attending_events,\n \"month_list\": month_list,\n }\n\n return render(request, \"events/event_list.html\", context)", "def __init__(self, calendar_id=None, calendar_type=None, weekend_mask=None, source_provider=None, properties=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration.get_default_copy()\n self.local_vars_configuration = local_vars_configuration\n\n self._calendar_id = None\n self._calendar_type = None\n self._weekend_mask = None\n self._source_provider = None\n self._properties = None\n self.discriminator = None\n\n self.calendar_id = calendar_id\n self.calendar_type = calendar_type\n self.weekend_mask = weekend_mask\n self.source_provider = source_provider\n self.properties = properties", "def isocalendar(self, *args, **kwargs): # real signature unknown\r\n pass", "def add_event(creds, begin, end):\n service = build(\"calendar\", \"v3\", credentials=creds)\n\n # Call the Calendar API\n event = {\n \"description\": \"an event\",\n \"start\": {\n \"dateTime\": begin,\n \"timeZone\": \"Europe/Paris\",\n },\n \"end\": {\n \"dateTime\": end,\n \"timeZone\": \"Europe/Paris\",\n },\n }\n inserted_event = service.events().insert(calendarId=\"primary\", body=event).execute()\n print(\"Event created: %s\" % (inserted_event.get(\"htmlLink\")))", "def createCalendarEvent(self, ISBN, username):\n\n date = datetime.now()\n dueDate = (date + timedelta(days=7)).strftime(\"%Y-%m-%d\")\n time_start = \"{}T09:00:00+10:00\".format(dueDate)\n time_end = \"{}T10:00:00+10:00\".format(dueDate)\n eventID = ISBN+username.lower()\n\n event = {\n \"summary\": ISBN,\n \"id\": eventID,\n \"location\": \"RMIT Building 14\",\n \"description\": 
\"Book Due to be Returned\",\n \"start\": {\n \"dateTime\": time_start,\n \"timeZone\": \"Australia/Melbourne\",\n },\n \"end\": {\n \"dateTime\": time_end,\n \"timeZone\": \"Australia/Melbourne\",\n },\n \"attendees\": [\n {\"email\": \"kevin@scare.you\"},\n {\"email\": \"shekhar@wake.you\"},\n ],\n \"reminders\": {\n \"useDefault\": False,\n \"overrides\": [\n {\"method\": \"email\", \"minutes\": 5},\n {\"method\": \"popup\", \"minutes\": 10},\n ],\n }\n }\n\n event = self.service.events().insert(\n calendarId=\"primary\", body=event).execute()\n return event['id']", "def add_calendar_entries(calendar, dry_run, economic, date):\n today = date.isoformat()[:10] + \"T00:00:00Z\"\n tomorrow = (date + datetime.timedelta(days=1)).isoformat()[:10] + \"T00:00:00Z\"\n for event in calendar.get_events(today, tomorrow):\n try:\n entry = economic.convert_calendar_event_to_entry(event)\n if entry:\n economic.add_time_entry(entry, dry_run)\n except UnicodeDecodeError as e:\n print(e)", "def create_event(klass, form, creator):\n\n if form.is_recurring.data:\n # Series\n return klass.create_series(form, creator)\n # Single event\n return klass.create_single_event(form, creator)", "def __init__(self):\n import datetime as dt\n import dateutil as du\n from dateutil import relativedelta\n\n #Initial date calculations\n self.right_now = dt.datetime.utcnow()\n self.beginDelta = -2\n self.endDelta = 365\n self.timeDeltaCashBegin = du.relativedelta.relativedelta(months=self.beginDelta)\n self.timeDeltaCashEnd = dt.timedelta(days=self.endDelta)\n self.begin_date = self.right_now + self.timeDeltaCashBegin\n self.end_date = self.right_now + self.timeDeltaCashEnd\n\n #today's date to initialize the Cash event\n self.today_date = str(dt.datetime.date(self.right_now))\n\n #time variable for event creation // included date list to decipher cash update days\n self.create_begin = dt.datetime.fromisoformat(self.right_now.date().isoformat()).isoformat() + 'Z'\n self.create_end = self.end_date.isoformat() + 'Z'\n self.create_duration = (self.end_date - self.right_now).days\n self.iterate_days = self.iterateList(self.create_duration)\n\n #time variables used in deletion code\n self.clear_begin = self.begin_date.isoformat() + 'Z'\n self.clear_end = self.end_date.isoformat() + 'Z'\n\n #Smaller size for event creation/deleting testing\n self.test_duration = 40\n self.test_days = self.iterateList(self.test_duration)\n \n #Store old event list to check if changes need to be made\n self.check_for_updates = []\n self.cash_history = []\n\n self.creds = self.getUsrCreds()\n self.service = self.buildAPICal(self.creds)\n self.usrCals = self.getUsrCals(self.service)\n\n #Check if Calendar is Present and get the details -- if not, build one\n if self.checkCashCal(self.usrCals) == False:\n self.usr_csh_id, self.usr_csh_cal = self.buildCashCal(self.usrCals)\n else:\n self.usr_csh_id = self.getCshID(self.usrCals)\n self.usr_csh_cal = self.getCshCal(self.usrCals)", "def create_cal_events(list_of_events, strasse, hausnummer):\n list_of_ical_events = []\n\n for calendarEvent in list_of_events:\n event = Event()\n event = create_ical_event_from_calendar_event(event, calendarEvent, 8, 10)\n\n # Automatic encoding is not yet implemented for parameter values, so you must use the โ€˜v*โ€™ types you can import from the icalendar package (theyโ€™re defined in icalendar.prop):\n event['location'] = vText('{} {}, Bremen'.format(strasse, hausnummer))\n\n # TODO uid exaclty according to specification https://www.kanzaki.com/docs/ical/uid.html\n 
event['uid'] = event['dtstart'].to_ical()\n event.add('priority', 5)\n list_of_ical_events.append(event)\n\n return list_of_ical_events", "def generate_appointments(days=14):\n start = datetime.date.today()\n end = start + datetime.timedelta(days=days)\n\n subs = TimelineSubscription.objects.filter(Q(end__gte=now()) | Q(end__isnull=True))\n\n for sub in subs:\n for milestone in sub.timeline.milestones.all():\n milestone_date = sub.start.date() + datetime.timedelta(days=milestone.offset)\n #Create appointment(s) for this subscription within the task window\n if start <= milestone_date <= end:\n appt, created = Appointment.objects.get_or_create(\n subscription=sub,\n milestone=milestone,\n date=milestone_date\n )", "def create_vcal_string(start_date, location, content, uid):\n \n time_delta = settings.REMINDER_TIME_BEFORE_APPOINTMENT\n #need time difference in minutes for alarm\n alarm_time = start_date - time_delta\n end_date = start_date + settings.DEFAULT_APPOINTMENT_DURATION\n \n vcal_data = \\\n\"\"\"BEGIN:VCALENDAR\nVERSION:1.0\nBEGIN:VEVENT\nUID:%(uid)s\nDTSTART:%(start)s\nDTEND:%(end)s\nDESCRIPTION:%(content)s\nSUMMARY:%(content)s\nDTSTAMP:%(stamp)s\nLOCATION:%(location)s\nDALARM:%(alarm)s\nAALARM:%(alarm)s\nEND:VEVENT\nEND:VCALENDAR\"\"\" % {\\\n 'uid': uid,\n 'content': content,\n 'location': location,\n 'start': start_date.strftime(\"%Y%m%dT%H%M%S\"),\n 'end': end_date.strftime(\"%Y%m%dT%H%M%S\"),\n 'stamp': datetime.now().strftime(\"%Y%m%dT%H%M%S\"), \n 'alarm': alarm_time.strftime(\"%Y%m%dT%H%M%S\")}\n \n return vcal_data", "def calendar_events(self):\r\n return calendars.CalendarEvents(self)", "def __init__(self, email, password, title, location, calendar):\n self.title = title\n self.location = location\n self.calendar_title = calendar\n self.calendar_link = \"/calendar/feeds/default/private/full\"\n self.calendar_service = gdata.calendar.service.CalendarService()\n self.calendar_service.email = email\n self.calendar_service.password = password\n try:\n self.calendar_service.ProgrammaticLogin()\n except gdata.service.BadAuthentication as BadAuth:\n print(BadAuth)\n sys.exit(2)\n except gdata.service.CaptchaRequired as Captcha:\n print(Captcha)\n sys.exit(2)\n except:\n raise\n self.feed = self.calendar_service.GetOwnCalendarsFeed()\n for a_calendar in self.feed.entry:\n if self.calendar_title == a_calendar.title.text:\n self.calendar_link = a_calendar.link[0].href", "def create_google_calendar_event(self, data):\n # TODO Refactor to celery\n gcalendar = services.GoogleCalendarService()\n gcalendar.initialize()\n\n event = {\n 'summary': f'{data.get(\"group\")} - {data.get(\"title\")}',\n 'location': data.get('location'),\n 'description': data.get('description'),\n 'start': {\n 'dateTime': data.get('start').isoformat(),\n },\n 'end': {\n 'dateTime': data.get('end').isoformat(),\n },\n }\n\n created_event = gcalendar.create_event(event)\n event_id = created_event.get('id')\n event_htmllink = created_event.get('htmlLink')\n\n return event_id, event_htmllink", "def populate(self):\n record = yield self.directory.recordWithShortName(RecordType.user, u\"wsanchez\")\n yield self.transactionUnderTest().calendarHomeWithUID(record.uid, create=True)\n calendar = yield self.calendarUnderTest(name=\"calendar\", home=record.uid)\n for f in os.listdir(self.holidays_dir):\n if f.endswith(\".ics\"):\n with open(os.path.join(self.holidays_dir, f)) as fin:\n component = Component.fromString(fin.read())\n yield calendar._createCalendarObjectWithNameInternal(f, component, 
internal_state=ComponentUpdateState.RAW)\n yield self.commit()", "def create_event(organizer, description, location, days):\n time = timezone.now() + datetime.timedelta(days=days)\n return Event.objects.create(event_organizer=organizer, event_desctiption=description, event_location=loaction, event_date = time)", "def __calender_events(self):\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('calendar', 'v3', http=http)\n\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\n pt=\"Getting the upcoming latest events\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % pt)\n self.speech.synthesize_text(pt)\n eventsResult = service.events().list(\n calendarId='primary', timeMin=now, maxResults=1, singleEvents=True,\n orderBy='startTime').execute()\n events = eventsResult.get('items', [])\n\n if not events:\n pq=\"No upcoming events found.\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % pt)\n self.speech.synthesize_text(pq)\n for event in events:\n start = event['start'].get('dateTime', event['start'].get('date'))\n #start1=''.join(start)\n summary=event['summary']\n print start,summary\n requests.get(\"http://localhost:8080/statement?text=\"+start+\" \"+summary)", "def calendar(self, name=None, cal_id=None):\n if name and not cal_id:\n for calendar in self.calendars():\n display_name = calendar.get_display_name()\n if display_name == name:\n return calendar\n if name and not cal_id:\n raise error.NotFoundError(\n \"No calendar with name %s found under %s\" % (name, self.url)\n )\n if not cal_id and not name:\n return self.calendars()[0]\n\n if str(URL.objectify(cal_id).canonical()).startswith(\n str(self.client.url.canonical())\n ):\n url = self.client.url.join(cal_id)\n elif (\n isinstance(cal_id, URL)\n or cal_id.startswith(\"https://\")\n or cal_id.startswith(\"http://\")\n ):\n url = self.url.join(cal_id)\n else:\n url = self.url.join(quote(cal_id) + \"/\")\n\n return Calendar(self.client, name=name, parent=self, url=url, id=cal_id)", "def get_default_calendar(self):\n\n url = self.build_url(self._endpoints.get('default_calendar'))\n\n response = self.con.get(url)\n if not response:\n return None\n\n data = response.json()\n\n # Everything received from cloud must be passed as self._cloud_data_key\n return self.calendar_constructor(parent=self,\n **{self._cloud_data_key: data})", "def create_new_event(self):\n pass", "def create_event(self, name, date):\n user = User.objects.create(username='userdemo')\n user.set_password('calnote24')\n user.save()\n Event.objects.create(name=name, date=date, user_id=user.id)", "def get_calendar_provider(config, src_path):\n economic_config = dict(config.items('Economic'))\n if 'Google' == economic_config['calendar_provider']:\n calendar = CalendarGoogle(config.items('Google'), src_path)\n elif 'Office365' == economic_config['calendar_provider']:\n calendar = CalendarOutlook(config.items('Office365'))\n else:\n print(\"Unsupported calendar provider\")\n sys.exit(1)\n\n return calendar", "def main():\n# year = int(input(\"Enter year for calendar: \"))\n# first_day = first_day_of_year(year)\n\n # Loop through months 1 through 12\n # for month in range(1, NUM_MONTHS + 1):\n# first_day = print_month(first_day, month, year)\n\n canvas = make_canvas(CANVAS_WIDTH, CANVAS_HEIGHT, 'Calendar')\n # present the header, today's date\n\n top_rows(canvas)\n # present two buttons: weekly display and monthly display\n weekly_display_type = True\n date_to_present = 
date.today()\n #button_weekly(canvas,weekly_display_type,date_to_present)\n #button_monthly(canvas, weekly_display_type, date_to_present)\n # present weekly display\n canvas.update()\n canvas.mainloop()", "def getOrganizerCalendar(self):\n ctool = getToolByName(self, 'portal_cpscalendar')\n org_calendar = ctool.getCalendarForPath(\n self.organizer['rpath'], unrestricted=1)\n return org_calendar", "def calendar_event(self, calendar_event_id):\r\n return CalendarEvent(self, calendar_event_id)", "def refresh_calendar():\n manage.refresh_calendar()", "def get_calendar_event(self):\n start_date = self.cleaned_data.get('start_date', None)\n end_date = self.cleaned_data.get('end_date', None)\n\n if start_date is None or end_date is None:\n return None\n \n message_group = CalendarFullDayMessageGroup(group_name=self.cleaned_data.get('message'))\n message_group.save()\n \n date_to_block = start_date\n while date_to_block <= end_date:\n cal_message = CalendarFullDayMessage(message_group=message_group\n , display_name=self.cleaned_data.get('message')\\\n , start_datetime = datetime.combine(date_to_block, time.min)\n , end_datetime = datetime.combine(date_to_block, time.max)\n ) \n cal_message.save() # save the CalendarMessage\n date_to_block = date_to_block + timedelta(days=1)\n\n return cal_message", "def buildCashCal(self, usrCals):\n self.csh_flw_id = ''\n cash_cal = {\n 'summary': 'Cash Flow',\n 'timeZone': 'America/New_York'\n }\n self.csh_flw_cal = self.service.calendars().insert(body=cash_cal).execute()\n self.csh_flw_id = self.csh_flw_cal['id']\n return self.csh_flw_id, self.csh_flw_cal", "def _get_cal_interface(self):\n plugin = rw_peas.PeasPlugin('rwcal_openstack', 'RwCal-1.0')\n engine, info, extension = plugin()\n cal = plugin.get_interface(\"Cloud\")\n rwloggerctx = rwlogger.RwLog.Ctx.new(\"Cal-Log\")\n rc = cal.init(rwloggerctx)\n assert rc == RwTypes.RwStatus.SUCCESS\n\n return cal", "def calendar_list(self, calendar_id):\r\n return CalendarList(self, calendar_id)", "def _create_event(\n project,\n creator_id,\n datetime_start,\n datetime_end,\n description=\"Test Event\",\n location=\"test_location\",\n is_public=False,\n event_type=\"MN\",\n coordinator=None\n):\n event = Event(\n project=project,\n description=description,\n location=location,\n is_public=is_public,\n datetime_start=datetime_start,\n datetime_end=datetime_end,\n coordinator=coordinator,\n creator_id=creator_id\n )\n event.save()\n return event", "def events(self, calendar=\"\", date=\"\"):\n command = list(CALENDAR_COMMAND)\n\n if calendar:\n command.extend([\"-f\",\n join(self.calendar_directory,\n \"calendar.{}\".format(calendar))])\n\n if date:\n command.extend([\"-t\", \"{}\".format(date)])\n\n calendar_output = subprocess.check_output(command).decode(\"utf-8\")\n # Split the lines and filter the empty lines.\n lines = [line for line in calendar_output.split(\"\\n\") if line]\n lines_copy = list(lines)\n index = 0\n for event in lines:\n if event.startswith(\"\\t\") or event.startswith(\" \"):\n # This line is a continuation of the previous one.\n lines_copy[index - 1] += event\n else:\n lines_copy[index] = event\n index += 1\n\n # Substitute multiple whitespaces by one space.\n events = [' '.join(event.split()) for event in lines_copy[:index]]\n\n # Replace '&' by 'and' because PicoTTS pronounces it as 'ampersand'.\n # See https://github.com/snipsco/snips-issues/issues/85\n events = [event.replace('&', 'and') for event in events]\n\n # Create a sentence with the date and a new sentence with the description.\n # 
Strip the asterisk (*) after a date. This means the date changes from year to year.\n return [event[:6] + '.' + event[6:].strip(\"*\") for event in events]", "def placeCalendarButton(data,row,target,path,alts,**kwargs):\n# printPretty(\"args: %s %s %s %s\" % (data,row,target,path))\n datebut = gtk.Button()\n datebut.show()\n image = gtk.Image()\n image.set_from_file(\"img/date.png\")\n datebut.set_image(image)\n datebut.unset_flags(gtk.CAN_FOCUS)\n datebut.connect(\"clicked\",dateChoose,target,data,path,alts,kwargs)\n datebut.set_tooltip_text(\"Click to choose date from calendar\")\n row.pack_start(datebut,0,0,2)", "def __init__(self, master=None, **kw):\r\n # remove custom options from kw before initializating ttk.Frame\r\n fwday = kw.pop('firstweekday', calendar.MONDAY)\r\n year = kw.pop('year', self.datetime.now().year)\r\n month = kw.pop('month', self.datetime.now().month)\r\n locale = kw.pop('locale', None)\r\n sel_bg = kw.pop('selectbackground', '#F2074E')\r\n sel_fg = kw.pop('selectforeground', '#05640e')\r\n\r\n self._date = self.datetime(year, month, 1)\r\n self._selection = None # no date selected\r\n\r\n ttk.Frame.__init__(self, master, **kw)\r\n\r\n self._cal = get_calendar(locale, fwday)\r\n\r\n self.__setup_styles() # creates custom styles\r\n self.__place_widgets() # pack/grid used widgets\r\n self.__config_calendar() # adjust calendar columns and setup tags\r\n # configure a canvas, and proper bindings, for selecting dates\r\n self.__setup_selection(sel_bg, sel_fg)\r\n\r\n # store items ids, used for insertion later\r\n self._items = [self._calendar.insert('', 'end', values='')\r\n for _ in range(6)]\r\n # insert dates in the currently empty calendar\r\n self._build_calendar()\r\n\r\n # set the minimal size for the widget\r\n self._calendar.bind('<Map>', self.__minsize)\r\n\r\n # start and stop dates\r\n self.startDate, self.stopDate = None, None", "def test_admin_calendar_admin_add(self):\n response = self.client.get(\"/admin/appointment/calendar/add/\")\n self.assertEqual(response.status_code, 200)", "def create_appointment():\n\n msg = render_template('date')\n return question(msg)", "def name(self):\n return 'Trakt My Upcoming Calendar'", "def bootstrap_calendar_css(*args):\n return render_to_string(\n 'django_bootstrap_calendar/calendar_css.html'\n )", "def get_trading_calendar(start_date: dt.date, end_date: dt.date) -> pd.DataFrame:\n return nyse.schedule(start_date, end_date)", "def __init__(self, email, password):\n\n self.cal_client = gdata.calendar.client.CalendarClient(source='Google-Calendar_Python_Sample-1.0')\n self.cal_client.ClientLogin(email, password, self.cal_client.source);", "def date(self):\n try:\n return datetime.date.fromordinal(self.round)\n except ValueError:\n raise ValueError(\"you need to run ABCE in calendar mode, use simulation.declare_calendar(2000, 1, 1)\")", "def get_tradingview_ecocal(width, height, show_copyright):\n return_data = ''\n theme = get_sa_theme()\n tradingview_copyright = ''\n\n if str(width) == '0':\n width = '\"100%\"'\n if str(height) == '0':\n height = '\"100%\"'\n\n if str(show_copyright) == '1':\n tradingview_copyright = ''+\\\n '<div class=\"tradingview-widget-copyright\">'+\\\n '<a href=\"https://www.tradingview.com/markets/currencies/economic-calendar/\" rel=\"noopener\" target=\"_blank\">'+\\\n '<span class=\"blue-text\">Economic Calendar</span></a> by TradingView'+\\\n '</div>'\n\n return_data = '' +\\\n '<div class=\"tradingview-widget-container\">'+\\\n ' <div 
class=\"tradingview-widget-container__widget\"></div>'+\\\n tradingview_copyright+\\\n ' <script type=\"text/javascript\" '+\\\n 'src=\"https://s3.tradingview.com/external-embedding/embed-widget-events.js\" async>'+\\\n ' {'+\\\n ' \"colorTheme\": \"'+ theme +'\",'+\\\n ' \"isTransparent\": true,'+\\\n ' \"width\": '+ width +','+\\\n ' \"height\": '+ height +','+\\\n ' \"locale\": \"en\",'+\\\n ' \"importanceFilter\": \"-1,0,1\"'+\\\n '}'+\\\n ' </script>'+\\\n '</div>'\n return return_data", "def func_calendar_list():\r\n creds = None\r\n global page_token\r\n #global new_calendar_list=[]\r\n # The file token.pickle stores the user's access and refresh tokens, and is\r\n # created automatically when the authorization flow completes for the first\r\n # time.\r\n if os.path.exists('token.pickle'):\r\n with open('token.pickle', 'rb') as token:\r\n creds = pickle.load(token)\r\n # If there are no (valid) credentials available, let the user log in.\r\n if not creds or not creds.valid:\r\n if creds and creds.expired and creds.refresh_token:\r\n creds.refresh(Request())\r\n else:\r\n flow = InstalledAppFlow.from_client_secrets_file(\r\n 'credentials.json', SCOPES)\r\n creds = flow.run_local_server(port=0)\r\n # Save the credentials for the next run\r\n with open('token.pickle', 'wb') as token:\r\n pickle.dump(creds, token)\r\n\r\n service = build('calendar', 'v3', credentials=creds)\r\n\r\n calendar_list = service.calendarList().list(pageToken=page_token).execute()\r\n new_calendar_list = []\r\n for calendar_list_entry in calendar_list['items']:\r\n new_calendar_list.append(calendar_list_entry['summary'])\r\n page_token = calendar_list.get('nextPageToken')\r\n return (new_calendar_list)", "def setup_event(site):\n folder = site['institucional']['eventos']\n event = folder['1o-ano-do-site']\n acc = IEventAccessor(event)\n future = datetime.now() + relativedelta(years=1)\n year = future.year\n month = future.month\n day = future.day\n acc.start = datetime(year, month, day, 0, 0, 0)\n acc.end = datetime(year, month, day, 23, 59, 59)\n notify(ObjectModifiedEvent(event))\n event.reindexObject()\n logger.debug(u'Evento padrao configurado')", "def create_instance(self, date):\n raise NotImplementedError", "def test_get_calendar(self):\n url, parsed = self.prepare_urls(\n 'v1:activity-calendar', subdomain=self.company.subdomain)\n \n response = self.client.post(url, {'dt': timezone.now()}, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.post(url, {'dt': timezone.now()}, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n content = json.loads(response.content)\n self.assertTrue(content.has_key('calendar_data'))", "def calendar(request, pYear, pMonth):\n lYear = int(pYear)\n lMonth = int(pMonth)\n lCalendarFromMonth = datetime.date(lYear, lMonth, 1)\n lCalendarToMonth = datetime.date(lYear, lMonth, monthrange(lYear, lMonth)[1])\n lComics = Comic.objects.filter(published=True, date__gte=lCalendarFromMonth, date__lte=lCalendarToMonth).order_by('date')\n lCalendar = ArchiveCalendar(lComics).formatmonth(lYear, lMonth)\n lPreviousYear = lYear\n lPreviousMonth = lMonth - 1\n if lPreviousMonth == 0:\n lPreviousMonth = 12\n lPreviousYear = lYear - 1\n lNextYear = lYear\n lNextMonth = lMonth + 1\n if lNextMonth == 13:\n lNextMonth = 1\n lNextYear = lYear + 1\n pmn = named_month(lPreviousMonth)\n nmn = named_month(lNextMonth)\n \n # now for 
something fun:\n # if we have the first or last comics in a collection, we DON'T want to paginate this!\n fComic = lComics[0]\n lComic = lComics.reverse()[0]\n aComic = fComic.get_first()\n bComic = fComic.get_latest()\n \n \n if aComic is None or fComic.id == aComic.id:\n lPreviousYear = 0\n lPreviousMonth = 0\n if bComic is None or lComic.id == bComic.id:\n lNextYear = 0\n lNextMonth = 0\n \n\n return render(request, 'archive/archive_cal.html', {'Calendar' : mark_safe(lCalendar),\n 'Month' : str(lMonth),\n 'MonthName' : named_month(lMonth),\n 'Year' : str(lYear),\n 'PreviousMonth' : str(lPreviousMonth),\n 'PreviousMonthName' : pmn,\n 'PreviousYear' : str(lPreviousYear),\n 'NextMonth' : str(lNextMonth),\n 'NextMonthName' : nmn,\n 'NextYear' : str(lNextYear),\n })" ]
[ "0.7056294", "0.69618356", "0.69175917", "0.68492675", "0.66771054", "0.6389119", "0.63841164", "0.633121", "0.6214401", "0.6132738", "0.6132738", "0.61323744", "0.60814935", "0.6077951", "0.6074474", "0.605173", "0.603549", "0.6033712", "0.60026586", "0.5928988", "0.58628964", "0.5835569", "0.5823141", "0.580265", "0.5762343", "0.5757049", "0.5740434", "0.5737675", "0.5688385", "0.5678527", "0.56761533", "0.56572145", "0.56430984", "0.56428516", "0.56207204", "0.5607981", "0.5595861", "0.5595861", "0.55840814", "0.557754", "0.5574532", "0.55631185", "0.55619097", "0.55495435", "0.55245703", "0.5503092", "0.54974157", "0.5489613", "0.5489613", "0.5478136", "0.5477829", "0.5455022", "0.545488", "0.54519224", "0.5444297", "0.5435906", "0.5434957", "0.54215336", "0.54088235", "0.53906757", "0.5377979", "0.53748715", "0.53679055", "0.5361061", "0.5328034", "0.5326226", "0.53181285", "0.5311417", "0.53110236", "0.52755916", "0.5261343", "0.5257174", "0.5245781", "0.5230205", "0.52215713", "0.52120805", "0.5202414", "0.5196917", "0.5193496", "0.5177139", "0.51752317", "0.5172718", "0.51647806", "0.5162376", "0.5161491", "0.5152757", "0.5146269", "0.513893", "0.5135851", "0.51309323", "0.51303506", "0.51212317", "0.51148427", "0.51144236", "0.5114073", "0.51138926", "0.5113458", "0.5111029", "0.51059866", "0.5099933" ]
0.5768252
24
For a given reward function and horizon, calculate the MaxEnt policy that gives equal weight to equal reward trajectories
import numpy as np


def calcMaxEntPolicy(trans_mat, horizon, r_weights, state_features):
    """For a given reward function and horizon, calculate the MaxEnt policy
    that gives equal weight to equal reward trajectories."""
    n_states = np.shape(trans_mat)[0]
    n_actions = np.shape(trans_mat)[1]
    partition = np.zeros(n_states)
    policy = np.zeros((n_states, n_actions))
    partition[n_states - 1] = 1  # terminal state seeds the backward recursion
    reward = np.exp(np.dot(r_weights, state_features.T))  # exp(reward) per state

    # Backward pass: partition function for each state and unnormalized policy
    # value for each (state, action). The partition is copied so that every
    # iteration reads the previous horizon's values; a plain assignment would
    # alias the array and overwrite them mid-iteration.
    for i in range(horizon):
        new_partition = np.copy(partition)
        for state in range(n_states):
            for action in range(n_actions):
                p = np.array([trans_mat[state, action, new_state] * reward[state] * partition[new_state]
                              for new_state in range(n_states)])
                policy[state, action] = np.sum(p)
            new_partition[state] = np.sum(policy[state, :])
            if state == n_states - 1:
                new_partition[state] = 1  # keep the terminal state's partition fixed
        partition = new_partition

    # Local action probability computation: normalize each state's row by its
    # partition value
    for state in range(n_states):
        for action in range(n_actions):
            if partition[state] != 0.0:
                policy[state, action] = policy[state, action] / partition[state]

    return policy
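A minimal usage sketch for the snippet above, showing the expected argument shapes; the toy 3-state, 2-action transition matrix, one-hot features, and reward weights below are illustrative assumptions, not part of the original record.

import numpy as np

# Illustrative assumptions: 3 states, 2 actions; the last state is absorbing,
# matching the terminal-state handling inside calcMaxEntPolicy.
# trans_mat[s, a, s'] = P(s' | s, a)
trans_mat = np.zeros((3, 2, 3))
trans_mat[0, 0, 1] = 1.0   # action 0 moves state 0 -> state 1
trans_mat[0, 1, 2] = 1.0   # action 1 moves state 0 -> terminal state 2
trans_mat[1, 0, 2] = 1.0
trans_mat[1, 1, 0] = 1.0
trans_mat[2, :, 2] = 1.0   # terminal state self-loops under both actions

state_features = np.eye(3)               # one-hot feature per state
r_weights = np.array([0.0, 0.5, 1.0])    # reward weight per feature

policy = calcMaxEntPolicy(trans_mat, horizon=10,
                          r_weights=r_weights, state_features=state_features)
print(policy)   # (3, 2) array of local action probabilities per state

Each row of the returned array holds one state's local action probabilities, so actions leading toward higher-reward states receive exponentially more probability mass.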
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_policy(env, policy, V, discount_factor):\n\n for state in range(env.nS):\n # for a given state compute state-action value.\n action_values = one_step_lookahead(env, state, V, discount_factor)\n\n # choose the action which maximizes the state-action value.\n policy[state] = np.argmax(action_values)\n\n return policy", "def greedy_policy(self):\n # print(self.weights)\n policy = defaultdict(lambda: 0)\n\n for entry, values in self.weights.items():\n policy[entry] = np.argmax(self.weights[entry])\n # print(policy)\n\n return policy", "def update_policy(self, minibatch_size):\n \n steps = self.rewards.shape[0]\n batch_size = self.rewards.shape[0] * self.rewards.shape[1]\n #steps = 500\n #batch_size = 500\n #print(steps)\n #print(batch_size)\n \n # Compute advantages\n '''\n with torch.no_grad():\n if self.gae:\n advantages = torch.zeros_like(self.rewards).to(self.training_device)\n lastgaelam = 0\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n nextvalues = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t + 1]\n nextvalues = self.state_values[t + 1]\n delta = self.rewards[t] + self.gamma * nextvalues * nextnonterminal - self.state_values[t]\n advantages[t] = lastgaelam = delta + self.gamma * self.gae_lambda * nextnonterminal * lastgaelam\n returns = advantages + self.state_values\n else:\n returns = torch.zeros_like(self.rewards).to(self.training_device)\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n next_return = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t+1]\n next_return = returns[t+1]\n returns[t] = self.rewards[t] + self.gamma * nextnonterminal * next_return\n advantages = returns - self.state_values\n ''' \n returns = torch.zeros_like(self.rewards).to(self.training_device)\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n next_return = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t+1]\n next_return = returns[t+1]\n returns[t] = self.rewards[t] + self.gamma * nextnonterminal * next_return\n advantages = returns - self.state_values\n \n\n # flatten the batch\n #b_obs = self.states.reshape((-1,) + self.state_space)\n #print(self.states.shape)\n b_obs = self.states.reshape((-1,4)).detach()\n b_logprobs = self.action_probs.reshape(-1,1).detach()\n b_actions = self.actions.reshape((-1,)).detach()\n b_advantages = advantages.reshape(-1,1)\n b_returns = returns.reshape(-1,1)\n b_values = self.state_values.reshape(-1,1)\n \n # Optimize policy and value network for K epochs, run optimization in minibatches\n \n inds = np.arange(batch_size)\n for i_epoch_pi in range(self.epochs):\n np.random.shuffle(inds)\n for start in range(0, batch_size, minibatch_size):\n end = start + minibatch_size\n minibatch_ind = inds[start:end]\n mb_advantages = b_advantages[minibatch_ind]\n if self.norm_adv:\n mb_advantages = (mb_advantages - mb_advantages.mean()) / (mb_advantages.std() + 1e-8)\n \n #_, newlogproba, entropy = self.get_action(b_obs[minibatch_ind], b_actions[minibatch_ind])\n newlogproba, entropy = self.evaluate(b_obs[minibatch_ind], b_actions[minibatch_ind])\n #ratio = (newlogproba - b_logprobs[minibatch_ind]).exp()\n ratio = torch.exp((newlogproba - b_logprobs[minibatch_ind].detach()))\n \n # Stats\n approx_kl = (b_logprobs[minibatch_ind] - newlogproba).mean()\n\n # Policy loss\n pg_loss1 = -mb_advantages * ratio\n pg_loss2 = -mb_advantages * torch.clamp(ratio, 1 - 
self.clip_epsilon, 1 + self.clip_epsilon)\n pg_loss = torch.max(pg_loss1, pg_loss2).mean()\n entropy_loss = entropy.mean()\n\n # Value loss\n _, new_values = self.policy.forward(b_obs[minibatch_ind])\n if self.clip_vloss:\n \n v_loss_unclipped = self.MseLoss(new_values,b_returns[minibatch_ind])\n #v_loss_unclipped = ((new_values - b_returns[minibatch_ind]) ** 2)\n v_clipped = b_values[minibatch_ind] + torch.clamp(new_values - b_values[minibatch_ind],\n -self.clip_epsilon, self.clip_epsilon)\n #v_loss_clipped = (v_clipped - b_returns[minibatch_ind]) ** 2\n v_loss_clipped = self.MseLoss(v_clipped,b_returns[minibatch_ind])\n v_loss_max = torch.max(v_loss_unclipped, v_loss_clipped)\n #v_loss = 0.5 * v_loss_max.mean()\n v_loss = 0.5 * v_loss_max\n else:\n #v_loss = 0.5 * ((new_values - b_returns[minibatch_ind]) ** 2).mean()\n v_loss = self.MseLoss(new_values,b_returns[minibatch_ind])\n\n loss = pg_loss + v_loss * self.vf_coeff - self.ent_coeff * entropy_loss\n\n self.optimizer.zero_grad()\n loss.backward()\n torch.nn.utils.clip_grad_norm_(self.policy.parameters(), self.max_grad_norm)\n self.optimizer.step()\n # Copy new weights into old policy:\n self.old_policy.load_state_dict(self.policy.state_dict())", "def getPolicy(self, state):\n \"*** YOUR CODE HERE ***\"\n # OUR CODE HERE\n possibleActions = self.mdp.getPossibleActions(state)\n #checking for terminal state (no possible actions)\n if len(possibleActions) is 0: \n return None\n \n #attempt at using the Counter\n eValsActions = util.Counter()\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n return eValsActions.argMax()\n \n #fail attempt using lists :(\n \"\"\"\n #list to hold the expected value of the actions\n eValsActions = []\n #iterate through all actions and their transtion states\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n #expected value of reward with discount * the value of the transitions\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n #now iterate through and find the action with the best value\n #(that will be the best action)\n maxVal = -float(\"inf\")\n bestAction = None\n for action in possibleActions:\n if eValsActions[action] > maxVal:\n maxVal = eValsAction[action]\n bestAction = action\n \"\"\"\n return action\n # END OUR CODE", "def create_greedy_policy(self):\n\n def policy_fn(state):\n return self.actor_baseline.predict([[state]])[0][0]\n\n return policy_fn", "def make_epsilon_greedy_policy(estimator, epsilon, nA):\n def policy_fn(observation):\n A = np.ones(nA, dtype=float) * epsilon / nA\n q_values = estimator.predict(observation)\n# print(q_values)\n best_action = np.argmax(q_values)\n A[best_action] += (1.0 - epsilon)\n return A\n return policy_fn", "def _policy_nn(self):\n with tf.variable_scope(\"reward_params\") as scope:\n \n self.h1 = tf.layers.dense(self.input_ph, self.hidden_dim, tf.nn.tanh,\n kernel_initializer=tf.random_normal_initializer(\n stddev=np.sqrt(1 / self.params_dim)), name=\"h1\")\n self.h2 = tf.layers.dense(self.h1, self.hidden_dim, tf.nn.tanh,\n kernel_initializer=tf.random_normal_initializer(\n stddev=np.sqrt(1 / self.params_dim)), name=\"h2\")\n self.rewards = tf.layers.dense(self.h2, 1,\n 
kernel_initializer=tf.random_normal_initializer(\n stddev=np.sqrt(1 / self.hidden_dim)), name=\"rewards\")\n self.rewards_sum = tf.reduce_sum(self.rewards)", "def extract_policy(env, v, gamma):\n \n policy = np.zeros(env.nS, dtype=int)\n\n ############################\n # YOUR CODE #\n ############################\n for i in range (env.nS):\n policy[i] = np.argmax([env.P[i][j][0][2] + gamma * v[env.P[i][j][0][1]] for j in range (6)])\n\n return policy", "def extract_policy(env, v, gamma = 1.0):\n policy = np.zeros(env.nS)\n for s in range(env.nS):\n q_sa = np.zeros(env.action_space.n)\n for a in range(env.action_space.n):\n for next_sr in env.P[s][a]:\n # next_sr is a tuple of (probability, next state, reward, done)\n p, s_, r, _ = next_sr\n q_sa[a] += (p * (r + gamma * v[s_]))\n policy[s] = np.argmax(q_sa)\n return policy", "def maxEntIRL(trans_mat, state_features, demos, seed_weights, n_epochs, horizon, learning_rate):\n\tfeature_exp = find_feature_expectations(state_features, demos)\n\n\tn_states = np.shape(trans_mat)[0]\n\tn_actions = np.shape(trans_mat)[1]\n\n\tn_features = np.shape(state_features)[1]\n\tr_weights = np.zeros(n_features) + seed_weights\n\n\t# Probability for initial state trajectories\n\tstart_state_count = np.zeros(n_states)\n\tfor demo in demos:\n\t\tstart_state_count[demo[0]] += 1\n\t\tp_start_dist = start_state_count / np.shape(demos)[0]\n\n\t# Iterate\n\tfor epoch in range(n_epochs):\n\t\t# print(\"epoch: {}\".format(epoch))\n\n\t\t# Calculate Max Ent Policy\n\t\tpolicy = calcMaxEntPolicy(trans_mat, horizon, r_weights, state_features)\n\n\t\t# Calculate Expected State Frequency\n\t\texpected_svf = calcExpectedStateFreq(trans_mat, horizon, p_start_dist, policy)\n\n\t\t# Update reward weights using gradient\n\t\tgradient = feature_exp - expected_svf.T.dot(state_features)\n\t\tr_weights += learning_rate * gradient\n\t\tprint epoch, np.linalg.norm(gradient)\n\n\tprint policy\n\tprint policy.argmax(axis=1)\n\treturn r_weights", "def make_epsilon_greedy_policy(estimator, nA):\n def policy_fn(sess, observation, epsilon):\n A = np.ones(nA, dtype=float) * epsilon / nA\n q_values = estimator.predict(sess, np.expand_dims(observation, 0))[0]\n print(f'q_values: {q_values}')\n best_action = np.argmax(q_values)\n A[best_action] += (1.0 - epsilon)\n return A\n return policy_fn", "def extract_policy(env, v, GAMMA=1.0):\r\n policy = np.zeros(env.nS)\r\n for s in range(env.nS):\r\n q_sa = np.zeros(env.nA)\r\n for a in range(env.nA):\r\n q_sa[a] = sum([p*(r+GAMMA*v[s_]) for p, s_, r, _ in env.P[s][a]])\r\n policy[s] = np.argmax(q_sa)\r\n return policy", "def _calc_policy(self, V: np.ndarray) -> np.ndarray:\n policy = np.zeros([self.state_dim, self.action_dim])\n \n for s in self.mdp._state_dict:\n action_dict = {}\n compare = 0.0\n for a in self.mdp._action_dict:\n temp = 0.0\n for next_s in self.mdp._state_dict:\n p = self.mdp.P[self.mdp._state_dict[s],self.mdp._action_dict[a],self.mdp._state_dict[next_s]]\n r = self.mdp.R[self.mdp._state_dict[s],self.mdp._action_dict[a],self.mdp._state_dict[next_s]]\n Vs = V[self.mdp._state_dict[next_s]]\n temp = temp + p * (r + self.gamma * Vs)\n compare = max(compare, temp)\n action_dict[a]= temp\n res = [t for t,v in action_dict.items() if v == compare][0]\n policy[self.mdp._state_dict[s],self.mdp._action_dict[res]] = 1.0\n\n return policy", "def update(self, state, action, nextState, reward):\n \"\"\"Description:\n Use second equation in slide 71 of MDP\n Adjest weight of active features depend on tranistion \n \"\"\"\n \"\"\" YOUR CODE HERE 
\"\"\"\n feat = self.featExtractor.getFeatures(state, action)\n\n # if weight is empty, then weight will need to initial to 1 for all features\n # According to which Extractor user choose, weight counter will have equal number of keys.\n if len(self.weight) == 0:\n feat = self.featExtractor.getFeatures(state, action)\n self.weight.incrementAll(feat.keys(), 1)\n \n maxQns = self.getValue(nextState)\n if maxQns == None:\n maxQns = 0\n Qsa = self.getQValue(state, action)\n difference = ( reward + self.discountRate * maxQns ) - Qsa\n \n for key in self.weight.keys():\n self.weight[key] += (self.alpha * difference * feat[key])\n \n \n \"\"\" END CODE \"\"\"", "def extract_policy(env, v, gamma = 1.0):\n number_of_states = env.unwrapped.nS\n\n policy = np.zeros(number_of_states)\n for s in range(number_of_states):\n q_sa = np.zeros(env.action_space.n)\n for a in range(env.action_space.n):\n for next_sr in env.unwrapped.P[s][a]:\n # next_sr is a tuple of (probability, next state, reward, done)\n p, s_, r, _ = next_sr\n q_sa[a] += (p * (r + gamma * v[s_]))\n policy[s] = np.argmax(q_sa)\n return policy", "def convert_value_to_policy(env, V, obs, gamma):\n adjacent_states = env.P[obs]\n action_values = []\n for movement, state in adjacent_states.items():\n action_value = 0.0\n for prob, next_state, reward, done in state:\n action_value += prob * (reward + gamma * V[next_state])\n action_values.append(action_value)\n return np.argmax(action_values)", "def policy(matrix, weight):\n # for each column of weights, sum (matrix[i] * weight[i]) using dot product\n dot_product = matrix.dot(weight)\n # find the exponent of the calculated dot product\n exp = np.exp(dot_product)\n # policy is exp / sum(exp)\n policy = exp / np.sum(exp)\n return policy", "def reward_calc(state, action, setpoint):\r\n return max(-np.square(state - setpoint) - action, -150)", "def evaluate_policies(self):\n combined_rewards = {}\n previous_current_policy = self.current_policy\n for policy in self.policy_list:\n mean_reward_f = lambda x: (sum(x)/len(x))\n value = mean_reward_f(policy.rewards_list)\n #print(f\" Memory length: {policy.memory_length}. Mean reward: {value}.\")\n combined_rewards[policy.name_id] = value\n best_policy = self.find_max_value(combined_rewards)\n if self.current_policy.name_id in best_policy:\n print(f\" Still using memory length {self.current_policy.memory_length}. Turn: {self.turns}.\") \n else:\n self.policy_switching(best_policy)\n if self.current_policy != previous_current_policy:\n print(f\"Switched to memory length {self.current_policy.memory_length}. Turn: {self.turns}.\")", "def policy_eval(policy, env, discount_factor=1.0, theta=0.00001):\n\n # Start with a random (all 0) value function\n V = np.zeros(env.nS)\n \n while True: #any(Vdiff > theta):\n \n delta_V = 0\n\n for i in range(env.nS):\n \n # need to calculate the value of taking each of the available actions\n\n action_val = np.zeros(env.nA)\n\n for a in range(env.nA):\n \n # get transition tuple for this state and action\n tup = env.P[i][a][0]\n \n # calculate the value of this action/state? 
\n # value = reward + gamma * (prob * V[next_state])\n # error here I think, probability missing\n action_val[a] = tup[0] * (tup[2] + discount_factor * V[tup[1]])\n \n \n Vold = V[i]\n Vnew = np.dot(policy[i],action_val)\n delta_V = max(delta_V,np.abs(Vnew - Vold))\n # get state value by multiplying probability of taking action (policy) by action value\n V[i] = Vnew\n \n #print(action_val)\n #print(policy[i])\n #print(V[i])\n #print(delta_V)\n\n # function only works if I use this delta rule to terminate\n if delta_V < theta:\n break\n return np.array(V)", "def reward_function(self):\r\n def R(state, decision, nodes):\r\n return -1.0/1000*nodes['G'].get_preds_value(state)*(decision['G:R_1']+decision['G:L'])\r\n \r\n return R", "def make_epsilon_greedy_policy(estimator, nA):\n def policy_fn(sess, observation, epsilon):\n A = np.ones(nA, dtype=float) * epsilon / nA\n q_values = estimator.predict(sess, np.expand_dims(observation, 0))[0]\n best_action = np.argmax(q_values)\n A[best_action] += (1.0 - epsilon)\n return A\n return policy_fn", "def policy_evaluation(P, nS, nA, policy, gamma=0.9, tol=1e-8):\n value_function = np.zeros(nS)\n ############################\n # YOUR IMPLEMENTATION HERE #\n def next_state_reward(P,state,action,gamma,value_function):\n sum_reward=0\n for p,nextS,r,boolean_v in P[state][action]:\n sum_reward+=p*( r + gamma* value_function[nextS])\n #print(sum_reward) \n return sum_reward\n\n while True:\n delta=0;\n for state in range(nS):\n new_value=0;\n for action in range(nA):\n sum_reward=next_state_reward(P,state,action,gamma,value_function)\n new_value+=policy[state][action]*sum_reward\n delta= max(delta, abs(new_value-value_function[state]))\n value_function[state] = new_value\n #print(value_function)\n if(delta < tol):\n break\n\n ############################\n return value_function", "def get_greedy(self, v):\n policy = np.empty(v.shape, dtype=int)\n for i in range(self.N):\n for j in range(self.N):\n v1 = self.theta[i] + self.epsilon[j] + self.beta * v[i, j]\n v2 = (self.theta[i] + self.G_mean + self.beta *\n np.dot(v[i, :], self.G_probs))\n v3 = (self.G_mean + self.F_mean + self.beta *\n np.dot(self.F_probs, np.dot(v, self.G_probs)))\n if v1 > max(v2, v3):\n action = 1\n elif v2 > max(v1, v3):\n action = 2\n else:\n action = 3\n policy[i, j] = action\n\n return policy", "def make_epsilon_greedy_policy(estimator, epsilon, nA):\r\n def policy_fn(observation):\r\n A = np.ones(nA, dtype=float) * epsilon / nA\r\n q_values = estimator.predict(observation)\r\n best_action = np.argmax(q_values)\r\n A[best_action] += (1.0 - epsilon)\r\n return A\r\n return policy_fn", "def createEpsilonGreedyPolicy(Q, epsilon, num_actions):\n\n def policyFunction(state):\n Action_probabilities = np.ones(num_actions,\n dtype=float) * epsilon / num_actions\n\n best_action = np.argmax(Q[state])\n Action_probabilities[best_action] += (1.0 - epsilon)\n return Action_probabilities\n\n return policyFunction", "def createEpsilonGreedyPolicy(Q, epsilon, num_actions):\n def policyFunction(state):\n\n Action_probabilities = np.ones(num_actions,\n dtype = float) * epsilon / num_actions\n\n best_action = np.argmax(Q[state])\n Action_probabilities[best_action] += (1.0 - epsilon)\n return Action_probabilities\n\n return policyFunction", "def _policy_improvement(self) -> Tuple[np.ndarray, np.ndarray]:\n # Start with a (random) policy\n policy = np.zeros([self.state_dim, self.action_dim])\n V = np.zeros([self.state_dim])\n #random init the policy\n for s in range(self.state_dim):\n policy[s,0] = 0.0\n 
policy[s,1] = 0.0\n policy[s,2] = 1.0\n\n V = self._policy_eval(policy)\n\n policy_stable = False\n dr = 0.9\n\n while (policy_stable != True):\n policy_stable = True\n for s in self.mdp._state_dict:\n old_action = (policy[self.mdp._state_dict[s]]).tolist()\n action_dict = {}\n for a in self.mdp._action_dict:\n temp = 0.0\n for next_s in self.mdp._state_dict:\n p = self.mdp.P[self.mdp._state_dict[s],self.mdp._action_dict[a],self.mdp._state_dict[next_s]]\n r = self.mdp.R[self.mdp._state_dict[s],self.mdp._action_dict[a],self.mdp._state_dict[next_s]]\n Vs = V[self.mdp._state_dict[next_s]]\n temp = temp + p * (r + dr * Vs)\n action_dict[self.mdp._action_dict[a]]= temp \n max_act = max(action_dict.values())\n V[self.mdp._state_dict[s]] = max_act\n res = [t for t,v in action_dict.items() if v == max_act][0]\n for opt in range(self.action_dim):\n if opt == res:\n policy[self.mdp._state_dict[s],opt] = 1.0\n else:\n policy[self.mdp._state_dict[s],opt] = 0.0\n if (old_action - policy[self.mdp._state_dict[s]]).any() == True:\n \n policy_stable = False\n if policy_stable == False:\n V = self._policy_eval(policy)\n \n return policy, V", "def acquisition_function_expected_policy_divergence(\n gp_reward_model: BasicGPRewardModel,\n) -> int:\n (\n candidate_queries_gp_repr,\n candidate_queries_linear_combination,\n candidate_queries_gp_repr_idx,\n ) = gp_reward_model.get_candidate_queries_gp_repr()\n\n # mu_pred, sigma_pred = gp_reward_model.get_candidate_queries_reward_predictions()\n mu_pred, cov_pred = gp_reward_model.gp_model.predict_multiple(\n candidate_queries_gp_repr,\n linear_combination=candidate_queries_linear_combination,\n )\n sigma_pred = np.diag(cov_pred)\n\n if gp_reward_model.environment_is_tabular:\n\n def policy_distance(policy1, policy2):\n return np.sum(policy1.matrix != policy2.matrix)\n\n else:\n\n def policy_distance(policy1, policy2):\n return np.sum(np.square(policy1.matrix - policy2.matrix))\n\n max_diff = 0\n orig_policy = _get_reward_model_policy(gp_reward_model)\n next_x = [0]\n for i in range(len(candidate_queries_gp_repr)):\n gp_repr = candidate_queries_gp_repr[i]\n linear_combination = candidate_queries_linear_combination[i]\n obs = (gp_repr, linear_combination)\n\n # print(i)\n policy_upper = _get_reward_model_policy(\n gp_reward_model, temporary_observation=(obs, mu_pred[i] + sigma_pred[i])\n )\n\n diff_i = policy_distance(policy_upper, orig_policy)\n\n lower_bound = True\n if lower_bound:\n policy_lower = _get_reward_model_policy(\n gp_reward_model,\n temporary_observation=(obs, mu_pred[i] - sigma_pred[i]),\n )\n diff_i += policy_distance(policy_lower, orig_policy)\n\n if diff_i > max_diff:\n max_diff = diff_i\n next_x = [i]\n elif diff_i == max_diff:\n next_x.append(i)\n\n return candidate_queries_gp_repr_idx[np.random.choice(next_x)]", "def policies(self, QTable, epsilon, state, next_states, action_to_do): # Inspiration from https://www.geeksforgeeks.org/q-learning-in-python/?fbclid=IwAR1UXR88IuJBhhTakjxNq_gcf3nCmJB0puuoA46J8mZnEan_qx9hhoFzhK8\r\n num_actions = 5 # 5 actions-value, [moved_out, into_goal, send_opp_home, send_self_home, move_token] \r\n def epsilonGreedyPolicy(): \r\n tmp_state = str(state.state[0])\r\n valid_actions = np.append(action_to_do, True) # the True appended is move_token\r\n valid_act_len = len(np.where(valid_actions==True)[0])\r\n\r\n Action_probabilities = np.ones(num_actions, dtype = float) * epsilon / valid_act_len # divides probability based on number of valid actions and epsilon (each 0.025 if 4 actions) \r\n Action_probabilities = 
np.multiply(Action_probabilities, valid_actions)\r\n\r\n # If same values in QTable choose random valid action \r\n best_action = np.argmax(QTable[tmp_state]) # Find index of action which gives highest QValue\r\n # Check if valid action else find new best action\r\n if not valid_actions[best_action]:\r\n actions = np.argsort(-QTable[tmp_state]) # descending order of action values\r\n for i in range(len(valid_actions)):\r\n if valid_actions[actions[i]]:\r\n best_action = actions[i]\r\n break\r\n\r\n Action_probabilities[best_action] += (1.0 - epsilon) # Assigns rest probability to best action so probability sums to 1\r\n\r\n return Action_probabilities \r\n\r\n def greedyPolicy():\r\n tmp_state = str(state.state[0])\r\n valid_actions = np.append(action_to_do, True) # the True appended is move_token\r\n\r\n Action_probabilities = np.zeros(num_actions, dtype = float)\r\n\r\n best_action = np.argmax(QTable[tmp_state]) # Find index of action which gives highest QValue\r\n # Check if valid action else find new best action\r\n if not valid_actions[best_action]:\r\n actions = np.argsort(-QTable[tmp_state]) # descending order of action values\r\n for i in range(len(valid_actions)):\r\n if valid_actions[actions[i]]:\r\n best_action = actions[i]\r\n break\r\n\r\n\r\n Action_probabilities[best_action] += 1.0\r\n return Action_probabilities\r\n\r\n\r\n if(self.__chosenPolicy == \"epsilon greedy\"):\r\n return epsilonGreedyPolicy \r\n if(self.__chosenPolicy == \"greedy\"):\r\n return greedyPolicy", "def policy_eval_v(policy, env, discount_factor=1.0, theta=0.00001):\n # Start with an all 0 value function\n V = np.zeros(env.nS)\n \n # loop door alle states heen \n # sla de oude state value op \n # Bereken de nieuwe state value door de SOM (kans omhoog * loop over waar je terrecht kunt komen * reward) kans omlaag..\n # kijk of je nog door moet gaan of stoppen\n delta = 1000 \n while delta > theta:\n # for x in range(2):\n delta = 0\n \n# loop throw possible states\n for state in range(env.nS):\n old_state_value = V[state]\n new_state_value = 0\n\n # loop shrow possible actions in state\n for action in range(env.nA):\n\n # print(\"kans omhoog\", policy[state][action])\n # print(\"kans omhoog uitkomen\", env.P[state][action][0][0])\n # print(\"direct reward\",env.P[state][action][0][2] )\n # print(\"value of that new state\", discount_factor * V[env.P[state][action][0][1]] )\n\n current_state_value = policy[state][action] * env.P[state][action][0][0] * ( env.P[state][action][0][2] + ( discount_factor * V[env.P[state][action][0][1]] ) ) \n# print(\"current state value\", current_state_value)\n new_state_value += current_state_value\n \n delta = max(delta, abs(old_state_value - new_state_value))\n V[state] = new_state_value\n# print(V[state])\n# print(\"delta\", delta)\n return np.array(V)", "def getPolicy(self, state):\n \"\"\"Description:\n Find all of q-values of current state, and choose the action \n with the hight q-value as optimal policy\n \"\"\"\n \"\"\" YOUR CODE HERE \"\"\"\n legalActions = self.getLegalActions(state)\n action = None\n policy = util.Counter() # use counter to store action and its q-value\n \n if len(legalActions) == 0:\n return action\n \n for a in legalActions:\n policy[a] = self.getQValue(state, a)\n action = policy.argMax()\n return action\n\n \"\"\" END CODE \"\"\"", "def policyImprv(P,R,gamma,policy,v):\n def one_step_lookahead(s, V):\n \"\"\"\n :param state: current state\n :param v: current value estimator\n :return: A, list of optimal action values under current value 
estimator\n \"\"\"\n num_a = policy.shape[1]\n A = np.zeros(num_a)\n for a in range(num_a):\n for s_prime in range(num_S):\n A[a] += P[s, a, s_prime] * (R[s, a, s_prime] + gamma * V[s_prime])\n return A\n\n # initialization \n num_S, num_a = policy.shape\n policy_stable = True\n\n for s in range(num_S):\n\n chosen_a = np.argmax(policy[s])\n\n action_values = one_step_lookahead(s, v)\n best_a = np.argmax(action_values)\n\n if chosen_a != best_a:\n policy_stable = False\n\n for i in range(num_a):\n if i != best_a:\n policy[s][i] = 0\n if i == best_a:\n policy[s][best_a] = 1\n return policy, policy_stable", "def reward_func(self, state):\n if abs(state['theta']) < 1:\n return 2.0\n\n elif abs(state['theta']) < 3:\n return 0.0\n\n elif abs(state['theta']) > 30:\n return -100.0\n return -2.0", "def make_epsilon_greedy_policy(self, Q, epsilon, nA):\n\n def policy_fn(observation,p):\n A = np.ones(nA, dtype=float) * epsilon / nA\n q=Q(observation,p)\n\n best_action = np.argmax(q)\n print(\"action called:\",self.env.action_labels[best_action])\n A[best_action] += (1.0 - epsilon)\n return A\n\n return policy_fn", "def optimise_policy(self, elapsed_episodes):\n\n # Stack gathered data for torch processing\n log_action_prob_stack = torch.stack(self.log_action_prob_list, dim=0).to(self.train_device).squeeze(-1)\n rewards_stack = torch.stack(self.tensor_rewards_list, dim=0).to(self.train_device).squeeze(-1)\n\n # Reset storage variables for following learning\n self.reset()\n\n # Discount rewards with gamma parameter, center and normalise data\n discounted_rewards = discount_rewards(rewards_stack, self.gamma)\n discounted_rewards -= torch.mean(discounted_rewards)\n discounted_rewards /= torch.std(discounted_rewards)\n\n # Weight the log_probabilities by the discounted rewards\n # to give more value to the more rewarding actions.\n # We take the negative of the probabilities to compute the value as a loss.\n weighted_probs = (-log_action_prob_stack) * discounted_rewards\n\n # Actual backpropagation, minimising loss\n loss = torch.sum(weighted_probs)\n loss.backward()\n\n if (elapsed_episodes + 1) % self.batch_size == 0:\n self.update_policy()", "def make_epsilon_greedy_policy(Q, epsilon, nA):\n def policy_fn(observation):\n A = np.ones(nA, dtype=float) * epsilon / nA\n best_action = np.argmax(Q[observation])\n A[best_action] += (1.0 - epsilon)\n return A\n return policy_fn", "def policy_improvement(P, nS, nA, value_from_policy, policy, gamma=0.9):\n\n\tnew_policy = np.zeros(nS, dtype='int')\n\n\t############################\n\t# YOUR IMPLEMENTATION HERE #\n\tfor s in range(nS):\n\t\tq_values = np.zeros(nA)\n\t\tfor action in range(nA):\n\t\t\tcurrent_q_value = 0\n\t\t\tfor transition in P[s][action]:\t# for every possible transition\n\t\t\t\t# print(len(P[s][action]))\n\t\t\t\tprobability = transition[0]\n\t\t\t\treward = transition[2]\n\t\t\t\tnext_state = transition[1]\n\t\t\t\tvalue_next_state = value_from_policy[next_state]\n\n\t\t\t\tcurrent_q_value += probability * (reward + gamma * value_next_state)\n\n\t\t\tq_values[action] = current_q_value\n\n\t\tnew_policy[s] = np.argmax(q_values)\n\n\n\t# print(new_policy)\n\t############################\n\treturn new_policy", "def end_of_horizon_cost_rule(_m):\r\n\r\n return (m.DELTA[m.Y.last()] / m.INTEREST_RATE) * (m.OP[m.Y.last()] + m.FOM[m.Y.last()])\r\n # return (m.DELTA[m.Y.last()] / m.INTEREST_RATE) * m.FOM[m.Y.last()]\r", "def get_reward(self):\n #original reward function: reward = 1.-.3*(abs(self.sim.pose[:3] - self.target_pos)).sum()\n thrusts = 
self.sim.get_propeler_thrust(self.sim.prop_wind_speed)\n linear_forces = self.sim.get_linear_forces(thrusts)\n distance = np.linalg.norm(self.target_pos - self.sim.pose[:3])\n #speed = math.sqrt(np.square(self.sim.find_body_velocity()).sum())\n #with 300x300x300m env, the max distance from one corner to another is 519\n max_distance = 519\n #Focus quadcopter on not crashing but first rewarding an upward linear force until at the height of the target\n if self.sim.pose[2] < self.target_pos[2]:\n #velocity_discount = 1/speed\n reward = np.tanh(linear_forces[2])\n #after getting to the correct z-coordinate, move to the correct y-coordinate\n elif self.sim.pose[1] < self.target_pos[1]:\n #velocity_discount = 1/speed\n reward = 1 + np.tanh(linear_forces[1])\n #finally, after getting rewards for the x and y coordinates, give reward for distance\n #at this stage, the drone will have overshot the x and y coordinates, but it would be in a better area to\n #start searching for the x coordinate\n elif distance > 1 and self.sim.pose[2] > self.target_pos[2] and self.sim.pose[1] > self.target_pos[1] :\n reward = 2 + (1-math.pow((distance/300),.04))\n elif distance < 1:\n self.success = True\n reward = 100\n #possible reward for hover: np.exp(-np.square(linear_forces[2]))\n return reward", "def calculate_controller_reward(self, controller1, controller2):", "def reward_calc(self, reward_traj,V,V_end):\n r_all = np.concatenate((reward_traj,[V_end]),-1)\n V_all = V #np.concatenate((V,[V_end]),-1)\n delta = r_all[:-1] + self.gamma * V_all[1:] - V_all[:-1]\n \n adv = Generalized_Adv_Estimator.discounted_sum(delta,self.gamma*self.lam)\n rtg = adv + V_all[:-1]\n\n adv = adv.astype('float32')\n rtg = rtg.astype('float32')\n\n return adv, rtg", "def objective(self,w):\n diffs = self.get_y_times_diffs(self.get_split_weights(w))\n #print diffs, sigmoid(diffs)\n obj = -np.sum(np.log(sigmoid(diffs))) #negative, since minimising\n # regularisation\n obj += 0.5 * self.alpha * np.dot(w[:self.interp_index[0]], w[:self.interp_index[0]])\n return obj", "def loss_function(agent, trajectories):\n # All ALL_CAPS variables are constants.\n\n # QUESTIOM: The trajectories already have behavior_logits, why is the need\n # to calculate the target_logits?\n # trajectories shape: list of trajectory\n # target_logits: ArgsActionLogits\n target_logits, baselines = agent.unroll(trajectories)\n\n trajectories = U.stack_namedtuple(trajectories) \n trajectories = U.namedtuple_zip(trajectories) \n\n loss_actor_critic = 0.\n if True:\n rewards = torch.tensor(trajectories.reward, dtype=torch.float32, device=device)\n print(\"trajectories.reward\", rewards) if debug else None \n print(\"trajectories.reward.shape\", rewards.shape) if debug else None\n\n # use normalize\n if False:\n scale_dim = 1\n rewards = (rewards - torch.mean(rewards, dim=scale_dim, keepdim=True)) / (torch.std(rewards, dim=scale_dim, keepdim=True) + 1e-9)\n\n print(\"trajectories.reward\", rewards) if debug else None \n print(\"trajectories.reward.shape\", rewards.shape) if debug else None\n\n lambda_loss = td_lambda_loss(baselines[0], rewards, trajectories)\n print(\"lambda_loss:\", lambda_loss) if 1 else None\n loss_actor_critic += (10. 
* lambda_loss)\n\n # we add the split_vtrace_pg_loss\n pg_loss = split_vtrace_pg_loss(target_logits, baselines[0], rewards, trajectories)\n print(\"pg_loss:\", pg_loss) if 1 else None\n loss_actor_critic += (1.0 * pg_loss)\n\n UPGO_WEIGHT = 1.0\n loss_upgo = UPGO_WEIGHT * split_upgo_loss(target_logits, baselines[0], trajectories)\n print(\"loss_upgo:\", loss_upgo) if debug else None\n\n # note: we want to maximize the entropy\n # so we gradient descent the -entropy\n # Original AlphaStar pseudocode is wrong\n # AlphaStar: loss_ent = entropy_loss(trajectories.behavior_logits, trajectories.masks)\n loss_ent = 3 * (- entropy_loss_for_all_arguments(target_logits, trajectories.masks))\n print(\"loss_ent:\", loss_ent) if 1 else None\n\n #loss_all = target_logits.action_type.sum()\n loss_all = loss_actor_critic + loss_ent # + loss_upgo\n\n loss_list = [lambda_loss, pg_loss, loss_upgo, loss_ent]\n\n return loss_all, loss_list", "def softmax_policy(Qvalues_oa):\n betaQoa = beta * Qvalues_oa\n betaQoa_ = betaQoa - betaQoa.mean(-1, keepdims=True)\n expQoa = np.exp(betaQoa_)\n assert not np.any(np.isinf(expQoa)), \"behavior policy contains infs\"\n return expQoa / expQoa.sum(axis=-1, keepdims=True)", "def execute_policy_and_get_cost(curr_node, reward_machines, policy_bank, tester, new_task_rm, new_task_u1,\n bound=np.inf):\n game = copy.deepcopy(curr_node.parent_state)\n num_features = len(game.get_features())\n s1, s1_features = game.get_state_and_features()\n curr_policy = curr_node.policy\n curr_policy_rm = reward_machines[curr_policy[0]]\n\n bonus = []\n for t in range(tester.testing_params.num_steps):\n a = policy_bank.get_best_action(curr_policy[0], curr_policy[1],\n s1_features.reshape((1, num_features)), add_noise=False)\n game.execute_action(a)\n # game.render()\n s2, s2_features = game.get_state_and_features()\n curr_policy_u2 = curr_policy_rm.get_next_state(curr_policy[1], game.get_true_propositions())\n new_task_u2 = new_task_rm.get_next_state(new_task_u1, game.get_true_propositions())\n\n desired_next_state = curr_policy_rm.get_next_state(curr_policy[1], curr_policy[2])\n\n r = new_task_rm.get_reward(new_task_u1, new_task_u2, s1, a, s2)\n if curr_policy_u2 == desired_next_state:\n logger.info(\"EXECUTED ACTION {}, CAN GO TO NEXT LEVEL\".format(curr_policy[2]))\n return t + 1, game, new_task_u2, r, bonus\n elif curr_policy_u2 == curr_policy[1]:\n logger.info(\"STILL FOLLOWING CURRENT POLICY {}, CONTINUE\".format(curr_policy[2]))\n if new_task_u2 != new_task_u1:\n logger.info(\n \"ENCOUNTERED EVENT {} WHILE FOLLOWING {}\".format(game.get_true_propositions(), curr_policy[2]))\n bonus.append(game.get_true_propositions())\n # else:\n # curr_policy_u2 = curr_policy[1]\n # print(game.get_true_propositions())\n # print(\"OOPS, WRONG WAY\")\n # return np.inf, game, new_task_u1, r, bonus\n\n if game.is_env_game_over() or t + 1 >= bound:\n return np.inf, game, new_task_u2, r, bonus\n\n s1, s1_features = s2, s2_features\n new_task_u1 = new_task_u2\n\n return np.inf, game, new_task_u1, 0, bonus", "def getReward(self):\n# def evaluateFitness(self):\n fitness = 0.0\n distance = self.env.getDistance()\n speed = self.env.getSpeed()\n theta = self.env.getOrientation()\n\n ## implementation 101\n timeBonus = (self.maxTime - self.t)/self.maxTime\n alpha = 1.0/((1+distance)*(1+fabs(theta))*(speed+1));\n if distance < 0.5*self.env.init_distance :\n if(distance < self.env.vicinity_distance and\n abs(theta) < self.env.vicinity_orientation and\n speed < self.env.vicinity_speed ):\n fitness = 1 + timeBonus; \n 
else:\n fitness = alpha;\n else: fitness = 0\n self.lastFitness = fitness\n if fitness > self.bestFitness : \n self.bestFitness = fitness \n\n return fitness", "def policy_gamble (self):\n\t\tidx = self.idx \t\t\t\t# internal time index of state\n\t\tprobs = self.probs\t\t\t# prob of reward for an action\n\t\tbeta = self.beta\t\t\t# inverse temp \n\n\t\t# softmax\n\t\tAct = beta*self.Q[idx]\n\t\tp = 1./(1. + np.exp(-Act))\t# probability of gamble\n\t\tself.SM[idx] = p\n\n\t\t# decide whether to take gamble based on p\n\t\trnd = np.random.random_sample()\n\t\tif rnd < p:\n\t\t\tC = 1\t# gamble\n\t\telse:\n\t\t\tC = 0\t# no gamble\n\t\tself.C[idx] = C\n\n\t\t# no gamble\n\t\tif C == 0:\t\n\t\t\treward = 0\t\t # gamble reward encoded relative to reward\n\t\t\tself.R[idx] = -1 # rewarded sure thing, coded as -1\n\t\t\tself.PE[idx] = 0 # no PE, get the thing you expected\n\t\t# gamble\n\t\telse:\n\t\t\t# decide whether a reward is delivered\n\t\t\treward = np.random.binomial(size=1, n=1, p=probs)[0]\n\t\t\tself.R[idx] = reward # indicator that reward was received\n\t\t\tif reward == 0:\n\t\t\t\treward = self.l_mag\n\t\t\telse:\n\t\t\t\treward = self.r_mag\n\t\t\tself.PE[idx] = reward - self.Q[idx]", "def extractPolicy(self, V):\n\n policy = np.zeros(self.nStates)\n for i in range(self.nStates):\n A = self.helper(i, V)\n best_action = np.argmax(A)\n policy[i] = best_action\n\n return policy", "def policy_eval(env, policy, V, discount_factor):\n policy_value = np.zeros(env.nS)\n for state, action in enumerate(policy):\n for probablity, next_state, reward, info in env.P[state][action]:\n policy_value[state] += probablity * (reward + (discount_factor * V[next_state]))\n\n return policy_value", "def policy_improvement(P, nS, nA, value_from_policy, gamma=0.9):\n\n new_policy = np.ones([nS, nA]) / nA\n for state_idx in range(nS):\n new_action_idx = np.argmax(calc_action_function(state_idx, value_from_policy, nA, P, gamma))\n new_policy[state_idx] = np.eye(nA)[new_action_idx]\n return new_policy", "def policy_eval(W, env, with_discount=False):\n rewards = []\n for i in range(NUMBER_OF_EVAL_SIMS):\n state = normalize_state(env.reset())\n\n run_reward = 0\n is_done = False\n steps = 0\n while not is_done:\n state, reward, is_done, _ = env.step(get_next_best_action(state, W))\n state = normalize_state(state)\n steps += 1\n if with_discount:\n run_reward += reward * (gamma ** steps)\n else:\n run_reward += reward\n\n rewards.append(run_reward)\n\n return np.mean(rewards)", "def compute_pg_vars(trajs, policy, baseline, discount, gae_lambda):\n for traj in trajs:\n # Include the last observation here, in case the trajectory is not finished\n baselines = baseline.predict(np.concatenate(\n [traj[\"observations\"], [traj[\"last_observation\"]]]))\n if traj['finished']:\n # If already finished, the future cumulative rewards starting from the final state is 0\n baselines[-1] = 0.\n # This is useful when fitting baselines. 
It uses the baseline prediction of the last state value to perform\n # Bellman backup if the trajectory is not finished.\n traj['returns'] = compute_cumulative_returns(\n traj['rewards'], baselines, discount)\n traj['advantages'] = compute_advantages(\n traj['rewards'], baselines, discount, gae_lambda)\n traj['baselines'] = baselines[:-1]\n\n # First, we compute a flattened list of observations, actions, and advantages\n all_obs = np.concatenate([traj['observations'] for traj in trajs], axis=0)\n all_acts = np.concatenate([traj['actions'] for traj in trajs], axis=0)\n all_advs = np.concatenate([traj['advantages'] for traj in trajs], axis=0)\n all_dists = {\n k: np.concatenate([traj['distributions'][k] for traj in trajs], axis=0)\n for k in trajs[0]['distributions'].keys()\n }\n\n # Normalizing the advantage values can make the algorithm more robust to reward scaling\n all_advs = (all_advs - np.mean(all_advs)) / (np.std(all_advs) + 1e-8)\n\n # Form chainer variables\n all_obs = Variable(all_obs)\n all_acts = Variable(all_acts)\n all_advs = Variable(all_advs.astype(np.float32, copy=False))\n all_dists = policy.distribution.from_dict(\n {k: Variable(v) for k, v in all_dists.items()})\n\n return all_obs, all_acts, all_advs, all_dists", "def pred(W, X):\n A = softmax_stable(X.dot(W))\n return np.argmax(A, axis = 1)", "def train(self, obs, acs, rews_list, next_obs, terminals):\n\n # step 1: calculate q values of each (s_t, a_t) point, \n # using rewards from that full rollout of length T: (r_0, ..., r_t, ..., r_{T-1})\n q_values = self.calculate_q_vals(rews_list)\n\n # step 2: calculate advantages that correspond to each (s_t, a_t) point\n advantage_values = self.estimate_advantage(obs, q_values)\n\n # step 3:\n # TODO: pass the calculated values above into the actor/policy's update, \n # which will perform the actual PG update step\n\n # TODO: define the loss that should be optimized when training a policy with policy gradient\n # HINT1: Recall that the expression that we want to MAXIMIZE\n # is the expectation over collected trajectories of:\n # sum_{t=0}^{T-1} [grad [log pi(a_t|s_t) * (Q_t - b_t)]]\n # HINT2: see define_log_prob (above)\n # to get log pi(a_t|s_t)\n # HINT3: look for a placeholder above that will be populated with advantage values \n # to get [Q_t - b_t]\n # HINT4: don't forget that we need to MINIMIZE this self.loss\n # but the equation above is something that should be maximized\n\n # define the log probability of seen actions/observations under the current policy\n with tf.GradientTape() as tape:\n log_action_probas = self.actor.get_log_prob(obs, acs)\n advantage_values_no_grad = tf.stop_gradient(advantage_values)\n loss = -tf.reduce_mean(advantage_values_no_grad * log_action_probas)\n\n actor_vars = self.actor.trainable_variables\n grads = tape.gradient(loss, actor_vars)\n self.policy_optimizer.apply_gradients(zip(grads, actor_vars))\n\n if self.nn_baseline:\n targets_n = (q_values - np.mean(q_values)) / (np.std(q_values) + 1e-8)\n dataset = tf.data.Dataset.from_tensor_slices(\n (tf.cast(obs, tf.float32), tf.cast(targets_n, tf.float32)))\n dataset = dataset.batch(batch_size=targets_n.shape[0]).repeat()\n # 20 baseline gradient updates with the current data batch.\n self.baseline_model.fit(dataset, epochs=1, steps_per_epoch=20)\n\n return loss.numpy().item()", "def policy(agent):", "def reward_amt(value, reward_vec, adj, softmax_inv_temp, discount, start_prob=None):\n n = len(reward_vec)\n softmax_value = np.exp(softmax_inv_temp * value) / 
np.sum(np.exp(softmax_inv_temp*value))\n policy = adj * softmax_value.reshape(1, -1)\n policy = util.l1_normalize_rows(policy)\n sr = np.linalg.pinv(np.eye(n) - discount * policy)\n value = np.dot(sr, reward_vec.reshape(-1, 1)).reshape(-1)\n if start_prob is None:\n start_prob = np.ones(n)*1. / n\n else:\n start_prob = start_prob.reshape(n)\n return np.sum(value * start_prob)", "def policy_iter_v(env, policy_eval_v=policy_eval_v, discount_factor=1.0):\n # Start with a random policy\n policy = np.ones([env.nS, env.nA]) / env.nA\n\n policy_stable = False\n while not policy_stable:\n V = policy_eval_v(policy, env, discount_factor=discount_factor)\n\n policy_stable = True\n for state in range(env.nS):\n old_best_action = np.argmax(policy[state])\n best_action = -1\n best_value = -float('inf')\n for action in range(env.nA):\n value = V[env.P[state][action][0][1]]\n if value > best_value:\n best_value = value\n best_action = action\n \n for action in range(env.nA):\n if action == best_action:\n policy[state][action] = 1\n else:\n policy[state][action] = 0\n \n if best_action != old_best_action:\n policy_stable = False\n\n return policy, V", "def updateW(self, trj_Sp_theta, W_0):\n def fun(x):\n global trj_Sp_theta_z\n #W_0 = [[x[0], x[1]], [x[2], x[3]], [x[4], x[5]], [x[6], x[7]]] # sin cos\n W_0 = [[x[0], x[1]],[x[2], x[3]]] # with dir\n #W_0 = x\n r_0 = self.reward_trj(trj_Sp_theta, W_0) \n return -1*r_0 \n import numpy as np\n from scipy.optimize import minimize\n \n global trj_Sp_theta_z \n trj_Sp_theta_z = trj_Sp_theta\n alpha = 0.005\n alpha = 0.1\n delta = alpha\n cons = ({'type': 'eq',\n 'fun' : lambda x: np.array([np.sum(x)-1])},\n {'type': 'ineq',\n 'fun' : lambda x: np.array([np.min(x)])}, # greater than zero\n {'type': 'ineq',\n 'fun' : lambda x: np.array([-np.abs(x[0]-x0[0])+delta])}, # greater than zero\n {'type': 'ineq',\n 'fun' : lambda x: np.array([-np.abs(x[1]-x0[1])+delta])}, # greater than zero\n {'type': 'ineq',\n 'fun' : lambda x: np.array([-np.abs(x[2]-x0[2])+delta])}, # greater than zero\n {'type': 'ineq',\n 'fun' : lambda x: np.array([-np.abs(x[3]-x0[3])+delta])}) # greater than zero\n\n #x0 = W_0\n x0 = [W_0[0][0], W_0[0][1], W_0[1][0], W_0[1][1]] # with dir\n res = minimize(fun, x0, constraints=cons)\n x = res.x\n W = [[x[0], x[1]],[x[2], x[3]]] # with dir\n return W", "def policy (self,forced_actions=None,forced_rewards=None,state_idx=None):\n\t\tif self.gamble:\n\t\t\tself.policy_gamble()\n\t\t\treturn\n\t\tif self.UCB:\n\t\t\tself.policy_UCB(forced_actions,forced_rewards,state_idx)\n\t\t\treturn\n\n\t\tidx = self.idx \t\t\t\t# internal time index of state\n\t\tprobs = self.probs\t\t\t# prob of reward for an action\n\t\tbeta = self.beta\t\t\t# inverse temp \n\n\t\t# calc Act thalamus activation\n\t\tAct = beta*self.Q[idx,:]\n\n\t\t# multioption softmax (invariant to constant offsets)\n\t\tnewAct = Act - np.max(Act)\n\t\texpAct = np.exp(newAct)\n\t\tps = expAct/np.sum(expAct)\n\t\tself.SM[idx,:] = ps\n\t\tcs_ps = np.cumsum(ps)\n\n\t\t# select action\n\t\tif forced_actions is None:\n\t\t\tsample = np.random.random_sample()\n\t\t\tselected = False\n\t\t\tcheck = 0\n\t\t\twhile not selected:\n\t\t\t\tif sample < cs_ps[check]:\n\t\t\t\t\tC = check\n\t\t\t\t\tselected = True\n\t\t\t\telse:\n\t\t\t\t\tcheck = check + 1\n\t\telse:\n\t\t\tC = forced_actions[state_idx,idx]\n\t\tself.C[idx] = C\n\t\t\t\n\t\t# decide whether a reward is delivered\n\t\tif forced_rewards is None:\n\t\t\treward = np.random.binomial(size=1, n=1, p= probs[C])[0]\n\t\telse:\n\t\t\treward = 
forced_rewards[state_idx,idx]\n\t\tself.R[idx] = reward # indicator that reward was received\n\t\tif reward == 0:\n\t\t\treward = self.l_mag\n\t\telse:\n\t\t\treward = self.r_mag\n\n\t\tPE = reward - self.Q[idx,C]\n\t\tself.PE[idx] = PE", "def policy(self, state, training):\n explore_prob = self.max_explore - (self.steps * self.anneal_rate)#probabilidad de exploracion decreciente\n explore = max(explore_prob, self.min_explore) > np.random.rand()\n\n if training and explore: #hacer exploracion\n action = np.random.randint(self.action_space_size)\n else: #hacer explotacion\n inputs = np.expand_dims(state, 0)\n qvalues = self.online_network.model(inputs) #online or evalation network predicts q-values\n #print(\"***##qvalues\",qvalues)\n action = np.squeeze(np.argmax(qvalues, axis=-1))\n\n return action", "def test_prop_reward(self):\n tmax = 10.0\n dt = 1.0\n\n reward_scale = 5.0\n\n ini_rate = 80.0\n\n tutor = SimpleNeurons(1, out_fct=lambda _: ini_rate+20.0)\n reward = MockReward(lambda t: 1.0 if t < tmax/2 else -1)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=ini_rate, learning_rate=1.0,\n use_tutor_baseline=False)\n\n sim1 = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim1.run(tmax)\n\n drates1 = tutor_rule.rates - ini_rate\n\n tutor_rule.reset_rates()\n reward.reward_fct = lambda t: reward_scale if t < tmax/2 else -reward_scale\n\n sim2 = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim2.run(tmax)\n\n drates2 = tutor_rule.rates - ini_rate\n\n self.assertLess(np.max(np.abs(reward_scale*drates1 - drates2)), 1e-6)", "def reward_threshold(self) -> Optional[float]:", "def policy_gradient(state, weight):\n # first calculate policy using the policy function above\n Policy = policy(state, weight)\n # get action from policy\n action = np.random.choice(len(Policy[0]), p=Policy[0])\n # reshape single feature from policy\n s = Policy.reshape(-1, 1)\n # apply softmax function to s and access value at action\n softmax = (np.diagflat(s) - np.dot(s, s.T))[action, :]\n # calculate the dlog as softmax / policy at action\n dlog = softmax / Policy[0, action]\n # find gradient from input state matrix using dlog\n gradient = state.T.dot(dlog[None, :])\n # return action and the policy gradient\n return action, gradient", "def max_weight_policy(self, state: types.StateSpace, eps: float = 1e-6) \\\n -> types.ActionSpace:\n num_activities = self.env.constituency_matrix.shape[1]\n z_star = np.zeros((num_activities, 1))\n for s in self.env.constituency_matrix:\n ind_activities_s = np.argwhere(s > 0)\n max_theta_s, list_max_activity = get_max_gain_station_s(\n ind_activities_s, state, self.env.job_generator.buffer_processing_matrix,\n self.weight_per_buffer)\n if max_theta_s < -eps:\n z_star[ind_activities_s, :] = 0\n else:\n num_positive_actions = 0\n ind_positive_actions = []\n for j in list_max_activity:\n ind_drained_buffer = np.argwhere(\n self.env.job_generator.buffer_processing_matrix[:, j] < 0)\n if state[ind_drained_buffer] >= 1 - eps:\n ind_positive_actions.append(j)\n num_positive_actions += 1\n if num_positive_actions > 0:\n z_star[ind_positive_actions] = 1 / num_positive_actions\n return z_star", "def epsilon_greedy_policy_improve(Q_value, nS, nA, epsilon):\n\n new_policy = epsilon * np.ones((nS, nA)) / nA # = epsilon / m, where m is the number of Actions, nA\n ############################\n # YOUR IMPLEMENTATION HERE #\n # HINT: IF TWO ACTIONS HAVE THE SAME MAXIMUM Q VALUE, THEY MUST BOTH BE EXECUTED EQUALLY LIKELY.\n # THIS IS 
IMPORTANT FOR EXPLORATION. This might prove useful:\n # https://stackoverflow.com/questions/17568612/how-to-make-numpy-argmax-return-all-occurrences-of-the-maximum\n \n # print(\"new_policy = {0}\".format(new_policy))\n \n for s_t in range (0, nS):\n # print(\"old_policy[{0}] = {1}\".format(s_t, new_policy[s_t]))\n # print(\"Q_value[{0}] = {1}\".format(s_t, Q_value[s_t]))\n Q_list = np.argwhere(Q_value[s_t] == np.amax(Q_value[s_t])).flatten() # get a list of all indices where Q is maximum, (argmax(Q))\n # print(\"Q_list: \" + str(Q_list))\n max_Q = np.random.choice(Q_list.flatten()) # randomly pick from those indices. Picking each index is equally likely.\n # print(\"max_Q: \" + str(max_Q))\n \n # A_star = new_policy[s_t][max_Q]\n # print(\"A_star: \" + str(A_star))\n \n new_policy[s_t][max_Q] += 1 - epsilon # for the chosen maximal index of Q, set the polocy to epsilon/m + 1 - epsilon\n # print(\"new_policy[{0}] = {1}\".format(s_t, new_policy[s_t]))\n \n # for a_t in range (0, nA):\n # if a_t in Q_list:\n # new_policy[s_t][a_t] += (1 - epsilon) / len(Q_list)\n\n ############################\n # print(\"new_policy = {0}\".format(new_policy))\n return new_policy", "def policy_improvement(P, nS, nA, value_from_policy, gamma=0.9):\n\n new_policy = np.ones([nS, nA]) / nA\n\t############################\n\t# YOUR IMPLEMENTATION HERE #\n #iteration_policy=new_policy\n for state in range(nS):\n #current_policy=new_policy[state] \n action_policy = np.zeros(nA) \n for action in range(nA):\n for p,nextS,r,boolean_v in P[state][action]:\n action_policy[action] += p*( r + gamma* value_from_policy[nextS])\n #print(action_policy)\n updated_policy=np.zeros(nA)\n updated_policy[np.argmax(action_policy)]= 1\n #print(updated_policy) \n new_policy[state]=updated_policy\n \n \t############################\n return new_policy", "def objective(trial):\n %time\n env = gym.make('Delivery-v0')\n alpha = trial.suggest_discrete_uniform('alpha', 0.3,0.9,0.3)\n gamma = trial.suggest_discrete_uniform('gamma', 0.6, 1,0.1)\n epsilon = trial.suggest_discrete_uniform('epsilon', 0.01, 0.11, 0.04)\n episodes = 1000000\n \n # For plotting metrics\n all_epochs = []\n all_penalties = []\n rewards = []\n \n #Initialize Q table of 22500 x 8 size (22500 states and 8 actions) with all zeroes\n q_table = np.zeros([env.observation_space.n, env.action_space.n]) \n \n for i in range(1, episodes+1):\n state = env.reset()\n episode_rewards = []\n\n epochs, penalties, reward, = 0, 0, 0\n done = False\n\n while not done:\n if random.uniform(0, 1) < epsilon:\n action = env.action_space.sample() # Explore action space randomly\n else:\n action = np.argmax(q_table[state]) # Exploit learned values by choosing optimal values\n\n next_state, reward, done, info = env.step(action) \n\n old_value = q_table[state, action]\n next_max = np.max(q_table[next_state])\n\n new_value = (1 - alpha) * old_value + alpha * (reward + gamma * next_max)\n q_table[state, action] = new_value\n\n if reward == -10:\n penalties += 1\n \n\n state = next_state\n episode_rewards.append(reward)\n epochs += 1\n \n if done == True:\n break \n if epochs == 1000:\n break \n rewards.append(np.sum(episode_rewards))\n \n last_reward = np.mean(rewards)\n # trial.report(-1 * last_reward)\n\n return -1 * last_reward", "def get_reward(self, observations, actions):\n\n #initialize and reshape as needed, for batch mode\n self.reward_dict = {}\n if(len(observations.shape)==1):\n observations = np.expand_dims(observations, axis = 0)\n actions = np.expand_dims(actions, axis = 0)\n 
batch_mode = False\n else:\n batch_mode = True\n\n #get vars\n xvel = observations[:, 9].copy()\n body_angle = observations[:, 2].copy()\n front_leg = observations[:, 6].copy()\n front_shin = observations[:, 7].copy()\n front_foot = observations[:, 8].copy()\n zeros = np.zeros((observations.shape[0],)).copy()\n\n # ranges\n leg_range = 0.2\n shin_range = 0\n foot_range = 0\n penalty_factor = 10\n\n #calc rew\n self.reward_dict['run'] = xvel\n\n front_leg_rew = zeros.copy()\n front_leg_rew[front_leg>leg_range] = -penalty_factor\n self.reward_dict['leg'] = front_leg_rew\n\n front_shin_rew = zeros.copy()\n front_shin_rew[front_shin>shin_range] = -penalty_factor\n self.reward_dict['shin'] = front_shin_rew\n\n front_foot_rew = zeros.copy()\n front_foot_rew[front_foot>foot_range] = -penalty_factor\n self.reward_dict['foot'] = front_foot_rew\n\n # total reward\n self.reward_dict['r_total'] = self.reward_dict['run'] + self.reward_dict['leg'] + self.reward_dict['shin'] + self.reward_dict['foot']\n\n #return\n dones = zeros.copy()\n if(not batch_mode):\n return self.reward_dict['r_total'][0], dones[0]\n return self.reward_dict['r_total'], dones", "def build_posterior(self, policy, num_episodes, max_episode_length):\n\n statistics = trange(num_episodes)\n\n for e in statistics:\n is_terminal = False\n num_steps = 0\n state = self.env.reset()\n action = policy(state)\n \n state_sequence = np.zeros((state.shape[0], max_episode_length+1), dtype=np.float64, order='C')\n state_sequence[:, 0] = state[:,0]\n reward_sequence = np.zeros(max_episode_length, dtype=np.float64, order='C')\n \n while ((num_steps < max_episode_length) and (not is_terminal)):\n num_steps+=1\n state, reward, is_terminal = self.env.step(action)\n action = policy(state)\n\n state_sequence[:, num_steps] = state[:,0]\n reward_sequence[num_steps-1] = reward\n\n state_sequence = state_sequence[:, 0:(num_steps+1)]\n reward_sequence = reward_sequence[0:num_steps]\n\n if (self.D.shape[1]==0):\n\n traj = state_sequence[:,0][:,np.newaxis]\n self.D = traj\n self.V_D = self.V_mu(state_sequence[:,0][:,np.newaxis])\n self.K_inv = 1/self.kernel(traj, traj)\n self.A = np.array([[1]], dtype=np.float64, order='C')\n self.alpha_ = np.array([[0]], dtype=np.float64, order='C')\n self.C_= np.array([[0]], dtype=np.float64, order='C')\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n\n self.update(state_sequence, reward_sequence)\n statistics.set_postfix(epi_length=num_steps, dict_size=self.D.shape[1], cumm_cost=np.sum(reward_sequence))", "def _func(w):\r\n W = _adj(w)\r\n loss, G_loss = _loss(W)\r\n h, G_h = _h(W)\r\n obj = loss + 0.5 * rho * h * h + alpha * h + lambda1 * w.sum()\r\n G_smooth = G_loss + (rho * h + alpha) * G_h\r\n g_obj = np.concatenate((G_smooth + lambda1, - G_smooth + lambda1), axis=None)\r\n return obj, g_obj", "def evaluate(game, player):\n weights = [2, 200, 2000, 20000]\n reward = 0\n opponent = get_opponent(player)\n for length in range(2, 6):\n reward += weights[length - 2] * get_num_series(game, player, length)\n reward -= weights[length - 2] * get_num_series(game, opponent, length)\n return reward", "def _dense_reward(self) -> float:\n y = 1\n target_goal_dists = []\n for target_shape in self.__debris_shapes:\n target_pos = target_shape.shape_body.position\n goal_pos = (target_pos[0], y) # Top of screen.\n dist = np.linalg.norm(target_pos - goal_pos)\n if target_pos[1] > 0.88:\n dist = 0\n target_goal_dists.append(dist)\n target_goal_dists = np.mean(target_goal_dists)\n return -1.0 * target_goal_dists", "def 
_get_reward(self):\n if self.status():\n return self.current_step/self.ep_length # the reward is proportional to the duration \n else:\n return 0", "def softmax(X, Y, w, HProp = None, arg=None, reg=None, batchsize=None):\n if reg == None:\n reg_f = 0\n reg_g = 0\n reg_Hv = lambda v: 0\n else:\n reg_f, reg_g, reg_Hv = reg(w)\n global d, C\n n, d = X.shape\n \n if batchsize is not None:\n n_mini = np.int(np.floor(n*batchsize))\n index_batch = np.random.choice(n, n_mini, replace = False)\n# print(index_batch[:5])\n X = X[index_batch,:]\n Y = Y[index_batch]\n n = n_mini\n \n C = int(len(w)/d)\n w = w.reshape(d*C,1) #[d*C x 1]\n W = w.reshape(C,d).T #[d x C]\n XW = np.dot(X,W) #[n x C]\n large_vals = np.amax(XW,axis = 1).reshape(n, 1) #[n,1 ]\n large_vals = np.maximum(0,large_vals) #M(x), [n, 1]\n #XW - M(x)/<Xi,Wc> - M(x), [n x C]\n XW_trick = XW - np.tile(large_vals, (1, C))\n #sum over b to calc alphax, [n x total_C]\n XW_1_trick = np.append(-large_vals, XW_trick,axis = 1)\n #alphax, [n, ]\n sum_exp_trick = np.sum(np.exp(XW_1_trick), axis = 1).reshape(n, 1)\n log_sum_exp_trick = large_vals + np.log(sum_exp_trick) #[n, 1]\n \n f = np.sum(log_sum_exp_trick)/n - np.sum(np.sum(XW*Y,axis=1))/n + reg_f\n if arg == 'f': \n return f\n inv_sum_exp = 1./sum_exp_trick\n inv_sum_exp = np.tile(inv_sum_exp,(1,np.size(W,axis = 1)))\n S = inv_sum_exp*np.exp(XW_trick) #h(x,w), [n x C] \n g = np.dot(X.T, S-Y)/n #[d x C]\n g = g.T.flatten().reshape(d*C,1) + reg_g#[d*C, ] \n\n if arg == 'g':\n return g \n \n if arg == 'fg':\n return f, g\n\n if HProp == None:\n Hv = lambda v: hessvec(X, S, n, v) + reg_Hv(v) \n return f, g, Hv\n else:\n n_H = np.int(np.floor(n*HProp))\n idx_H = np.random.choice(n, n_H, replace = False)\n inv_sum_exp_H = 1./(sum_exp_trick[idx_H,:])\n inv_sum_exp_H = np.tile(inv_sum_exp_H,(1,np.size(W,axis = 1)))\n S_H = inv_sum_exp_H*np.exp(XW_trick[idx_H,:]) #h(x,w), [S x C] \n Hv = lambda v: hessvec(X[idx_H,:], S_H, n_H, v) + reg_Hv(v)\n return f, g, Hv\n \n if arg == 'explicit':\n f = np.sum(log_sum_exp_trick) - np.sum(np.sum(XW*Y,axis=1)) + reg_f\n g = np.dot(X.T, S-Y) #[d x C]\n g = g.T.flatten().reshape(d*C,1) + reg_g #[d*C, ]\n Hv = lambda v: hessvec(X, S, v, reg)\n #S is divided into C parts {1:b}U{c}, [n, ] * C\n S_cell = np.split(S.T,C) \n SX_cell = np.array([]).reshape(n,0) #empty [n x 0] array\n SX_self_cell = np.array([]).reshape(0,0)\n for column in S_cell:\n c = spdiags(column,0,n,n) #value of the b/c class\n SX_1_cell = np.dot(c.A,X) #WX = W x X,half of W, [n x d]\n #fill results from columns, [n x d*C]\n SX_cell = np.c_[SX_cell, SX_1_cell] \n SX_cross = np.dot(SX_cell.T,SX_cell) #take square, [d*C x d*C] \n #X.T x WX half of W, [d x d]\n SX_1self_cell = np.dot(X.T,SX_1_cell) \n #put [d x d] in diag, W_cc, [d*C x d*C] \n SX_self_cell = block_diag(SX_self_cell,SX_1self_cell) \n H = SX_self_cell - SX_cross #compute W_cc, [d*C x d*C]\n H = H + 2*reg*identity(d*C)\n return f, g, Hv, H", "def optimize_model(optimizer, policy_net, target_net, memory_batch):\n state_batch, action_batch, reward_batch, next_state_batch, done_batch = memory_batch\n state_batch =state_batch.to(device, torch.float32)\n action_batch = action_batch.to(device, torch.int64).view(-1,1)\n reward_batch = reward_batch.to(device, torch.float32)\n next_state_batch = next_state_batch.to(device, torch.float32)\n done_batch = done_batch.to(device, torch.float32)\n\n # Compute Q(s_t, a) - the model computes Q(s_t), then we select the\n # columns of actions taken\n state_action_values = policy_net(state_batch).gather(1, 
action_batch)\n\n # Compute V(s_{t+1}) for all next states.\n with torch.no_grad():\n next_state_action_values = target_net(next_state_batch)\n next_state_values = next_state_action_values.max(1)[0]\n next_state_values = next_state_values * (1 - done_batch) # no reward if this episode is done.\n # Compute the expected Q values\n expected_state_action_values = (next_state_values * gamma) + reward_batch\n expected_state_action_values = expected_state_action_values.unsqueeze(1)\n\n # Compute Huber loss\n assert expected_state_action_values.requires_grad == False\n assert state_action_values.requires_grad == True\n loss = F.smooth_l1_loss(state_action_values, expected_state_action_values)\n\n # Optimize the model\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n return loss", "def back_propagate(self, reward, maxQ):\n\n error = self.alpha * (reward + self.gamma*maxQ - self.value)\n #logging.debug(\"error is now %s\" % (error))\n\n # sigmoid derivate is sigmoid(x) * (1 - sigmoid(x) )\n dsig = self.value * (1 - self.value)\n\n gradient = error * dsig\n #logging.debug(\"gradient is now: %s\" % (gradient))\n\n self.weigths = np.add( self.weights, np.multiply(gradient, self.weights) )\n # self.weights = [gradient * w + w for w in self.weights]", "def compute_optimal_policy(self):\n\n self.theta_history.append(self.theta)\n\n since = time()\n for it in range(self.n_itr):\n print(\"lr: {} | Iteration N: {} \\r\".format(self.lr, it), end=\"\")\n\n self.policy = GaussianPolicy(self.theta, self.sigma)\n\n # Simulate N trajectories\n paths = collect_episodes(\n self.sim, policy=self.policy, horizon=self.T, n_episodes=self.n_episodes)\n\n avg_return = self._compute_performance(paths=paths)\n self.avg_returns.append(avg_return)\n\n # Gradient update\n self.theta += self.update_rule(self.policy.grad_J(\n paths, self.discounts, n_ep=self.n_episodes, T=self.T), lr=self.lr)\n\n # History update\n self.theta_history.append(self.theta)\n\n # print(\"\\nTook {}s\".format(round(time() - since, 2)))\n print(\"lr: {} | Iteration N: {} | Took: {}s\".format(self.lr, self.n_itr, round(time() - since, 2)))", "def _compute_reward(self):\n last_score = self.episode_qualities[-2]\n new_score = self.episode_qualities[-1]\n reward = new_score - last_score\n return reward", "def build_posterior(self, policy, num_episodes, max_episode_length, test_every=np.inf, states_V_target=()):\n\n statistics = trange(num_episodes)\n test_error = np.array([])\n\n for e in statistics:\n is_terminal = False\n num_steps = 0\n state = self.env.reset()\n action = policy(state)\n \n state_sequence = np.empty((state.shape[0], max_episode_length+1), dtype=np.float64, order='C')\n state_sequence[:, 0] = state[:,0]\n reward_sequence = np.empty(max_episode_length, dtype=np.float64, order='C')\n \n while ((num_steps < max_episode_length) and (not is_terminal)):\n num_steps+=1\n state, reward, is_terminal = self.env.step(action)\n action = policy(state)\n\n state_sequence[:, num_steps] = state[:,0]\n reward_sequence[num_steps-1] = reward\n\n state_sequence = state_sequence[:, 0:(num_steps+1)]\n reward_sequence = reward_sequence[0:num_steps]\n\n if (self.D.shape[1]==0):\n\n traj = state_sequence[:,0][:,np.newaxis]\n self.D = traj\n self.V_D = self.V_mu(state_sequence[:,0][:,np.newaxis])\n self.K_inv = 1/self.kernel(traj, traj)\n self.A = np.array([[1]])\n self.alpha_ = np.array([[0]])\n self.C_= np.array([[0]])\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n\n self.update(state_sequence, reward_sequence)\n 
statistics.set_postfix(epi_length=num_steps, dict_size=self.D.shape[1], cumm_cost=np.sum(reward_sequence))\n if (e%test_every==0 and len(states_V_target)==2):\n V = self.get_value_function(states_V_target[0])\n test_error = np.concatenate((test_error, np.array([np.mean(np.abs(V - states_V_target[1]))])))\n\n return test_error", "def _evaluate_policy(self, state, legal_actions, step_rewards=None, action=None):\n assert step_rewards is not None\n probabilities = torch.exp(torch.tensor(step_rewards, dtype=self.dtype))\n probabilities = probabilities / torch.sum(probabilities)\n\n if action is not None:\n return probabilities[action]\n else:\n return probabilities", "def __call__(self, w: tf.Tensor) -> tf.Tensor:\n return tf.maximum(w, self.epsilon)", "def get_weights(y_true, prior_probs, params):\n # Parameters\n _lambda = params['lambda']\n Q = prior_probs.shape[0]\n\n # The weights are proportional to\n all_w = ((1 -_lambda)*prior_probs + _lambda/Q)**(-1) # (Q,)\n\n # The weighted distribution must sum to one: E[w] = sum(p_tilde*w) = 1\n all_w = all_w / tf.reduce_sum(prior_probs * all_w) # (Q,)\n\n # Find q_star\n q_star = tf.argmax(y_true, axis=3) # (b, H, W)\n\n # Select weights\n all_v = tf.gather(all_w, q_star) # (b, H, W)\n\n # Cast to float32, which is necessary for further calculations\n all_v = tf.cast(all_v, tf.float32) # (b, H, W)\n\n return all_v", "def reward(input):\n state = np.array([input[0], input[1]])\n action = input[2]\n action = np.clip(action, -2.0, 2.0)\n costs = angle_normalize(state[0])**2 + .1 * state[1]**2 + .001 * (action**2)\n\n return - costs", "def _compute_reward(self): \n reward = -1\n return reward", "def getPolicy(self, state):\n \"*** YOUR CODE HERE ***\"\n possibleActions = self.mdp.getPossibleActions(state)\n if len(possibleActions) == 0: return None\n results = []\n for action in possibleActions:\n total = 0\n for (nextState, prob) in self.mdp.getTransitionStatesAndProbs(state,action):\n total += (prob * self.values[nextState])\n results.append(total)\n maxIndex = max(enumerate(results), key=lambda x: x[1])[0]\n #print(\"here\")\n return possibleActions[maxIndex]", "def step_maxL_gradient_descent(y, tx, w, gamma):\n loss=loss_maxL(y, tx, w)\n grad=calculate_maxL_gradient(y,tx,w)\n # update w by gradient\n w=w-gamma*grad\n return w, loss", "def policy_backward(self, eph, epx, epdlogp, model_type):\n db2 = sum(epdlogp)[0]\n dW2 = np.dot(eph.T, epdlogp).ravel()\n dh = np.outer(epdlogp, self.model['W2_' + model_type])\n dh[eph <= 0] = 0 # backpro prelu\n db1 = sum(dh)\n dW1 = np.dot(dh.T, epx)\n return {'W1_' + model_type: dW1, 'W2_' + model_type: dW2, 'b1_' + model_type: db1, 'b2_' + model_type: db2}", "def policy_loss(sal_box_prob, oracle_action, sample_weights):\n loss = tf.nn.softmax_cross_entropy_with_logits(logits=sal_box_prob, labels=oracle_action)\n \n return tf.reduce_mean(loss) * (1.0 - sample_weights / 10.0)", "def _policy_eval(self, policy: np.ndarray) -> np.ndarray:\n V = np.zeros(self.state_dim)\n diff = 1.0\n dr = 0.9\n while (diff >= self.theta):\n diff = 0.0\n for s in self.mdp._state_dict:\n old = V[self.mdp._state_dict[s]]\n temp = 0.0\n for opt in range(self.action_dim):\n if policy[self.mdp._state_dict[s],opt] == 1.0: \n for next_s in self.mdp._state_dict:\n p = self.mdp.P[self.mdp._state_dict[s],opt,self.mdp._state_dict[next_s]]\n r = self.mdp.R[self.mdp._state_dict[s],opt,self.mdp._state_dict[next_s]]\n Vs = V[self.mdp._state_dict[next_s]]\n temp = temp + p * (r + dr * Vs)\n V[self.mdp._state_dict[s]] = temp\n diff = max(diff,abs(old 
- V[self.mdp._state_dict[s]]))\n return V", "def elbo_with_policy(self, rng, params, x, policy, train, context=None):\n d = np.prod(x.shape[1:])\n batch_size = x.shape[0]\n\n rng_perm, rng_t, rng_drop = jax.random.split(rng, 3)\n\n # Get random sigma ~ Unif(S_n_steps)\n sigmas = ardm_utils.get_batch_permutations(rng_perm, x.shape[0],\n self.num_steps)\n\n # Sample t from policy.\n t, _, weight_policy = self.sample_policy_t(rng_t, batch_size, policy)\n\n prev_selection, _ = ardm_utils.get_selection_for_sigma_and_t(\n sigmas, t, self.config.mask_shape)\n future_selection = (1. - prev_selection)\n\n corrupted = self.corrupt(x, prev_selection)\n\n net_out = self.apply_fn(\n {'params': params}, corrupted, t, prev_selection, train,\n rngs={'dropout': rng_drop} if train else None, context=context)\n\n log_px_sigma_geq_t = self.logprob_fn(x, net_out)\n\n log_px_sigma_geq_t = future_selection.reshape(\n log_px_sigma_geq_t.shape) * log_px_sigma_geq_t\n log_px_sigma_geq_t = util_fns.sum_except_batch(log_px_sigma_geq_t)\n\n ce = log_px_sigma_geq_t / d / np.log(2)\n\n # Reweigh for expectation over i.\n reweighting_factor_expectation_i = 1. / (self.num_steps - t)\n elbo_per_t = reweighting_factor_expectation_i * log_px_sigma_geq_t\n\n # Reweigh for expectation over policy.\n elbo = elbo_per_t * weight_policy\n\n elbo = elbo / d / np.log(2)\n elbo_per_t = elbo_per_t / d / np.log(2)\n\n return elbo, elbo_per_t, ce, t", "def policy_evaluation(self):\n self.V = np.zeros((self.environment.num_rows * self.environment.num_columns,))\n self.environment.reset()\n\n end = False\n while not end:\n delta = 0\n\n for state in range(self.environment.num_rows * self.environment.num_columns):\n v = self.V[state]\n\n # Gets the max value got from any of the different accion\n self.V[state] = np.max([self.calc_value(state, action) * self.policy.get_action_probs(state, [\n self.environment.actions.index(action)]) for action in self.environment.actions])\n # Gets the maximum difference between current and previous values\n delta = max(delta, abs(v - self.V[state]))\n\n # Only ends if the delta is lower than theta (small value)\n end = delta <= self.theta\n\n return self.V", "def __generate_reward_function(self):\n K = -3\n self.reward = np.array([[10, 0, K],\n [0, 2, 0],\n [K, 0, 10]])", "def _compute_reward(self):\n reward = 0.0\n return reward", "def compute_intrinsic_reward(self, next_obs):\r\n next_obs = torch.tensor(next_obs, dtype=torch.float, device=self.device)\r\n #next_obs = torch.FloatTensor(next_obs).to(self.device)\r\n\r\n target_next_feature = self.rnd.target(next_obs)\r\n predict_next_feature = self.rnd.predictor(next_obs)\r\n intrinsic_reward = (target_next_feature - predict_next_feature).pow(2).mean(1) ### MSE --- Issues\r\n #intrinsic_reward = (target_next_feature - predict_next_feature).pow(2).sum(1) / 2\r\n\r\n return intrinsic_reward.data.cpu().numpy()", "def extract_optimal_policy(self):\n self.Policy = np.argmax(self.Q, axis=1)\n if self.mode == 'debug':\n print(\"Optimal Policy:\",self.Policy)", "def _get_reward(self, normalized_state, normalized_unconstrained_action, normalized_constrained_action):\n denormalized_unconstrained_charge_rate_in_W = self.denormalize_network_output(normalized_unconstrained_action)\n denormalized_constrained_charge_rate_in_W = self.denormalize_network_output(normalized_constrained_action)\n denormalized_state = normalized_state * self.energy_system.stm_train_subsequent_states_stds + self.energy_system.stm_train_subsequent_states_means\n\n cost_of_net_drawn_electricity = 
self._get_cost_of_net_drawn_electricity_in_euros(denormalized_state, denormalized_constrained_charge_rate_in_W)\n charge_rate_punishment = self._get_punishment_for_excessive_charge_rate(denormalized_unconstrained_charge_rate_in_W)\n soc_punishment = self._get_punishment_for_impossible_resulting_soc(denormalized_state, denormalized_unconstrained_charge_rate_in_W) \n reward = - cost_of_net_drawn_electricity - charge_rate_punishment - soc_punishment\n #tf.summary.scalar('cost_of_net_drawn_electricity in euros', cost_of_net_drawn_electricity) \n #tf.summary.scalar('reward', reward)\n\n return reward, cost_of_net_drawn_electricity", "def get_softmax_policy(intensity_of_choice):\n beta = intensity_of_choice\n \n def softmax_policy(Qvalues_oa):\n \"\"\"Returns softmax action probabilites from Qvalues\"\"\"\n betaQoa = beta * Qvalues_oa\n betaQoa_ = betaQoa - betaQoa.mean(-1, keepdims=True)\n expQoa = np.exp(betaQoa_)\n assert not np.any(np.isinf(expQoa)), \"behavior policy contains infs\"\n return expQoa / expQoa.sum(axis=-1, keepdims=True)\n \n return softmax_policy", "def lossFun(review, target, hprev):\n xs, hs, ys, ps = {}, {}, {}, {}\n hs[-1] = np.copy(hprev)\n loss = 0\n\n # forward pass\n for t in range(len(review)):\n xs[t] = np.zeros((vector_len,1)) # encode in 1-of-k representation\n for j in range(32):\n xs[t][j] = review[t][j]\n hs[t] = np.tanh(np.dot(Wxh, xs[t]) + np.dot(Whh, hs[t-1]) + bh) # hidden state\n\n #Many 2 one\n last = len(review) - 1\n ys = np.dot(Why, hs[last]) + by # unnormalized log probabilities for next chars\n ps = np.exp(ys) / np.sum(np.exp(ys)) # probabilities for next chars\n loss = -np.log(ps[target,0]) # softmax (cross-entropy loss)\n\n # backward pass: compute gradients going backwards\n dWxh, dWhh, dWhy = np.zeros_like(Wxh), np.zeros_like(Whh), np.zeros_like(Why)\n dbh, dby = np.zeros_like(bh), np.zeros_like(by)\n dhnext = np.zeros_like(hs[0])\n\n dy = np.subtract(ps,target) # backprop into y. see http://cs231n.github.io/neural-networks-case-study/#grad if confused here\n dWhy += np.dot(dy, hs[last].T)\n dby += dy\n dh = np.dot(Why.T, dy) + dhnext # backprop into h\n for t in reversed(range(len(review))):\n dhraw = (1 - (hs[t] * hs[t].T)) * dh # backprop through tanh nonlinearity\n dbh += dhraw\n dWxh += np.dot(dhraw, xs[t].T)\n dWhh += np.dot(dhraw, hs[t-1].T)\n dhnext = np.dot(Whh.T, dhraw)\n for dparam in [dWxh, dWhh, dWhy, dbh, dby]:\n np.clip(dparam, -5, 5, out=dparam) # clip to mitigate exploding gradients\n return loss, dWxh, dWhh, dWhy, dbh, dby, hs[last]", "def _reward(self):\n\n return 1-self.step_count/ self.max_steps" ]
[ "0.65174586", "0.6455131", "0.62911916", "0.6210671", "0.61917514", "0.6106755", "0.60980415", "0.60720646", "0.6065897", "0.60095304", "0.5995886", "0.5988406", "0.59755456", "0.5938648", "0.59124494", "0.5908187", "0.58875597", "0.58874375", "0.58826035", "0.5856284", "0.5836028", "0.5833257", "0.5812267", "0.58067435", "0.5802967", "0.57958376", "0.5794594", "0.57878906", "0.57828575", "0.57591045", "0.5740246", "0.57377356", "0.57154506", "0.5711112", "0.5704977", "0.56961983", "0.5693183", "0.5691267", "0.56891835", "0.567732", "0.56726", "0.5668703", "0.5657371", "0.5652574", "0.56491894", "0.56363994", "0.56316113", "0.56279784", "0.56231076", "0.5614083", "0.56063", "0.56062824", "0.55989563", "0.5597093", "0.55958366", "0.55890626", "0.55812174", "0.5579738", "0.55772245", "0.55771327", "0.55738705", "0.557084", "0.55708027", "0.55703175", "0.5568401", "0.5557754", "0.5554333", "0.55491525", "0.5517447", "0.5510684", "0.55052084", "0.5498494", "0.5483703", "0.5478463", "0.5477733", "0.5472595", "0.5471282", "0.5467082", "0.54636616", "0.54627496", "0.5457121", "0.5456969", "0.54519534", "0.545142", "0.54495937", "0.5444748", "0.5444615", "0.54441476", "0.5432258", "0.5417845", "0.5413072", "0.54032207", "0.5403042", "0.5399636", "0.53992796", "0.53980905", "0.5394697", "0.5392919", "0.53874385", "0.5385065" ]
0.71800405
0
Given a MaxEnt policy, begin with the start state distribution and propagate forward to find the expected state frequencies over the horizon
import numpy as np
from itertools import product


def calcExpectedStateFreq(trans_mat, horizon, start_dist, policy):
    n_states = np.shape(trans_mat)[0]
    n_actions = np.shape(trans_mat)[1]

    # Expected state visitation frequencies, states x horizon;
    # copy start_dist into the first time step
    exp_svf = np.zeros((n_states, horizon))
    exp_svf[:, 0] = start_dist

    # Calculate the expected state frequency for each (state, time) pair
    # by propagating the previous step's mass through policy and dynamics
    for time in range(1, horizon):
        for state, action, new_state in product(range(n_states), range(n_actions), range(n_states)):
            exp_svf[new_state, time] += (exp_svf[state, time-1]
                                         * policy[state, action]
                                         * trans_mat[state, action, new_state])

    state_freq = exp_svf.sum(axis=1)
    return state_freq
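A minimal usage sketch (illustrative only, not part of the stored record; the toy transition matrix, policy, and start distribution below are invented for the example). With a deterministic two-state chain that starts in state 0 and absorbs in state 1, the returned frequencies sum to the horizon:

import numpy as np

# Hypothetical 2-state, 1-action MDP: state 0 always moves to state 1, state 1 is absorbing.
trans_mat = np.zeros((2, 1, 2))
trans_mat[0, 0, 1] = 1.0
trans_mat[1, 0, 1] = 1.0
policy = np.ones((2, 1))           # the single action is taken with probability 1
start_dist = np.array([1.0, 0.0])  # always start in state 0

freq = calcExpectedStateFreq(trans_mat, horizon=3, start_dist=start_dist, policy=policy)
print(freq)        # [1. 2.] -- expected visits per state over the horizon
print(freq.sum())  # 3.0, i.e. one unit of probability mass per time step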
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calcMaxEntPolicy(trans_mat, horizon, r_weights, state_features):\n\tn_states = np.shape(trans_mat)[0]\n\tn_actions = np.shape(trans_mat)[1]\n\n\tpartition = np.zeros((n_states, 1))\n\tpolicy = np.zeros((n_states, n_actions))\n\n\tpartition[n_states-1] = 1\n\treward = np.exp(np.dot(r_weights, state_features.T))\n\n\t# Calculate partition function for each state and policy value for (state,action)\n\tfor i in range(horizon):\n\t\tnew_partition = partition\n\t\tfor state in range(n_states):\n\t\t\tfor action in range(n_actions):\n\t\t\t\tp = np.array([trans_mat[state, action, new_state]*reward[state]*partition[new_state] for new_state in range(n_states)])\n\t\t\t\tpolicy[state, action] = np.sum(p)\n\t\t\tnew_partition[state] = np.sum(policy[state, :])\n\t\t\tif state == n_states-1:\n\t\t\t\tnew_partition[state] = 1\n\t\tpartition = new_partition\n\n\n\t# Local action probability computation\n\tfor state in range(n_states):\n\t\tfor action in range(n_actions):\n\t\t\tif partition[state] != 0.0:\n\t\t\t\tpolicy[state, action] = policy[state, action] / partition[state]\n\n\treturn policy", "def expected_svf_from_policy(p_transition, p_initial, terminal, p_action, eps=1e-5):\n n_states, _, n_actions = p_transition.shape\n\n # 'fix' our transition probabilities to allow for convergence\n # we will _never_ leave any terminal state\n p_transition = np.copy(p_transition)\n p_transition[terminal, :, :] = 0.0\n\n # set-up transition matrices for each action\n p_transition = [np.array(p_transition[:, :, a]) for a in range(n_actions)]\n\n # actual forward-computation of state expectations\n d = np.zeros(n_states)\n\n delta = np.inf\n while delta > eps:\n d_ = [p_transition[a].T.dot(p_action[:, a] * d) for a in range(n_actions)]\n d_ = p_initial + np.array(d_).sum(axis=0)\n\n delta, d = np.max(np.abs(d_ - d)), d_\n\n return d", "def policy_eval():\r\n \r\n action_prob = [0.125, 0.625, 0.125, 0.125]# actions with probabilities\r\n data = grid_world()\r\n state_axis = np.zeros((9, 9))#initialize states\r\n threshold = .1\r\n prior_state = np.ones((9, 9))\r\n \r\n while np.abs(state_axis - prior_state).max() > threshold:\r\n for x, y in product(range(9), repeat=2):\r\n prior_state = state_axis.copy()\r\n if data.array[x, y] == 'X':\r\n continue\r\n updated_values = [data.next_direction(np.array([x, y]), next_move)\r\n for next_move in data.directions]#Updating states with directions\r\n Sum_Expectation = np.dot(action_prob,\r\n [points_val + 0.9 * state_axis[position[0], position[1]]\r\n for position, points_val in updated_values])\r\n state_axis[x, y] = Sum_Expectation\r\n print(\"\\nExercise 3.1 Shows Value functions for the policy\\n\")\r\n print(state_axis)\r\n build_grid(state_axis, \"Shows Value functions for the policy\")", "def calculate_policy(self, state):\n # short aliases\n s = state # s stands for state\n g = self.config['gamma'] # g stands for gamma\n n = self.action_space.n # n stands for the number of actions\n a = self.config['alpha']\n pi_s = self.policy[state] # pi_s stands for the policy in state s\n weights = self.weights[state]\n # print(weights)\n\n\n # obtains the probability vector from Hedge: p_i(t) = (1+alpha)^s_i(t) / sum_{j \\in K} (1+alpha)^s_j(t)\n sum_weights_exponentials = sum([(1 + a) ** w for w in weights])\n pre_prob = [(((1 + a) ** w) / sum_weights_exponentials) for w in weights]\n\n # the policy is a probability vector, giving the probability of each action\n pi_s = [((1 - g) * p) + (g / n) for p in pre_prob]\n\n return pi_s", "def valueLookAhead(self, state, 
estState, action):\n # Get the state distribution, assuming we take action.\n newDist = {}\n for pState in range(self.P):\n for newState, prob in self.trans(state, pState)[action].iteritems():\n newF, newP = newState\n if newState not in newDist:\n newDist[newF] = [0.0] * self.P\n # Note that newDist[newF] is a (not-normalized)\n # state probability distribution.\n newDist[newF][newP] += prob * estState[pState]\n\n # For each possible newF, calculate the maximum value.\n maxValue = -float('inf')\n for newF, dist in newDist.iteritems():\n normDist = [x/sum(dist) for x in dist]\n for vector in self.vectors[newF]:\n dotProduct = sum(vector[i] * normDist[i] for i in range(self.P))\n if dotProduct > maxValue:\n maxValue = dotProduct\n\n rewardValue = 0\n for pState in range(self.P):\n rewardValue += self.reward(state, pState, action) * estState[pState]\n return maxValue + rewardValue", "def training_policy(self, state):\n if self.epsilon > random.random():\n return random.randint(0, 1)\n return self.policy(state)", "def first_move_distr(policy, env):\n state = env.reset()\n state = torch.from_numpy(state).long().unsqueeze(0)\n state = torch.zeros(3,9).scatter_(0,state,1).view(1,27)\n pr = policy(Variable(state))\n return pr.data", "def first_move_distr(policy, env):\n state = env.reset()\n state = torch.from_numpy(state).long().unsqueeze(0)\n state = torch.zeros(3,9).scatter_(0,state,1).view(1,27)\n pr = policy(Variable(state))\n return pr.data", "def gen_policy_rollout(self, observations, dynam_model):\n # initialize statespace array in pytorch tensor -> need that good good gradient!\n # raise NotImplementedError(\"Need to implement the rollouts with tensors, or maybe not for gradients\")\n\n # for sampling the state progression\n norm_dist = torch.distributions.Normal(0, 1)\n\n # for storing the costs and gradients\n costs = torch.Tensor()\n baselines = torch.Tensor()\n log_probabilities = torch.Tensor()\n states = torch.Tensor()\n ''' \n # for all of these values, think a row as an updating time series for each particle\n costs = torch.zeros((self.P, self.T))\n baselines = torch.zeros((self.P, self.T))\n probabilities = torch.zeros((self.P, self.T))\n\n # This is what we would do for the states, but it's more efficient to concatenate them\n states = torch.Tensor((self.P,self.T, self.n_in))\n '''\n\n # Change in state vs raw state key\n pred_key = dynam_model.change_state_list\n # print(pred_key)\n\n # TODO: Generate initial states for each particle based on the distribution of data it was trained on\n bound_up = torch.Tensor(np.max(observations, 0))\n bound_low = torch.Tensor(np.min(observations, 0))\n\n obs_dist = torch.distributions.uniform.Uniform(bound_low, bound_up)\n\n # iterate through each particle for the states and probabilites for gradient\n for p in range(self.P):\n\n # Choose the dynamics model from the ensemble\n num_ens = dynam_model.E\n if num_ens == 0:\n model = dynam_model\n else:\n model_idx = random.randint(0, num_ens - 1)\n model = dynam_model.networks[model_idx]\n\n num_obs = np.shape(observations)[1]\n # x0 = torch.Tensor(observations[random.randint(0,num_obs),:])\n x0 = obs_dist.sample()\n\n # TODO: Normalize the states before passing into the NN\n\n state_mat = x0.view((1, 1, -1))\n # print(state_mat)\n # state_mat = x0.unsqueeze(0).unsqueeze(0) # takes a (n_in) vector to a (1,1,n_in) Tensor\n # torch.cat((), axis = 1) to cat the times\n # torch.cat((), axis = 0) to cat the particle\n # is there a way to do this without unsqueeze? 
Seems like the most efficient way\n log_prob_vect = torch.Tensor([1]) # states the column with 1 for concat'ing\n\n for t in range(self.T):\n # generate action from the policy\n action = self.forward(state_mat[0, t, :])\n # print(action)\n # quit()\n\n # forward pass current state to generate distribution values from dynamics model\n means, var = model.distribution(state_mat[0, t, :], action)\n # print(var)\n # sample the next state from the means and variances of the state transition probabilities\n vals = var * norm_dist.sample((1, self.n_in)) + means\n # need to account for the fact that some states are change in and some are raw here\n\n # batch mode prob calc\n log_probs = -.5 * torch.abs(vals - means) / var\n # log_probs = -.5*torch.abs(vals - means)/(var**2)\n\n # for s in range(self.n_in):\n # # sample predicted new state for each element\n # val = var[s]*np.random.normal()+means[0] # sample from the scaled gaussian with y = sigma*x + mu\n\n # # calculate probability of this state for each sub state\n # p = -.5*(val-means[0])/var[0]\n # states = torch.cat((states, state), 0)\n # probabilities = torch.cat((probabilities, p), 0)\n\n # reduce the probabilities vector to get a single probability of the state transition\n log_prob = torch.sum((log_probs), 1)\n log_prob_vect = torch.cat((log_prob_vect, log_prob))\n\n state = torch.Tensor(vals).view((1, 1, -1))\n\n state_mat = torch.cat((state_mat, state),\n 1) # appends the currnt state to the current particle, without overwriting the otherone\n\n # print(state_mat)\n # calculates costs\n # idea ~ calculate the cost of each each element and then do an cumulative sum for the costs\n # use torch.cumsum\n c_list = []\n for state in state_mat.squeeze():\n c_row = self.cost_fnc(state)\n c_list.append(c_row)\n c_list = torch.stack(c_list)\n\n # note we calc the cum sum on the flipped tensor, then flip costs back\n cost_cum = torch.cumsum(torch.flip(c_list, [0]), 0)\n # Assembles the arrays for the current particle\n\n # costs were given above\n costs = torch.cat((costs, torch.flip(cost_cum, [0]).view(1, -1)), 0)\n\n # update the states array for each particle\n states = torch.cat((states, state_mat), 0)\n\n # concatenates the vector of prob at each time to the 2d array\n log_probabilities = torch.cat(\n (log_probabilities, log_prob_vect.view((1, -1))), 0)\n\n # calculates baselines as the leave one out mean for each particle at each time\n costs_summed = torch.sum(costs, 0)\n costs_summed_exp = costs_summed.expand_as(costs)\n costs_leave_one_out = costs_summed_exp - costs\n baselines = costs_leave_one_out / (self.P - 1)\n\n # freezes gradients on costs and baselines\n # these two lines of code actually do nothing, but are for clarification\n # costs.requires_grad_(requires_grad=False)\n # baselines.requires_grad_(requires_grad=False)\n # . detach() is another way to ensure this\n \"\"\"\n RuntimeError: you can only change requires_grad flags of leaf variables. 
If you want to use a \n computed variable in a subgraph that doesn't require differentiation use var_no_grad = var.detach().\n \"\"\"\n costs_d = costs.detach()\n baselines_d = baselines.detach()\n # costs_d = costs.requires_grad_(False)\n # baselines_d = baselines.requires_grad_(False)\n # print(baselines)\n # print(probabilities)\n # print(costs)\n # print(log_probabilities)\n return states, log_probabilities, costs_d, baselines_d", "def forwardPolicyNet(self, state):\n with torch.no_grad():\n q_values = self.policy_net(state)\n return q_values", "def update_policy(self, minibatch_size):\n \n steps = self.rewards.shape[0]\n batch_size = self.rewards.shape[0] * self.rewards.shape[1]\n #steps = 500\n #batch_size = 500\n #print(steps)\n #print(batch_size)\n \n # Compute advantages\n '''\n with torch.no_grad():\n if self.gae:\n advantages = torch.zeros_like(self.rewards).to(self.training_device)\n lastgaelam = 0\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n nextvalues = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t + 1]\n nextvalues = self.state_values[t + 1]\n delta = self.rewards[t] + self.gamma * nextvalues * nextnonterminal - self.state_values[t]\n advantages[t] = lastgaelam = delta + self.gamma * self.gae_lambda * nextnonterminal * lastgaelam\n returns = advantages + self.state_values\n else:\n returns = torch.zeros_like(self.rewards).to(self.training_device)\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n next_return = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t+1]\n next_return = returns[t+1]\n returns[t] = self.rewards[t] + self.gamma * nextnonterminal * next_return\n advantages = returns - self.state_values\n ''' \n returns = torch.zeros_like(self.rewards).to(self.training_device)\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n next_return = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t+1]\n next_return = returns[t+1]\n returns[t] = self.rewards[t] + self.gamma * nextnonterminal * next_return\n advantages = returns - self.state_values\n \n\n # flatten the batch\n #b_obs = self.states.reshape((-1,) + self.state_space)\n #print(self.states.shape)\n b_obs = self.states.reshape((-1,4)).detach()\n b_logprobs = self.action_probs.reshape(-1,1).detach()\n b_actions = self.actions.reshape((-1,)).detach()\n b_advantages = advantages.reshape(-1,1)\n b_returns = returns.reshape(-1,1)\n b_values = self.state_values.reshape(-1,1)\n \n # Optimize policy and value network for K epochs, run optimization in minibatches\n \n inds = np.arange(batch_size)\n for i_epoch_pi in range(self.epochs):\n np.random.shuffle(inds)\n for start in range(0, batch_size, minibatch_size):\n end = start + minibatch_size\n minibatch_ind = inds[start:end]\n mb_advantages = b_advantages[minibatch_ind]\n if self.norm_adv:\n mb_advantages = (mb_advantages - mb_advantages.mean()) / (mb_advantages.std() + 1e-8)\n \n #_, newlogproba, entropy = self.get_action(b_obs[minibatch_ind], b_actions[minibatch_ind])\n newlogproba, entropy = self.evaluate(b_obs[minibatch_ind], b_actions[minibatch_ind])\n #ratio = (newlogproba - b_logprobs[minibatch_ind]).exp()\n ratio = torch.exp((newlogproba - b_logprobs[minibatch_ind].detach()))\n \n # Stats\n approx_kl = (b_logprobs[minibatch_ind] - newlogproba).mean()\n\n # Policy loss\n pg_loss1 = -mb_advantages * ratio\n pg_loss2 = -mb_advantages * torch.clamp(ratio, 1 - 
self.clip_epsilon, 1 + self.clip_epsilon)\n pg_loss = torch.max(pg_loss1, pg_loss2).mean()\n entropy_loss = entropy.mean()\n\n # Value loss\n _, new_values = self.policy.forward(b_obs[minibatch_ind])\n if self.clip_vloss:\n \n v_loss_unclipped = self.MseLoss(new_values,b_returns[minibatch_ind])\n #v_loss_unclipped = ((new_values - b_returns[minibatch_ind]) ** 2)\n v_clipped = b_values[minibatch_ind] + torch.clamp(new_values - b_values[minibatch_ind],\n -self.clip_epsilon, self.clip_epsilon)\n #v_loss_clipped = (v_clipped - b_returns[minibatch_ind]) ** 2\n v_loss_clipped = self.MseLoss(v_clipped,b_returns[minibatch_ind])\n v_loss_max = torch.max(v_loss_unclipped, v_loss_clipped)\n #v_loss = 0.5 * v_loss_max.mean()\n v_loss = 0.5 * v_loss_max\n else:\n #v_loss = 0.5 * ((new_values - b_returns[minibatch_ind]) ** 2).mean()\n v_loss = self.MseLoss(new_values,b_returns[minibatch_ind])\n\n loss = pg_loss + v_loss * self.vf_coeff - self.ent_coeff * entropy_loss\n\n self.optimizer.zero_grad()\n loss.backward()\n torch.nn.utils.clip_grad_norm_(self.policy.parameters(), self.max_grad_norm)\n self.optimizer.step()\n # Copy new weights into old policy:\n self.old_policy.load_state_dict(self.policy.state_dict())", "def pathProb(self, path):\n # Establish initial state distribution.\n estState = []\n for s in range(self.P):\n estState.append(self.initial(path[0][0], s))\n logProb = 0\n for step in range(1, len(path)):\n # Calculate a softmax probability that the agent uses each alpha\n # vector, then sort by action.\n lastF = path[step-1][0]\n lastP = path[step-1][1]\n thisF = path[step][0]\n thisP = path[step][1]\n\n # These are log probs.\n actionProbs = [0.0]*self.A\n totalWeight = float('-inf')\n maxScore = float('-inf')\n for action in range(self.A):\n score = self.valueLookAhead(lastF, estState, action)\n maxScore = max(score, maxScore)\n actionProbs[action] = self.tau * score\n totalWeight = logAdd(totalWeight, self.tau * score)\n # Tally up the probability that the agent goes to the correct state.\n pTrans = 0\n actionTable = {}\n for action in range(self.A):\n nextSTable = self.trans(lastF, lastP)[action]\n if not (thisF, thisP) in nextSTable:\n continue\n pThisAction = nextSTable[(thisF, thisP)] * \\\n math.exp(actionProbs[action] - totalWeight)\n actionTable[action] = pThisAction\n pTrans += pThisAction\n if pTrans == 0:\n return float('-inf')\n logProb += math.log(pTrans)\n\n # Choose which action we are taking.\n for action in actionTable:\n actionTable[action] /= pTrans\n thisAction = randomSample(actionTable) #random!\n\n # Update the agent's guess of the hidden states.\n nextEstState = [0.0]*self.P\n thisObs = randomSample(self.obs(lastF, lastP)) #random!\n for guessP in range(self.P):\n # What is the probability we are in state guessP?\n pGuessP = estState[guessP] * self.obs(lastF, guessP)[thisObs]\n # Given that we are in state guessP, what is the probability that\n # we move to each new state in P?\n newStates = self.trans(lastF, guessP)[thisAction]\n for newState, prob in newStates.iteritems():\n if newState[0] == thisF:\n nextEstState[newState[1]] += pGuessP * prob\n # Normalize nextEstState.\n estState = [i/sum(nextEstState) for i in nextEstState]\n return logProb", "def prob_given_state(self, start=1, end=len(self.T)): # , start, end):\n\n # for state_index in range(len(self.tagset)):\n # self.alpha[1][state_index] = 0\n\n raise NotImplementedError", "def simulate(params,n_states,n_trials,env = \"rich\", policy=\"softmax\",\\\n D=0.5, mod = \"constant\",thresh = 0, k=1,rnd_seeds = 
None, V0=0.0, full=False,\n rmag = 1, lmag = 0):\n\n\tdef calc_D(state):\n\t\t\"\"\"\n\t\tcalculates D for the current trial and returns\n\t\tthe updated state tracker for D and respective betas\n\n\t\tD represents dopamine levels (equivalent of rho in OpAL)\n\t\tScales between 0 and 1, with 1 high level of DA\n\t\t\"\"\"\n\t\tif t < thresh:\n\t\t\tstate.D_g[t] = 0.5\n\t\t\tstate.D_n[t] = 0.5\n\t\telse:\n\t\t\tif mod == \"constant\":\n\t\t\t\tstate.D_g[t] = D\n\t\t\t\tstate.D_n[t] = 1-D\n\t\t\tif mod == \"value\":\n\t\t\t\t# NOTE: if rmag and lmag is 1/0, can just use V\n\t\t\t\t# average of two actions\n\t\t\t\tV = np.mean(1/2*(state.QG[t,:] - state.QN[t,:])) # state average(?) \n\t\t\t\tV = 1/(1 + np.exp(-V*k)) # translate between 0 and 1\n\t\t\t\tstate.D_g[t] = V \n\t\t\t\tstate.D_n[t] = 1 - V\n\t\treturn state\n\n\n\tdef generate_state():\n\t\t\"\"\"\n\t\tGet appropriate reward probabilities and magnitudes\n\t\tfor the specified environment type\n\t\t\"\"\"\n\n\t\tprobs = calc_probs(env)\n\t\tn_options = len(probs)\n\n\t\t# feedback for agent\n\t\tr_mag = np.zeros(n_options) + rmag\n\t\tl_mag = np.zeros(n_options) + lmag\n\n\t\tnew_state = Bogacz(n_trials, n_options, probs, r_mag, l_mag, V0=V0)\n\t\treturn new_state\n\n\n\t# learning rate, damping, decay, softmax temp\n\talpha_a, epsilon, lbda, beta = params\n\tstates = []\n\n\t# do the thing\n\tfor s in np.arange(n_states):\n\n\t\t# check if random seed provided\n\t\tif rnd_seeds is not None:\n\t\t\trandom.seed(rnd_seeds[s])\n\t\t\tnp.random.seed(rnd_seeds[s])\n\n\t\tstate = generate_state()\n\t\tfor t in range(n_trials):\n\n\t\t\tstate.idx = t\n\t\t\tstate=calc_D(state)\t\t\t\t\t# get D\n\t\t\tstate.policy_softmax(beta)\n\t\t\tstate.act(alpha_a, epsilon, lbda)\t# update \n\n\t\t\tif full:\n\t\t\t\tstate.update_other_actions(alpha_a, epsilon, lbda)\n\n\t\tstates.append(state)\t\t\t\t\t# save sim\n\n\treturn states", "def calculate_policy(self, state):\n # short aliases\n s = state # s stands for state\n g = self.config['gamma'] # g stands for gamma\n n = self.action_space.n # n stands for the number of actions\n pi_s = self.policy[state] # pi_s stands for the policy in state s\n\n sum_weights = sum(self.weights[s])\n\n # the policy is a probability vector, giving the probability of each action\n pi_s = [((1 - g) * w / sum_weights) + (g / n) for w in self.weights[s]]\n # print(state, pi_s)\n return pi_s", "def policy_evaluation(P, nS, nA, policy, gamma=0.9, tol=1e-8):\n value_function = np.zeros(nS)\n ############################\n # YOUR IMPLEMENTATION HERE #\n def next_state_reward(P,state,action,gamma,value_function):\n sum_reward=0\n for p,nextS,r,boolean_v in P[state][action]:\n sum_reward+=p*( r + gamma* value_function[nextS])\n #print(sum_reward) \n return sum_reward\n\n while True:\n delta=0;\n for state in range(nS):\n new_value=0;\n for action in range(nA):\n sum_reward=next_state_reward(P,state,action,gamma,value_function)\n new_value+=policy[state][action]*sum_reward\n delta= max(delta, abs(new_value-value_function[state]))\n value_function[state] = new_value\n #print(value_function)\n if(delta < tol):\n break\n\n ############################\n return value_function", "def maxEntIRL(trans_mat, state_features, demos, seed_weights, n_epochs, horizon, learning_rate):\n\tfeature_exp = find_feature_expectations(state_features, demos)\n\n\tn_states = np.shape(trans_mat)[0]\n\tn_actions = np.shape(trans_mat)[1]\n\n\tn_features = np.shape(state_features)[1]\n\tr_weights = np.zeros(n_features) + seed_weights\n\n\t# Probability for 
initial state trajectories\n\tstart_state_count = np.zeros(n_states)\n\tfor demo in demos:\n\t\tstart_state_count[demo[0]] += 1\n\t\tp_start_dist = start_state_count / np.shape(demos)[0]\n\n\t# Iterate\n\tfor epoch in range(n_epochs):\n\t\t# print(\"epoch: {}\".format(epoch))\n\n\t\t# Calculate Max Ent Policy\n\t\tpolicy = calcMaxEntPolicy(trans_mat, horizon, r_weights, state_features)\n\n\t\t# Calculate Expected State Frequency\n\t\texpected_svf = calcExpectedStateFreq(trans_mat, horizon, p_start_dist, policy)\n\n\t\t# Update reward weights using gradient\n\t\tgradient = feature_exp - expected_svf.T.dot(state_features)\n\t\tr_weights += learning_rate * gradient\n\t\tprint epoch, np.linalg.norm(gradient)\n\n\tprint policy\n\tprint policy.argmax(axis=1)\n\treturn r_weights", "def policy_gradient(state, weight):\n # first calculate policy using the policy function above\n Policy = policy(state, weight)\n # get action from policy\n action = np.random.choice(len(Policy[0]), p=Policy[0])\n # reshape single feature from policy\n s = Policy.reshape(-1, 1)\n # apply softmax function to s and access value at action\n softmax = (np.diagflat(s) - np.dot(s, s.T))[action, :]\n # calculate the dlog as softmax / policy at action\n dlog = softmax / Policy[0, action]\n # find gradient from input state matrix using dlog\n gradient = state.T.dot(dlog[None, :])\n # return action and the policy gradient\n return action, gradient", "def act(self, state: State) -> Distribution:\n return self._gen_behaviour(self._gen_policy_params(state))", "def policy_eval(policy, env, discount_factor=1.0, theta=0.00001):\n\n # Start with a random (all 0) value function\n V = np.zeros(env.nS)\n \n while True: #any(Vdiff > theta):\n \n delta_V = 0\n\n for i in range(env.nS):\n \n # need to calculate the value of taking each of the available actions\n\n action_val = np.zeros(env.nA)\n\n for a in range(env.nA):\n \n # get transition tuple for this state and action\n tup = env.P[i][a][0]\n \n # calculate the value of this action/state? 
\n # value = reward + gamma * (prob * V[next_state])\n # error here I think, probability missing\n action_val[a] = tup[0] * (tup[2] + discount_factor * V[tup[1]])\n \n \n Vold = V[i]\n Vnew = np.dot(policy[i],action_val)\n delta_V = max(delta_V,np.abs(Vnew - Vold))\n # get state value by multiplying probability of taking action (policy) by action value\n V[i] = Vnew\n \n #print(action_val)\n #print(policy[i])\n #print(V[i])\n #print(delta_V)\n\n # function only works if I use this delta rule to terminate\n if delta_V < theta:\n break\n return np.array(V)", "def forwardVariableGeneration(self):\n self.alpha = zeros((self.noOfEmmittingStates+2, self.T + 1))\n\n # initialistation\n self.alpha[0,0] = 1.0\n self.alpha[1:,0] = 0.0\n self.alpha[0,1:] = 0.0\n\n # main recursion\n for t in range(1, self.T+1):\n for j in range(1, self.noOfEmmittingStates+1):\n partialSum = 0\n for k in range(self.noOfEmmittingStates+1):\n partialSum += (self.alpha[k, t-1] * self.transitionMatrix[k, j-1])\n self.alpha[j, t] = self.b[j-1, t-1] * partialSum\n # since must end in final state, last alpha for states with zero transition\n # prob to last state must be zero?\n for row in range(self.transitionMatrix.shape[0]):\n if self.transitionMatrix[row,-1] == 0.0:\n self.alpha[row,-1] = 0.0\n # fwd prob variable for final state at 'last' timestep gets bumped into the\n # final column to save having a needless column\n partialSum = 0\n for k in range(self.noOfEmmittingStates+1):\n partialSum += (self.alpha[k,-1] * self.transitionMatrix[k,-1])\n self.alpha[-1,-1] = partialSum\n\n # likelihood of observed sequence, p(O|lambda)\n self.observationLikelihood = self.alpha[-1,-1]", "def update_policy(env, policy, V, discount_factor):\n\n for state in range(env.nS):\n # for a given state compute state-action value.\n action_values = one_step_lookahead(env, state, V, discount_factor)\n\n # choose the action which maximizes the state-action value.\n policy[state] = np.argmax(action_values)\n\n return policy", "def policy_improvement(P, nS, nA, value_from_policy, policy, gamma=0.9):\n\n\tnew_policy = np.zeros(nS, dtype='int')\n\n\t############################\n\t# YOUR IMPLEMENTATION HERE #\n\tfor s in range(nS):\n\t\tq_values = np.zeros(nA)\n\t\tfor action in range(nA):\n\t\t\tcurrent_q_value = 0\n\t\t\tfor transition in P[s][action]:\t# for every possible transition\n\t\t\t\t# print(len(P[s][action]))\n\t\t\t\tprobability = transition[0]\n\t\t\t\treward = transition[2]\n\t\t\t\tnext_state = transition[1]\n\t\t\t\tvalue_next_state = value_from_policy[next_state]\n\n\t\t\t\tcurrent_q_value += probability * (reward + gamma * value_next_state)\n\n\t\t\tq_values[action] = current_q_value\n\n\t\tnew_policy[s] = np.argmax(q_values)\n\n\n\t# print(new_policy)\n\t############################\n\treturn new_policy", "def agent_start(self, state):\n self.sum_rewards = 0\n self.episode_steps = 0\n self.last_state = np.array(state)\n self.last_action = self.policy(self.last_state)\n return self.last_action", "def random_start_probs(self) -> np.ndarray:\n return self.random_state.dirichlet(np.ones(self.n_states), size=1).flatten()", "def policy_iteration(env, discount_factor=0.999, max_iteration=1000):\n # intialize the state-Value function\n V = np.zeros(env.nS)\n delta = []\n\n # intialize a random policy\n policy = np.random.randint(0, 4, env.nS)\n policy_prev = np.copy(policy)\n\n for i in range(max_iteration):\n\n # evaluate given policy\n V = policy_eval(env, policy, V, discount_factor)\n\n # improve policy\n policy = update_policy(env, 
policy, V, discount_factor)\n\n        delta.append(np.sum(np.abs(policy-policy_prev)))\n\n        # if policy not changed over 10 iterations it converged.\n        if i % 10 == 0:\n            if (np.all(np.equal(policy, policy_prev))):\n                print('policy converged at iteration %d' % (i + 1))\n                break\n            policy_prev = np.copy(policy)\n\n    return V, policy, delta", "def getPolicy(self, state):\n    \"*** YOUR CODE HERE ***\"\n    # OUR CODE HERE\n    possibleActions = self.mdp.getPossibleActions(state)\n    #checking for terminal state (no possible actions)\n    if len(possibleActions) == 0: \n      return None\n    \n    #attempt at using the Counter\n    eValsActions = util.Counter()\n    for action in possibleActions:\n      for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n        eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n    \n    return eValsActions.argMax()\n    \n    #fail attempt using lists :(\n    \"\"\"\n    #list to hold the expected value of the actions\n    eValsActions = []\n    #iterate through all actions and their transition states\n    for action in possibleActions:\n      for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n        #expected value of reward with discount * the value of the transitions\n        eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n    \n    #now iterate through and find the action with the best value\n    #(that will be the best action)\n    maxVal = -float(\"inf\")\n    bestAction = None\n    for action in possibleActions:\n      if eValsActions[action] > maxVal:\n        maxVal = eValsActions[action]\n        bestAction = action\n    \"\"\"\n    return action\n    # END OUR CODE", "def _starting_prob(self, s):\n        return self._starting_state_distribution.pdf(s)", "def initializeDistribution(self):\n    self.steadyStatePb = self.computeSteadyStatePb(self.transition)\n    for key, value in self.mapping.items():\n      try:\n        self.mapping[key] = self.steadyStatePb[value - 1]\n      except IndexError:\n        self.raiseAnError(IOError, \"Index \",value, \" for outcome \", key, \" is out of bounds! 
Maximum index should be \", len(self.steadyStatePb))\n super().initializeDistribution()", "def policy_improvement(P, nS, nA, value_from_policy, gamma=0.9):\n\n new_policy = np.ones([nS, nA]) / nA\n\t############################\n\t# YOUR IMPLEMENTATION HERE #\n #iteration_policy=new_policy\n for state in range(nS):\n #current_policy=new_policy[state] \n action_policy = np.zeros(nA) \n for action in range(nA):\n for p,nextS,r,boolean_v in P[state][action]:\n action_policy[action] += p*( r + gamma* value_from_policy[nextS])\n #print(action_policy)\n updated_policy=np.zeros(nA)\n updated_policy[np.argmax(action_policy)]= 1\n #print(updated_policy) \n new_policy[state]=updated_policy\n \n \t############################\n return new_policy", "def create_greedy_policy(self):\n\n def policy_fn(state):\n return self.actor_baseline.predict([[state]])[0][0]\n\n return policy_fn", "def policy_iteration(P, nS, nA, gamma=0.9, tol=10e-3):\n\n\tpolicy = np.zeros(nS, dtype=int)\n\twhile True: # while policy changes for at least one state\n\t\tvalue = policy_evaluation(P, nS, nA, policy, gamma=0.9, tol=1e-3)\n\t\tnew_policy = policy_improvement(P, nS, nA, value, policy, gamma=0.9)\n\t\tif np.linalg.norm((policy - new_policy), ord=1) == 0:\n\t\t\tbreak\n\t\tpolicy = new_policy\n\treturn value, policy", "def policy_improvement(P, nS, nA, value_from_policy, gamma=0.9):\n\n new_policy = np.ones([nS, nA]) / nA\n for state_idx in range(nS):\n new_action_idx = np.argmax(calc_action_function(state_idx, value_from_policy, nA, P, gamma))\n new_policy[state_idx] = np.eye(nA)[new_action_idx]\n return new_policy", "def policy_update(self):\n mini_batch = random.sample(self.data_buffer, self.batch_size)\n state_batch = [data[0] for data in mini_batch]\n mcts_probs_batch = [data[1] for data in mini_batch]\n winner_batch = [data[2] for data in mini_batch]\n loss, entropy = self.policy_value_net.train_step(\n state_batch,\n mcts_probs_batch,\n winner_batch,\n self.learn_rate)\n return loss, entropy", "def get_target_distribution(\n next_states, rewards, mask, gamma, target_estimator, support\n):\n bsz = rewards.shape[0]\n bsz_ = next_states.shape[0]\n bin_no = support.shape[0]\n v_min, v_max = support[0].item(), support[-1].item()\n delta_z = (v_max - v_min) / (bin_no - 1)\n\n probs = target_estimator(next_states, probs=True)\n qs = torch.mul(probs, support.expand_as(probs))\n argmax_a = qs.sum(2).max(1)[1].unsqueeze(1).unsqueeze(1)\n action_mask = argmax_a.expand(bsz_, 1, bin_no)\n _qa_probs = probs.gather(1, action_mask).squeeze()\n\n # Next-states batch can be smaller so we scatter qa_probs in\n # a tensor the size of the full batch with each row summing to 1\n qa_probs = torch.eye(bsz, bin_no, device=_qa_probs.device)\n qa_probs.masked_scatter_(mask.expand_as(qa_probs), _qa_probs)\n\n # Mask gamma and reshape it torgether with rewards to fit p(x,a).\n rewards = rewards.expand_as(qa_probs)\n gamma = (mask.float() * gamma).expand_as(qa_probs)\n\n # Compute projection of the application of the Bellman operator.\n bellman_op = rewards + gamma * support.unsqueeze(0).expand_as(rewards)\n bellman_op = torch.clamp(bellman_op, v_min, v_max)\n\n # Compute categorical indices for distributing the probability\n m = torch.zeros(bsz, bin_no, device=qa_probs.device)\n b = (bellman_op - v_min) / delta_z\n l = b.floor().long()\n u = b.ceil().long()\n\n # Fix disappearing probability mass when l = b = u (b is int)\n l[(u > 0) * (l == u)] -= 1\n u[(l < (bin_no - 1)) * (l == u)] += 1\n\n # Distribute probability\n \"\"\"\n for i in 
range(bsz):\n for j in range(self.bin_no):\n uidx = u[i][j]\n lidx = l[i][j]\n m[i][lidx] = m[i][lidx] + qa_probs[i][j] * (uidx - b[i][j])\n m[i][uidx] = m[i][uidx] + qa_probs[i][j] * (b[i][j] - lidx)\n for i in range(bsz):\n m[i].index_add_(0, l[i], qa_probs[i] * (u[i].float() - b[i]))\n m[i].index_add_(0, u[i], qa_probs[i] * (b[i] - l[i].float()))\n \"\"\"\n # Optimized by https://github.com/tudor-berariu\n offset = (\n torch.linspace(0, ((bsz - 1) * bin_no), bsz, device=qa_probs.device)\n .long()\n .unsqueeze(1)\n .expand(bsz, bin_no)\n )\n\n m.view(-1).index_add_(\n 0, (l + offset).view(-1), (qa_probs * (u.float() - b)).view(-1)\n )\n m.view(-1).index_add_(\n 0, (u + offset).view(-1), (qa_probs * (b - l.float())).view(-1)\n )\n return m, probs", "def uniform_start_probs(self) -> np.ndarray:\n return np.ones(self.n_states) / self.n_states", "def update_policy(self):\n # this is update_policy \n # sample batch of 32 from the memory\n batch_of_samples = self.replay_memory.sample(batch_size=32)\n current_state_samples = batch_of_samples['current_state_samples']\n next_state_samples = batch_of_samples['next_state_samples']\n #print type(current_state_samples[0])\n #print current_state_samples\n\n # fetch stuff we need from samples 32*84*84*4\n current_state_images = np.zeros([1, 84, 84, 4])\n #print current_state_samples\n current_state_images[0,...] = np.dstack([sample.state for sample in current_state_samples])\n\n next_state_images = np.zeros([1, 84, 84, 4])\n next_state_images[0,...] = np.dstack([sample.state for sample in next_state_samples])\n\n # preprocess\n current_state_images = self.preprocessor.process_batch(current_state_images)\n next_state_images = self.preprocessor.process_batch(next_state_images)\n # print \"current_state_images {} max {} \".format(current_state_images.shape, np.max(current_state_images))\n #print current_state_images.shape\n q_current = self.q_network.predict(current_state_images,batch_size=self.batch_size) # 32*num_actions\n q_next = self.q_network.predict(next_state_images,batch_size=self.batch_size)\n\n # targets\n y_targets_all = q_current #1*num_actions\n #print y_targets_all.shape # [1,6]\n idx = 0 \n last_sample = current_state_samples[-1]\n if last_sample.is_terminal:\n y_targets_all[idx, last_sample.action] = last_sample.reward\n else:\n if self.mode == 'vanilla':\n y_targets_all[idx, last_sample.action] = np.float32(last_sample.reward) + self.gamma*np.max(q_next[idx])\n if self.mode == 'double': \n y_targets_all[idx, last_sample.action] = np.float32(last_sample.reward) + self.gamma*q_next[idx, np.argmax(q_current[idx])] \n\n loss = self.q_network.train_on_batch(current_state_images, np.float32(y_targets_all))\n\n with tf.name_scope('summaries'):\n self.tf_log_scaler(tag='train_loss', value=loss, step=self.iter_ctr)\n\n if not (self.iter_ctr % self.log_loss_every_nth):\n self.dump_train_loss(loss)\n\n # if (self.iter_ctr > (self.num_burn_in+1)) and not(self.iter_ctr%self.target_update_freq):\n # # copy weights\n # print \"Iter {} Updating target Q network\".format(self.iter_ctr)\n # self.target_q_network.set_weights(self.q_network.get_weights())\n # [self.target_q_network.trainable_weights[i].assign(self.q_network.trainable_weights[i]) \\\n # for i in range(len(self.target_q_network.trainable_weights))]", "def __call__(self, state):\n if random.random() > self._epsilon:\n return self._max_policy(state)\n return random.choice(np.arange(self._action_size))", "def forward_step(self, state, return_probs_vector=False):\n state_id = 
self.env.get_num_from_state(state)\n probs = (\n sum(\n self.env.T_matrix[state_id * self.env.nA + a, :]\n * self.policy[state_id, a]\n for a in range(self.env.nA)\n )\n .toarray()\n .squeeze()\n )\n if return_probs_vector:\n return probs\n else:\n return self._get_states(probs)", "def mc_importance_sampling(env, behavior_policy, target_policy, num_episodes, discount_factor=1.0,\n sampling_function=sample_episode):\n\n # Keeps track of current V and count of returns for each state\n # to calculate an update.\n V = defaultdict(float)\n returns_count = defaultdict(float)\n \n # YOUR CODE HERE\n \n epsilon = 1e-6\n \n \n # Due to the structure of the gym environment, it is not trivial to map the entire state space\n # so we only map the state space of the BlackJack env\n count_zeros = False\n if (isinstance(env.observation_space, gym.spaces.tuple_space.Tuple)):\n if (len(env.observation_space.spaces) == 3):\n count_zeros = True\n \n state_tuples = [(first, second, bool(third)) for first in range(2,env.observation_space.spaces[0].n)\n for second in range(1,env.observation_space.spaces[1].n)\n for third in range(env.observation_space.spaces[2].n)]\n returns = {state_tuple: [] for state_tuple in state_tuples}\n \n if count_zeros:\n returns_count = Counter({state_tuple: 0 for state_tuple in state_tuples})\n \n for episode in tqdm(range(num_episodes)): # num_episodes\n \n env.reset()\n states, actions, rewards, dones = sampling_function(env, behavior_policy)\n p_return = 0\n \n pi = target_policy.get_probs(states, actions)\n b = (behavior_policy.get_probs(states, actions) + epsilon)\n pi_div_b = target_policy.get_probs(states, actions) / (behavior_policy.get_probs(states, actions) + epsilon)\n\n for index in reversed(range(len(states))): # Reverse so we loop in opposite direction through timesteps\n c_state = states[index]\n c_action = actions[index]\n c_reward = rewards[index]\n\n p_return = discount_factor * p_return + c_reward\n W = np.cumprod(pi_div_b[index:])\n \n p_return = W[0] * p_return\n if len(returns[c_state]) == 0:\n returns[c_state] = [p_return]\n else:\n returns[c_state].append(p_return)\n\n if count_zeros:\n returns_count[c_state] += 1\n \n V = {state: np.nan_to_num(np.mean(value)) for (state, value) in returns.items()}\n \n if count_zeros:\n zero_counts = [True for item in list(returns_count) if returns_count[item] == 0]\n no_of_zero = sum(zero_counts)\n if no_of_zero>0:\n print(f\"Did not reach {no_of_zero} states in MC estimation. 
Value estimation for these states is missing.\")\n else:\n print(\"Reached all states in MC estimation.\")\n \n return V", "def policyIteration(P,R,gamma,theta,initial_policy,max_iter=1000000):\n policy_stable = False\n policy = np.copy(initial_policy)\n num_iter = 0\n \n while (not policy_stable) and num_iter < max_iter:\n num_iter += 1\n print('Policy Iteration: ', num_iter)\n # policy evaluation\n v = policyEval(policy,P,R,gamma,theta)\n # policy improvement\n policy, policy_stable = policyImprv(P,R,gamma,policy,v)\n return policy, v", "def act(self, state):\n state = torch.from_numpy(state).float().unsqueeze(0)\n logits = self.forward(state)\n distribution = torch.distributions.Categorical(logits=logits)\n action = distribution.sample()\n log_prob = distribution.log_prob(action).unsqueeze(-1)\n entropy = distribution.entropy().unsqueeze(-1)\n return action.item(), log_prob, entropy", "def policy_evaluation(P, nS, nA, policy, gamma=0.9, tol=1e-8):\n value_function = np.zeros(nS)\n\n while True:\n change = 0\n for state_idx in range(nS):\n v = 0\n for action_idx, action_prob in enumerate(policy[state_idx]): # for each state in nA\n for probability, nextstate, reward, terminal in P[state_idx][action_idx]:\n v += action_prob * probability * (reward + gamma * value_function[nextstate])\n change = max(change, abs(v - value_function[state_idx]))\n value_function[state_idx] = v\n if change < tol:\n break\n return value_function", "def _precompute_probabilities(self):\n\n d_graph = self.d_graph\n first_travel_done = set()\n\n nodes_generator = self.graph.nodes() if self.quiet \\\n else tqdm(self.graph.nodes(), desc='Computing transition probabilities')\n\n for source in nodes_generator:\n\n # Init probabilities dict for first travel\n if self.PROBABILITIES_KEY not in d_graph[source]:\n d_graph[source][self.PROBABILITIES_KEY] = dict()\n\n for current_node in self.graph.neighbors(source):\n\n # Init probabilities dict\n if self.PROBABILITIES_KEY not in d_graph[current_node]:\n d_graph[current_node][self.PROBABILITIES_KEY] = dict()\n\n unnormalized_weights = list()\n first_travel_weights = list()\n d_neighbors = list()\n\n # Calculate unnormalized weights\n for destination in self.graph.neighbors(current_node):\n\n p = self.sampling_strategy[current_node].get(self.P_KEY,\n self.p) if current_node in self.sampling_strategy else self.p\n q = self.sampling_strategy[current_node].get(self.Q_KEY,\n self.q) if current_node in self.sampling_strategy else self.q\n\n if destination == source: # Backwards probability\n ss_weight = self.graph[current_node][destination].get(self.weight_key, 1) * 1 / p\n elif destination in self.graph[source]: # If the neighbor is connected to the source\n ss_weight = self.graph[current_node][destination].get(self.weight_key, 1)\n else:\n ss_weight = self.graph[current_node][destination].get(self.weight_key, 1) * 1 / q\n\n # Assign the unnormalized sampling strategy weight, normalize during random walk\n unnormalized_weights.append(ss_weight)\n if current_node not in first_travel_done:\n first_travel_weights.append(self.graph[current_node][destination].get(self.weight_key, 1))\n d_neighbors.append(destination)\n\n # Normalize\n unnormalized_weights = np.array(unnormalized_weights)\n d_graph[current_node][self.PROBABILITIES_KEY][\n source] = unnormalized_weights / unnormalized_weights.sum()\n\n if current_node not in first_travel_done:\n unnormalized_weights = np.array(first_travel_weights)\n d_graph[current_node][self.FIRST_TRAVEL_KEY] = unnormalized_weights / 
unnormalized_weights.sum()\n first_travel_done.add(current_node)\n\n # Save neighbors\n d_graph[current_node][self.NEIGHBORS_KEY] = d_neighbors", "def max_weight_policy(self, state: types.StateSpace, eps: float = 1e-6) \\\n -> types.ActionSpace:\n num_activities = self.env.constituency_matrix.shape[1]\n z_star = np.zeros((num_activities, 1))\n for s in self.env.constituency_matrix:\n ind_activities_s = np.argwhere(s > 0)\n max_theta_s, list_max_activity = get_max_gain_station_s(\n ind_activities_s, state, self.env.job_generator.buffer_processing_matrix,\n self.weight_per_buffer)\n if max_theta_s < -eps:\n z_star[ind_activities_s, :] = 0\n else:\n num_positive_actions = 0\n ind_positive_actions = []\n for j in list_max_activity:\n ind_drained_buffer = np.argwhere(\n self.env.job_generator.buffer_processing_matrix[:, j] < 0)\n if state[ind_drained_buffer] >= 1 - eps:\n ind_positive_actions.append(j)\n num_positive_actions += 1\n if num_positive_actions > 0:\n z_star[ind_positive_actions] = 1 / num_positive_actions\n return z_star", "def priorProb(self, state):\n actions = []\n for i in range(0, 10):\n actions.append(((i, i+1), random.uniform(0, 1))) \n \n return actions", "def policies(self, QTable, epsilon, state, next_states, action_to_do): # Inspiration from https://www.geeksforgeeks.org/q-learning-in-python/?fbclid=IwAR1UXR88IuJBhhTakjxNq_gcf3nCmJB0puuoA46J8mZnEan_qx9hhoFzhK8\r\n num_actions = 5 # 5 actions-value, [moved_out, into_goal, send_opp_home, send_self_home, move_token] \r\n def epsilonGreedyPolicy(): \r\n tmp_state = str(state.state[0])\r\n valid_actions = np.append(action_to_do, True) # the True appended is move_token\r\n valid_act_len = len(np.where(valid_actions==True)[0])\r\n\r\n Action_probabilities = np.ones(num_actions, dtype = float) * epsilon / valid_act_len # divides probability based on number of valid actions and epsilon (each 0.025 if 4 actions) \r\n Action_probabilities = np.multiply(Action_probabilities, valid_actions)\r\n\r\n # If same values in QTable choose random valid action \r\n best_action = np.argmax(QTable[tmp_state]) # Find index of action which gives highest QValue\r\n # Check if valid action else find new best action\r\n if not valid_actions[best_action]:\r\n actions = np.argsort(-QTable[tmp_state]) # descending order of action values\r\n for i in range(len(valid_actions)):\r\n if valid_actions[actions[i]]:\r\n best_action = actions[i]\r\n break\r\n\r\n Action_probabilities[best_action] += (1.0 - epsilon) # Assigns rest probability to best action so probability sums to 1\r\n\r\n return Action_probabilities \r\n\r\n def greedyPolicy():\r\n tmp_state = str(state.state[0])\r\n valid_actions = np.append(action_to_do, True) # the True appended is move_token\r\n\r\n Action_probabilities = np.zeros(num_actions, dtype = float)\r\n\r\n best_action = np.argmax(QTable[tmp_state]) # Find index of action which gives highest QValue\r\n # Check if valid action else find new best action\r\n if not valid_actions[best_action]:\r\n actions = np.argsort(-QTable[tmp_state]) # descending order of action values\r\n for i in range(len(valid_actions)):\r\n if valid_actions[actions[i]]:\r\n best_action = actions[i]\r\n break\r\n\r\n\r\n Action_probabilities[best_action] += 1.0\r\n return Action_probabilities\r\n\r\n\r\n if(self.__chosenPolicy == \"epsilon greedy\"):\r\n return epsilonGreedyPolicy \r\n if(self.__chosenPolicy == \"greedy\"):\r\n return greedyPolicy", "def build_posterior(self, policy, num_episodes, max_episode_length, test_every=np.inf, states_V_target=()):\n\n 
statistics = trange(num_episodes)\n test_error = np.array([])\n\n for e in statistics:\n is_terminal = False\n num_steps = 0\n state = self.env.reset()\n action = policy(state)\n \n state_sequence = np.empty((state.shape[0], max_episode_length+1), dtype=np.float64, order='C')\n state_sequence[:, 0] = state[:,0]\n reward_sequence = np.empty(max_episode_length, dtype=np.float64, order='C')\n \n while ((num_steps < max_episode_length) and (not is_terminal)):\n num_steps+=1\n state, reward, is_terminal = self.env.step(action)\n action = policy(state)\n\n state_sequence[:, num_steps] = state[:,0]\n reward_sequence[num_steps-1] = reward\n\n state_sequence = state_sequence[:, 0:(num_steps+1)]\n reward_sequence = reward_sequence[0:num_steps]\n\n if (self.D.shape[1]==0):\n\n traj = state_sequence[:,0][:,np.newaxis]\n self.D = traj\n self.V_D = self.V_mu(state_sequence[:,0][:,np.newaxis])\n self.K_inv = 1/self.kernel(traj, traj)\n self.A = np.array([[1]])\n self.alpha_ = np.array([[0]])\n self.C_= np.array([[0]])\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n\n self.update(state_sequence, reward_sequence)\n statistics.set_postfix(epi_length=num_steps, dict_size=self.D.shape[1], cumm_cost=np.sum(reward_sequence))\n if (e%test_every==0 and len(states_V_target)==2):\n V = self.get_value_function(states_V_target[0])\n test_error = np.concatenate((test_error, np.array([np.mean(np.abs(V - states_V_target[1]))])))\n\n return test_error", "def _prepare_policy_input(self, state, legal_actions, step_rewards=None):\n check_for_nans(\"Raw state\", state)\n state_ = state.view(-1)\n\n if step_rewards is None or not step_rewards:\n step_rewards = [None for _ in legal_actions]\n batch_states = []\n\n assert legal_actions\n assert step_rewards\n assert len(legal_actions) == len(step_rewards)\n\n for action, log_likelihood in zip(legal_actions, step_rewards):\n action_ = self.action_factor * torch.tensor([action]).to(self.device, self.dtype)\n\n i, j = self.env.unwrap_action(action)\n pi = state[i, :]\n pj = state[j, :]\n check_for_nans(\"Individual momenta\", pi, pj)\n\n if self.log_likelihood_feature:\n if log_likelihood is None:\n log_likelihood = self._parse_action(action, from_which_env=\"real\")\n if not np.isfinite(log_likelihood):\n log_likelihood = 0.0\n log_likelihood = np.clip(log_likelihood, self.reward_range[0], self.reward_range[1])\n log_likelihood_ = self.log_likelihood_factor * torch.tensor([log_likelihood]).to(\n self.device, self.dtype\n )\n check_for_nans(\"Log likelihood as policy input\", log_likelihood_)\n\n combined_state = torch.cat((action_, pi, pj, log_likelihood_, state_), dim=0)\n check_for_nans(\"Individual policy input entry\", combined_state)\n else:\n combined_state = torch.cat((action_, pi, pj, state_), dim=0)\n check_for_nans(\"Individual policy input entry\", combined_state)\n\n batch_states.append(combined_state.unsqueeze(0))\n\n batch_states = torch.cat(batch_states, dim=0)\n check_for_nans(\"Concatenated policy input\", batch_states)\n return batch_states", "def getPolicy(self, state):\n \"*** YOUR CODE HERE ***\"\n possibleActions = self.mdp.getPossibleActions(state)\n if len(possibleActions) == 0: return None\n results = []\n for action in possibleActions:\n total = 0\n for (nextState, prob) in self.mdp.getTransitionStatesAndProbs(state,action):\n total += (prob * self.values[nextState])\n results.append(total)\n maxIndex = max(enumerate(results), key=lambda x: x[1])[0]\n #print(\"here\")\n return possibleActions[maxIndex]", "def policy_evaluation(P, nS, 
nA, policy, gamma=0.9, tol=1e-3):\n\n\tvalue_function = np.zeros(nS)\n\n\t############################\n\t# YOUR IMPLEMENTATION HERE #\n\n\twhile True:\n\t\tprevious_value_function = np.copy(value_function)\n\t\tfor s in range(nS):\n\t\t\taction = policy[s]\t# action specified by the policy\n\t\t\tcurrent_value = 0\n\t\t\tfor transition in P[s][action]:\t# for every possible transition\n\t\t\t\t# print(len(P[s][action]))\n\t\t\t\tprobability = transition[0]\n\t\t\t\treward = transition[2]\n\t\t\t\tnext_state = transition[1]\n\t\t\t\tvalue_next_state = previous_value_function[next_state]\n\n\t\t\t\t# print(\"probability: \" + str(probability) + \"reward: \" + str(reward) + \"value next state: \" + str(value_next_state))\n\n\t\t\t\tcurrent_value += probability * (reward + gamma * value_next_state)\n\t\t\t\n\t\t\tvalue_function[s] = current_value\n\n\t\t# find the maximum difference between the previous value and the current value\n\t\tdifArray = np.subtract(value_function, previous_value_function)\n\t\tfor i in range(nS):\n\t\t\tdifArray[i] = abs(difArray[i])\n\t\tmaxDif = np.amax(difArray)\n\n\t\tif (maxDif < tol):\n\t\t\tbreak\n\n\t############################\n\treturn value_function", "def update(self, state, action, nextState, reward):\n    \"\"\"Description:\n    Use second equation in slide 71 of MDP\n    Adjust weights of active features depending on the transition \n    \"\"\"\n    \"\"\" YOUR CODE HERE \"\"\"\n    feat = self.featExtractor.getFeatures(state, action)\n\n    # if weight is empty, then weights need to be initialized to 1 for all features\n    # According to which Extractor the user chooses, the weight counter will have an equal number of keys.\n    if len(self.weight) == 0:\n      feat = self.featExtractor.getFeatures(state, action)\n      self.weight.incrementAll(feat.keys(), 1)\n    \n    maxQns = self.getValue(nextState)\n    if maxQns is None:\n      maxQns = 0\n    Qsa = self.getQValue(state, action)\n    difference = ( reward + self.discountRate * maxQns ) - Qsa\n    \n    for key in self.weight.keys():\n      self.weight[key] += (self.alpha * difference * feat[key])\n    \n    \n    \"\"\" END CODE \"\"\"", "def get_expected_states(qs, B, policy):\n    n_steps = policy.shape[0]\n    n_factors = policy.shape[1]\n\n    # initialise posterior predictive density as a list of beliefs over time, including current posterior beliefs about hidden states as the first element\n    qs_pi = [qs] + [utils.obj_array(n_factors) for t in range(n_steps)]\n    \n    # get expected states over time\n    for t in range(n_steps):\n        for control_factor, action in enumerate(policy[t,:]):\n            qs_pi[t+1][control_factor] = B[control_factor][:,:,int(action)].dot(qs_pi[t][control_factor])\n\n    return qs_pi[1:]", "def agent_step(self, reward, state):\n        prev_val= self.state[self.prevAction]\n        self.state[self.prevAction]=prev_val+self.alpha*(reward-prev_val)\n        val=max(self.state)\n        index=self.state.index(val)\n        self.prevAction=index\n        i=random.uniform(0,1)\n        if i < 1-self.prob:\n            self.prevAction=index\n            return index\n        else:\n            index=random.randint(0,self.num_bandits-1)\n            self.prevAction=index\n            return index", 
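The `update` snippet above is the standard linear (approximate) Q-learning rule: each weight moves by alpha * difference * feature, with difference = (r + gamma * max_a' Q(s',a')) - Q(s,a). A hand-worked instance with invented numbers (alpha, gamma, and the feature names are assumptions for illustration, not values from the snippet):

```python
# Hand-worked linear Q-learning weight update; all numbers are made up.
alpha, gamma = 0.5, 0.9
features = {"bias": 1.0, "dist-to-goal": 0.25}   # f_k(s, a)
weights = {"bias": 1.0, "dist-to-goal": -2.0}    # w_k

q_sa = sum(weights[k] * features[k] for k in features)  # Q(s,a) = 0.5
max_q_next = 2.0                                        # max_a' Q(s',a')
reward = 1.0

difference = (reward + gamma * max_q_next) - q_sa       # 2.8 - 0.5 = 2.3
for k in weights:
    weights[k] += alpha * difference * features[k]

assert abs(weights["bias"] - 2.15) < 1e-9
assert abs(weights["dist-to-goal"] - (-1.7125)) < 1e-9
```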
"def policy (self,forced_actions=None,forced_rewards=None,state_idx=None):\n\t\tif self.gamble:\n\t\t\tself.policy_gamble()\n\t\t\treturn\n\t\tif self.UCB:\n\t\t\tself.policy_UCB(forced_actions,forced_rewards,state_idx)\n\t\t\treturn\n\n\t\tidx = self.idx \t\t\t\t# internal time index of state\n\t\tprobs = self.probs\t\t\t# prob of reward for an action\n\t\tbeta = self.beta\t\t\t# inverse temp \n\n\t\t# calc Act thalamus activation\n\t\tAct = beta*self.Q[idx,:]\n\n\t\t# multioption softmax (invariant to constant offsets)\n\t\tnewAct = Act - np.max(Act)\n\t\texpAct = np.exp(newAct)\n\t\tps = expAct/np.sum(expAct)\n\t\tself.SM[idx,:] = ps\n\t\tcs_ps = np.cumsum(ps)\n\n\t\t# select action\n\t\tif forced_actions is None:\n\t\t\tsample = np.random.random_sample()\n\t\t\tselected = False\n\t\t\tcheck = 0\n\t\t\twhile not selected:\n\t\t\t\tif sample < cs_ps[check]:\n\t\t\t\t\tC = check\n\t\t\t\t\tselected = True\n\t\t\t\telse:\n\t\t\t\t\tcheck = check + 1\n\t\telse:\n\t\t\tC = forced_actions[state_idx,idx]\n\t\tself.C[idx] = C\n\t\t\t\n\t\t# decide whether a reward is delivered\n\t\tif forced_rewards is None:\n\t\t\treward = np.random.binomial(size=1, n=1, p= probs[C])[0]\n\t\telse:\n\t\t\treward = forced_rewards[state_idx,idx]\n\t\tself.R[idx] = reward # indicator that reward was received\n\t\tif reward == 0:\n\t\t\treward = self.l_mag\n\t\telse:\n\t\t\treward = self.r_mag\n\n\t\tPE = reward - self.Q[idx,C]\n\t\tself.PE[idx] = PE", "def act(self, state):\n        state = torch.from_numpy(state).float()\n        logits, values = self.forward(state)\n        distribution = torch.distributions.Categorical(logits=logits)\n        action = distribution.sample()\n        log_prob = distribution.log_prob(action).unsqueeze(-1)\n        entropy = distribution.entropy().unsqueeze(-1)\n        action = action.item() if len(action) == 1 else action.data.numpy()\n        return action, log_prob, entropy, values", "def getPolicy(self, state):\n    \"\"\"Description:\n    Find all of the q-values of the current state, and choose the action \n    with the highest q-value as the optimal policy\n    \"\"\"\n    \"\"\" YOUR CODE HERE \"\"\"\n    legalActions = self.getLegalActions(state)\n    action = None\n    policy = util.Counter() # use counter to store action and its q-value\n    \n    if len(legalActions) == 0:\n      return action\n    \n    for a in legalActions:\n      policy[a] = self.getQValue(state, a)\n    action = policy.argMax()\n    return action\n\n    \"\"\" END CODE \"\"\"", "def compute_expected_return_(domain, N, policy, p_init, s_init):\r\n    pos = p_init\r\n    speed = s_init\r\n    expected_return = 0\r\n\r\n    for i in range(N):\r\n        if domain.terminalState(pos, speed):\r\n            break\r\n        action = policy((pos, speed))\r\n        if isinstance(action,str):\r\n            action = domain.getAction(action)\r\n        new_pos, new_speed = domain.getNextState(pos, speed, action)\r\n        r = domain.getReward(pos, speed, action, new_pos, new_speed)\r\n        expected_return += ((domain.discount_factor)**i)*r\r\n        pos = new_pos\r\n        speed = new_speed\r\n    \r\n    return expected_return", "def positive_forward_satisficing(trial):\r\n    root_nodes = trial.node_map[0].children.copy()\r\n    shuffle(root_nodes)\r\n    states = []\r\n    for node in root_nodes:\r\n        trial_copy = copy.deepcopy(trial)\r\n        states.append(trial_copy)\r\n        node.observe()\r\n        if node.value > 0:\r\n            break\r\n    trial_copy = copy.deepcopy(trial)\r\n    states.append(trial_copy)\r\n    return zip(states, [node.label for node in trial.observed_nodes] + [0])", "def build_posterior(self, policy, num_episodes, max_episode_length):\n\n        statistics = trange(num_episodes)\n\n        for e in statistics:\n            is_terminal = False\n            num_steps = 0\n            state = self.env.reset()\n            action = policy(state)\n            \n            state_sequence = np.zeros((state.shape[0], max_episode_length+1), dtype=np.float64, order='C')\n            state_sequence[:, 0] = state[:,0]\n            reward_sequence = np.zeros(max_episode_length, dtype=np.float64, order='C')\n            \n            while ((num_steps < max_episode_length) and (not is_terminal)):\n                num_steps+=1\n                state, reward, is_terminal = self.env.step(action)\n                action = policy(state)\n\n                state_sequence[:, num_steps] = state[:,0]\n                reward_sequence[num_steps-1] = reward\n\n            
state_sequence = state_sequence[:, 0:(num_steps+1)]\n reward_sequence = reward_sequence[0:num_steps]\n\n if (self.D.shape[1]==0):\n\n traj = state_sequence[:,0][:,np.newaxis]\n self.D = traj\n self.V_D = self.V_mu(state_sequence[:,0][:,np.newaxis])\n self.K_inv = 1/self.kernel(traj, traj)\n self.A = np.array([[1]], dtype=np.float64, order='C')\n self.alpha_ = np.array([[0]], dtype=np.float64, order='C')\n self.C_= np.array([[0]], dtype=np.float64, order='C')\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n\n self.update(state_sequence, reward_sequence)\n statistics.set_postfix(epi_length=num_steps, dict_size=self.D.shape[1], cumm_cost=np.sum(reward_sequence))", "def forward_backward(observations):\n\n # -------------------------------------------------------------------------\n # YOUR CODE GOES HERE\n #\n # observations = [(4, 3), (4, 2), (3, 2), (4, 0), (2, 0), (2, 0), (3, 2), \n # (4, 2), (2, 3), (3, 5)]\n num_time_steps = len(observations)\n forward_messages = [None] * num_time_steps\n forward_messages[0] = prior_matrix\n # # # TODO: Compute the forward messages\n for i,x_i in enumerate(observations):\n if x_i:\n obs_index = obs_state_index_map[x_i]\n pi_0 = forward_messages[i]\n # print(len(B[:obs_index]))\n weights = np.multiply(pi_0, B[:,obs_index])\n # x = sum([A[j,:]* w_i.T for j,w_i in enumerate(weights)])\n else:\n weights = forward_messages[i]\n # print(weights)\n x = sum([A[j,:]* w_i.T for j,w_i in enumerate(weights)])\n if i+1 < len(forward_messages):\n forward_messages[i+1] = x#normalize(x)\n # break\n\n ## forward messages as dictionary\n # for_dict = [None]*num_time_steps\n # for j,f in enumerate(forward_messages):\n # x = Distribution()\n # for i,x_i in enumerate(f):\n # if x_i == 0:\n # continue\n # # print(i,x_i)\n # x[all_possible_hidden_states[i]] = x_i\n # for_dict[j] = x.renormalize()\n # print(for_dict[3])\n\n # print('--------------\\n-----------------\\n')\n\n\n backward_messages = [None] * num_time_steps\n # backward_messages[-1] = [1]*len(prior_matrix)\n message = np.ones(len(all_possible_hidden_states), dtype=np.float64)\n backward_messages[-1] = message/len(all_possible_hidden_states)\n \n# ****\n ## Backwards messages\n for i,x_i in enumerate(reversed(observations)):\n # print(x_i)\n if x_i:\n obs_index = obs_state_index_map[x_i]\n pi = backward_messages[-1-i]\n weights = np.multiply(pi, B[:,obs_index])\n else:\n weights = backward_messages[-1-i]\n # print(i)\n x = sum([A[:,j]*w_i for j,w_i in enumerate(weights)])\n\n if i+1 < len(backward_messages):\n backward_messages[-2-i] = x#normalize(x)\n\n ## backward messages as dictionary\n # back_dict = [None]*num_time_steps\n # for j,b in enumerate(backward_messages):\n # x = Distribution()\n # if b == None:\n # continue\n # for i,x_i in enumerate(b):\n # if x_i == 0 or x_i==None:\n # continue\n # # print(i,x_i)\n # x[all_possible_hidden_states[i]] = x_i\n # back_dict[j] = x.renormalize()\n \n # print(back_dict[0])\n # print(A[:10,:10])\n # print('\\n-----------------\\n', B[:10,:10])\n\n # print(backward_messages[2])\n # backward_messages[0] = forward_messages[0]\n # # ## marginals as matrix\n marginals = [None] * num_time_steps \n for i,x_i in enumerate(observations):\n if x_i:\n obs_index = obs_state_index_map[x_i]\n marginals[i] = np.multiply(np.multiply(backward_messages[i],\n forward_messages[i]),\n B[:,obs_index])\n else:\n marginals[i] = np.multiply(backward_messages[i],forward_messages[i])\n # if i == 0:\n # marginals[i] = np.multiply(backward_messages[i], B[:,obs_index])\n # elif i == 
len(observations)-1:\n # marginals[i] = np.multiply(forward_messages[i], B[:,obs_index])\n # else:\n\n ## marginals as dictionary\n marg_dict = [None]*num_time_steps\n for j,m in enumerate(marginals):\n x = Distribution()\n for i,x_i in enumerate(m):\n if x_i == 0 or x_i==None:\n continue\n x[all_possible_hidden_states[i]] = x_i\n marg_dict[j] = x.renormalize()\n # print(marginals[i])\n # print(A[:10, :10], '\\n')\n # print(B[:10, :10], '\\n')\n # print(marg_dict)\n return marg_dict", "def init_start_prob(n_states):\n start_prob_est = np.random.rand(n_states, 1)\n start_prob_est /= np.sum(start_prob_est, 0)\n assert np.isclose(np.sum(start_prob_est, 0), 1.)\n return start_prob_est", "def find_freq(self):\n state = self.env.reset()\n state_dim = len(state)\n seq = [state]\n\n for _ in range(self.args.exploration_iterations*10):\n action = np.random.randint(self.env.action_space.n)\n next_state, reward, done, info = self.env.step(action)\n seq.append(next_state)\n if done:\n state = self.env.reset()\n else:\n state = next_state\n\n # Create a primitive MDP for every unique state explored\n states = set(seq)\n for state in states:\n primitive_mdp = MDP(level=0, state_var=state)\n primitive_mdp.exits = {x for x in range(self.env.action_space.n)}\n primitive_mdp.mer = frozenset({state})\n primitive_mdp.primitive_states = {state}\n self.mdps[0].add(primitive_mdp)\n\n freq = [{'sv': i, 'last': None, 'changes': 0} for i in range(state_dim)]\n for state in seq:\n for i in range(state_dim):\n if freq[i]['last'] is None:\n freq[i]['last'] = state[i]\n else:\n if state[i] != freq[i]['last']:\n freq[i]['changes'] += 1\n freq[i]['last'] = state[i]\n\n sorted_freq = sorted(freq, key=lambda x: x['changes'], reverse=True)\n return [d['sv'] for d in sorted_freq], state_dim", "def _updateInitialProbabilities(self): \n N = self.N\n K = self.K\n\n for i in range(1,self.K+1):\n s = 0\n updated_prob = 0\n for n in range(1,self.N+1):\n s = s+1\n updated_prob = updated_prob + self.posterior_state_trellis[n][(1,i)]\n self.state_initial_prob[i] = (updated_prob/s)", "def bias_prior(self):", "def _policy_eval(self, policy: np.ndarray) -> np.ndarray:\n V = np.zeros(self.state_dim)\n diff = 1.0\n dr = 0.9\n while (diff >= self.theta):\n diff = 0.0\n for s in self.mdp._state_dict:\n old = V[self.mdp._state_dict[s]]\n temp = 0.0\n for opt in range(self.action_dim):\n if policy[self.mdp._state_dict[s],opt] == 1.0: \n for next_s in self.mdp._state_dict:\n p = self.mdp.P[self.mdp._state_dict[s],opt,self.mdp._state_dict[next_s]]\n r = self.mdp.R[self.mdp._state_dict[s],opt,self.mdp._state_dict[next_s]]\n Vs = V[self.mdp._state_dict[next_s]]\n temp = temp + p * (r + dr * Vs)\n V[self.mdp._state_dict[s]] = temp\n diff = max(diff,abs(old - V[self.mdp._state_dict[s]]))\n return V", "def policy_iteration_policy(problem):\n returns = {state: 0 for state in problem.states()}\n policy = {state: random_action(problem, state) for state in problem.states()}\n\n while True:\n returns = policy_evaluation(problem, policy, returns, epsilon=0.001)\n\n next_policy = {\n state: max(problem.state_actions(state), key=lambda action: (\n sum(returns[next_state] * next_state_p\n for next_state, next_state_p in (\n problem.state_action_result_dist(state, action).items()\n )\n )\n ))\n for state in problem.states()\n }\n\n if next_policy == policy:\n break\n\n policy = next_policy\n\n return policy", "def _policy_improvement(self) -> Tuple[np.ndarray, np.ndarray]:\n # Start with a (random) policy\n policy = np.zeros([self.state_dim, 
self.action_dim])\n V = np.zeros([self.state_dim])\n #random init the policy\n for s in range(self.state_dim):\n policy[s,0] = 0.0\n policy[s,1] = 0.0\n policy[s,2] = 1.0\n\n V = self._policy_eval(policy)\n\n policy_stable = False\n dr = 0.9\n\n while (policy_stable != True):\n policy_stable = True\n for s in self.mdp._state_dict:\n old_action = (policy[self.mdp._state_dict[s]]).tolist()\n action_dict = {}\n for a in self.mdp._action_dict:\n temp = 0.0\n for next_s in self.mdp._state_dict:\n p = self.mdp.P[self.mdp._state_dict[s],self.mdp._action_dict[a],self.mdp._state_dict[next_s]]\n r = self.mdp.R[self.mdp._state_dict[s],self.mdp._action_dict[a],self.mdp._state_dict[next_s]]\n Vs = V[self.mdp._state_dict[next_s]]\n temp = temp + p * (r + dr * Vs)\n action_dict[self.mdp._action_dict[a]]= temp \n max_act = max(action_dict.values())\n V[self.mdp._state_dict[s]] = max_act\n res = [t for t,v in action_dict.items() if v == max_act][0]\n for opt in range(self.action_dim):\n if opt == res:\n policy[self.mdp._state_dict[s],opt] = 1.0\n else:\n policy[self.mdp._state_dict[s],opt] = 0.0\n if (old_action - policy[self.mdp._state_dict[s]]).any() == True:\n \n policy_stable = False\n if policy_stable == False:\n V = self._policy_eval(policy)\n \n return policy, V", "def get_probs(self, states, actions):\n # YOUR CODE HERE\n \n # So we need to determine for every input state-action pair, what the resulting policy distribution is\n # This means that the input will be a single state and a single action per index. \n # We then need to determine if, according to our policy, the action should be taken (prob=1) \n # or not (prob=0)\n \n # state is a tuple of (player's current sum, dealer's single showing card, boolean for usable ace)\n probs = []\n for index, (state, action) in enumerate(zip(states, actions)):\n chosen_action = self.sample_action(state)\n if action == chosen_action:\n probs.append(1)\n else:\n probs.append(0)\n \n \n return np.array(probs)", "def policyImprv(P,R,gamma,policy,v):\n def one_step_lookahead(s, V):\n \"\"\"\n :param state: current state\n :param v: current value estimator\n :return: A, list of optimal action values under current value estimator\n \"\"\"\n num_a = policy.shape[1]\n A = np.zeros(num_a)\n for a in range(num_a):\n for s_prime in range(num_S):\n A[a] += P[s, a, s_prime] * (R[s, a, s_prime] + gamma * V[s_prime])\n return A\n\n # initialization \n num_S, num_a = policy.shape\n policy_stable = True\n\n for s in range(num_S):\n\n chosen_a = np.argmax(policy[s])\n\n action_values = one_step_lookahead(s, v)\n best_a = np.argmax(action_values)\n\n if chosen_a != best_a:\n policy_stable = False\n\n for i in range(num_a):\n if i != best_a:\n policy[s][i] = 0\n if i == best_a:\n policy[s][best_a] = 1\n return policy, policy_stable", "def action_distribution(self, state):\n means, stds = self.__call__(state)\n dist = Normal(means, torch.exp(stds))\n\n return dist", "def policy(self, state, training):\n explore_prob = self.max_explore - (self.steps * self.anneal_rate)#probabilidad de exploracion decreciente\n explore = max(explore_prob, self.min_explore) > np.random.rand()\n\n if training and explore: #hacer exploracion\n action = np.random.randint(self.action_space_size)\n else: #hacer explotacion\n inputs = np.expand_dims(state, 0)\n qvalues = self.online_network.model(inputs) #online or evalation network predicts q-values\n #print(\"***##qvalues\",qvalues)\n action = np.squeeze(np.argmax(qvalues, axis=-1))\n\n return action", "def predict(self, state):\n if self.phase is None 
or self.device is None:\n raise AttributeError('Phase, device attributes have to be set!')\n if self.phase == 'train' and self.epsilon is None:\n raise AttributeError('Epsilon attribute has to be set in training phase')\n\n if self.reach_destination(state):\n return ActionXY(0, 0) if self.kinematics == 'holonomic' else ActionRot(0, 0)\n if self.action_space is None:\n self.build_action_space(state.self_state.v_pref)\n\n occupancy_maps = None\n probability = np.random.random()\n if self.phase == 'train' and probability < self.epsilon:\n max_action = self.action_space[np.random.choice(len(self.action_space))]\n else:\n self.action_values = list()\n max_value = float('-inf')\n max_action = None\n for action in self.action_space:\n next_self_state = self.propagate(state.self_state, action)\n if self.query_env:\n next_human_states, reward, done, info = self.env.onestep_lookahead(action)\n else:\n next_human_states = [self.propagate(human_state, ActionXY(human_state.vx, human_state.vy))\n for human_state in state.human_states]\n reward = self.compute_reward(next_self_state, next_human_states)\n batch_next_states = torch.cat([torch.Tensor([next_self_state + next_human_state]).to(self.device)\n for next_human_state in next_human_states], dim=0)\n rotated_batch_input = self.rotate(batch_next_states).unsqueeze(0)\n if self.with_om:\n if occupancy_maps is None:\n occupancy_maps = self.build_occupancy_maps(next_human_states).unsqueeze(0)\n rotated_batch_input = torch.cat([rotated_batch_input, occupancy_maps], dim=2)\n # VALUE UPDATE\n next_state_value = self.model(rotated_batch_input).data.item()\n # value = reward + pow(self.gamma, self.time_step * state.self_state.v_pref) * next_state_value\n if self.kinematics == \"holonomic\":\n v = np.linalg.norm(np.array(action))\n value = reward + pow(self.gamma, self.time_step * v) * next_state_value\n else:\n value = reward + pow(self.gamma, self.time_step * action[0]) * next_state_value\n self.action_values.append(value)\n if value > max_value:\n max_value = value\n max_action = action\n \n if max_action is None:\n raise ValueError('Value network is not well trained. ')\n\n if self.phase == 'train':\n self.last_state = self.transform(state)\n # print(\"Action:V:%f,\\tR:%f\\t\"%(max_action.v, max_action.r))\n return max_action", "def policy_gamble (self):\n\t\tidx = self.idx \t\t\t\t# internal time index of state\n\t\tprobs = self.probs\t\t\t# prob of reward for an action\n\t\tbeta = self.beta\t\t\t# inverse temp \n\n\t\t# softmax\n\t\tAct = beta*self.Q[idx]\n\t\tp = 1./(1. 
+ np.exp(-Act))\t# probability of gamble\n\t\tself.SM[idx] = p\n\n\t\t# decide whether to take gamble based on p\n\t\trnd = np.random.random_sample()\n\t\tif rnd < p:\n\t\t\tC = 1\t# gamble\n\t\telse:\n\t\t\tC = 0\t# no gamble\n\t\tself.C[idx] = C\n\n\t\t# no gamble\n\t\tif C == 0:\n\t\t\treward = 0\t\t # gamble reward encoded relative to reward\n\t\t\tself.R[idx] = -1 # rewarded sure thing, coded as -1\n\t\t\tself.PE[idx] = 0 # no PE, get the thing you expected\n\t\t# gamble\n\t\telse:\n\t\t\t# decide whether a reward is delivered\n\t\t\treward = np.random.binomial(size=1, n=1, p=probs)[0]\n\t\t\tself.R[idx] = reward # indicator that reward was received\n\t\t\tif reward == 0:\n\t\t\t\treward = self.l_mag\n\t\t\telse:\n\t\t\t\treward = self.r_mag\n\t\t\tself.PE[idx] = reward - self.Q[idx]", "def _gen_policy_params(self, state: State) -> Tensor:\n        ...", "def prob_class_1_arrival(state, lambda_1, mu, num_of_servers):\n    return lambda_1 / (lambda_1 + (mu * min(state[1], num_of_servers)))", "def policy_improvement(P, nS, nA, value_from_policy, policy, gamma=0.9):\n\n\tnew_policy = np.zeros(nS, dtype='int')\n\tfor state in range(nS): # for each state\n\t\tbest_Q_value = -float(\"inf\") # we seek the action that gives the best Q value from this state\n\t\tfor action in range(nA): # for each action\n\t\t\tp = P[state][action] # {(probability, nextstate, reward, terminal),...}[state,action]\n\t\t\treward = sum([p_i[0]*p_i[2] for p_i in p]) # expected reward from state,action\n\t\t\tQ_value = reward + gamma*(sum([p_i[0]*value_from_policy[p_i[1]] for p_i in p])) # expected reward + gamma * expected next value\n\t\t\tif Q_value > best_Q_value: # if this is the best action from this state so far\n\t\t\t\tbest_Q_value = Q_value\n\t\t\t\tnew_policy[state] = action # update policy\n\treturn new_policy", "def log_prob_increase(target_distribution, x0, xs, accepteds):\n    return target_distribution.log_probability(xs[-1]) - target_distribution.log_probability(x0)", "def _evaluate_policy(self, state, legal_actions, step_rewards=None, action=None):\n        if action is not None:\n            return torch.tensor(1.0 / len(legal_actions), dtype=self.dtype)\n        else:\n            return 1.0 / len(legal_actions) * torch.ones(len(legal_actions), dtype=self.dtype)", "def get_probs(self, states, actions):\n        # YOUR CODE HERE\n        \n        probs = np.ones(len(states))/2\n        return probs", 
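Both policy_improvement variants in this section score actions with the same one-step lookahead over (probability, next_state, reward, terminal) transition tuples. A small worked instance (the transition table and all numbers here are invented for illustration):

```python
import numpy as np

# One-step lookahead over (prob, next_state, reward, done) tuples,
# as used by the policy_improvement snippets above; numbers are made up.
gamma = 0.9
V = np.array([0.0, 1.0, 2.0])
transitions = {  # P[s][a] for a single state s with two actions
    0: [(0.8, 1, 0.0, False), (0.2, 2, 0.0, False)],
    1: [(1.0, 2, 1.0, False)],
}

q = np.zeros(2)
for a, tuples in transitions.items():
    q[a] = sum(p * (r + gamma * V[s2]) for p, s2, r, done in tuples)

# q[0] = 0.8*(0.9*1.0) + 0.2*(0.9*2.0) = 1.08 ; q[1] = 1.0 + 0.9*2.0 = 2.8
greedy_action = int(np.argmax(q))  # -> 1
```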
"def forward(self, latent_obs, obs_vars, action, initial_mean, initial_cov, obs_valid=None):\n\n        # if you need a version that also returns the prior, uncomment the respective parts below\n        # prepare list for return\n\n        prior_mean_list = []\n        prior_cov_list = [[], [], []]\n\n        post_mean_list = []\n        post_cov_list = [[], [], []]\n\n\n        # initialize prior\n        prior_mean, prior_cov = initial_mean, initial_cov\n\n        # actual computation\n        for i in range(latent_obs.shape[1]):\n\n            cur_obs_valid = obs_valid[:, i] if obs_valid is not None else None\n\n            # update belief with updateLayer\n            post_mean, post_cov = \\\n                self._update(prior_mean, prior_cov, latent_obs[:, i], obs_vars[:, i], cur_obs_valid)\n\n            post_mean_list.append(post_mean)\n            [post_cov_list[i].append(post_cov[i]) for i in range(3)]\n\n            # predict next belief state ahead in time\n            next_prior_mean, next_prior_cov = self._predict(post_mean, post_cov, action[:, i])\n\n            prior_mean_list.append(next_prior_mean)\n            [prior_cov_list[i].append(next_prior_cov[i]) for i in range(3)]\n\n            prior_mean = next_prior_mean\n            prior_cov = next_prior_cov\n\n        # stack results\n        prior_means = torch.stack(prior_mean_list, 1)\n        prior_covs = [torch.stack(x, 1) for x in prior_cov_list]\n\n        post_means = torch.stack(post_mean_list, 1)\n        post_covs = [torch.stack(x, 1) for x in post_cov_list]\n\n        return post_means, post_covs, prior_means, prior_covs", "def get_uniformization_sample(initial_state, terminal_state, states, path_length, rate_matrix):\n    # map states to indices\n    state_to_index = dict((state, i) for i, state in enumerate(states))\n    # find the maximum rate away from a state\n    max_rate = max(-rate_matrix[(a, a)] for a in states)\n    # create a uniformized discrete transition matrix in convenient dictionary form\n    discrete_transition_matrix = {}\n    for (a, b), r in rate_matrix.items():\n        discrete_transition_matrix[(a, b)] = r / max_rate\n        if a == b:\n            discrete_transition_matrix[(a, b)] += 1.0\n    # create a discrete transition matrix in the numpy format,\n    # and create the rate matrix in the numpy format\n    R = np.zeros((len(states), len(states)))\n    numpy_rate_matrix = np.zeros((len(states), len(states)))\n    for (a, b), r in rate_matrix.items():\n        ia = state_to_index[a]\n        ib = state_to_index[b]\n        numpy_rate_matrix[ia, ib] = r\n        R[ia, ib] = discrete_transition_matrix[(a, b)]\n    # convert initial and terminal states to indices\n    initial_index = state_to_index[initial_state]\n    terminal_index = state_to_index[terminal_state]\n    # get the probability of the terminal state given the initial state and the path length\n    rate_matrix_exponential = scipy.linalg.matfuncs.expm(numpy_rate_matrix * path_length)\n    Pab = rate_matrix_exponential[initial_index, terminal_index]\n    # draw the number of state changes\n    cumulative_probability = 0\n    n = 0\n    matrix_powers = MatrixPowerCache(R)\n    cutoff = random.uniform(0, Pab)\n    #print('cutoff =', cutoff)\n    #print('initial_index =', initial_index)\n    #print('terminal_index =', terminal_index)\n    #print(matrix_powers.get_power(0))\n    while 1:\n        poisson_factor = scipy.stats.poisson.pmf(n, max_rate * path_length)\n        discrete_transition_factor = matrix_powers.get_power(n)[initial_index, terminal_index]\n        cumulative_probability += poisson_factor * discrete_transition_factor\n        #print('cumulative probability =', cumulative_probability)\n        if cutoff < cumulative_probability:\n            break\n        n += 1\n    #print('n =', n)\n    # deal with degenerate cases\n    if n == 0:\n        return []\n    elif n == 1:\n        if initial_state == terminal_state:\n            return []\n        else:\n            # keep the (state, time) ordering consistent with the general case below\n            return [(terminal_state, random.uniform(0, path_length))]\n    # Simulate a discrete path given the number of changes and the initial and terminal states.\n    # The path is called virtual because some changes may be from a state to itself.\n    virtual_path = get_discrete_path_sample(initial_state, terminal_state, states, n+1, discrete_transition_matrix)[1:]\n    virtual_times = list(sorted(random.uniform(0, path_length) for i in range(n)))\n    events = []\n    last_state = initial_state\n    last_time = 0\n    for current_state, current_time in zip(virtual_path, virtual_times):\n        if current_state == last_state:\n            continue\n        events.append((current_state, current_time))\n        last_state = current_state\n        last_time = current_time\n    return events", "def value_iteration(P, nS, nA, gamma=0.9, tol=1e-3):\n\n\tvalue_function = np.zeros(nS)\n\tpolicy = np.zeros(nS, dtype=int)\n\tmax_num_iterations = 2000\n\n\t############################\n\t# YOUR IMPLEMENTATION HERE #\n\n\tfor i in range(max_num_iterations):\n\t\tprevious_value_function = np.copy(value_function)\n\t\tfor s in range(nS):\t# for each state\n\t\t\tq_values = np.zeros(nA)\n\t\t\tfor action in range(nA):\t# for each action\n\t\t\t\tq_value = 0\n\t\t\t\tfor 
transition in P[s][action]:\t# for every possible transition\n\t\t\t\t\tprobability = transition[0]\n\t\t\t\t\treward = transition[2]\n\t\t\t\t\tnext_state = transition[1]\n\t\t\t\t\tvalue_next_state = previous_value_function[next_state]\n\n\t\t\t\t\t# print(\"probability: \" + str(probability) + \"reward: \" + str(reward) + \"value next state: \" + str(value_next_state))\n\n\t\t\t\t\tq_value += probability * (reward + gamma * value_next_state)\n\t\t\t\t\n\t\t\t\tq_values[action] = q_value\n\n\t\t\tvalue_function[s] = np.amax(q_values)\t# the value is the max of these q values\n\t\t\t# extract policy\n\t\t\tpolicy[s] = np.argmax(q_values)\n\n\t\t# find the maximum difference between the previous value function and the current value function\n\t\tdifArray = np.subtract(value_function, previous_value_function)\n\t\tfor x in range(nS):\n\t\t\tdifArray[x] = abs(difArray[x])\n\t\tmaxDif = np.amax(difArray)\n\n\t\tif (maxDif < tol):\n\t\t\tprint(\"optimal policy found at iteration \" + str(i + 1))\n\t\t\tbreak\n\n\t############################\n\treturn value_function, policy", "def greedy_policy(self):\n # print(self.weights)\n policy = defaultdict(lambda: 0)\n\n for entry, values in self.weights.items():\n policy[entry] = np.argmax(self.weights[entry])\n # print(policy)\n\n return policy", "def select_action(policy, state):\n #torch.manual_seed(RAND_SEED) # Seed here is causing kernel to crash\n state = torch.from_numpy(state).long().unsqueeze(0)\n state = torch.zeros(3,9).scatter_(0,state,1).view(1,27)\n #print(state) # for 2b\n pr = policy(Variable(state))\n #print(pr) # for 2c\n m = torch.distributions.Categorical(pr)\n action = m.sample()\n log_prob = torch.sum(m.log_prob(action))\n return action.data[0], log_prob", "def policy_eval_v(policy, env, discount_factor=1.0, theta=0.00001):\n # Start with an all 0 value function\n V = np.zeros(env.nS)\n \n # loop door alle states heen \n # sla de oude state value op \n # Bereken de nieuwe state value door de SOM (kans omhoog * loop over waar je terrecht kunt komen * reward) kans omlaag..\n # kijk of je nog door moet gaan of stoppen\n delta = 1000 \n while delta > theta:\n # for x in range(2):\n delta = 0\n \n# loop throw possible states\n for state in range(env.nS):\n old_state_value = V[state]\n new_state_value = 0\n\n # loop shrow possible actions in state\n for action in range(env.nA):\n\n # print(\"kans omhoog\", policy[state][action])\n # print(\"kans omhoog uitkomen\", env.P[state][action][0][0])\n # print(\"direct reward\",env.P[state][action][0][2] )\n # print(\"value of that new state\", discount_factor * V[env.P[state][action][0][1]] )\n\n current_state_value = policy[state][action] * env.P[state][action][0][0] * ( env.P[state][action][0][2] + ( discount_factor * V[env.P[state][action][0][1]] ) ) \n# print(\"current state value\", current_state_value)\n new_state_value += current_state_value\n \n delta = max(delta, abs(old_state_value - new_state_value))\n V[state] = new_state_value\n# print(V[state])\n# print(\"delta\", delta)\n return np.array(V)", "def call(self, states):\n dist, mode = self.get_dist_and_mode(states)\n samples = dist.sample()\n log_probs = dist.log_prob(samples)\n log_probs = tf.expand_dims(log_probs, -1) # To avoid broadcasting\n return mode, samples, log_probs", "def forward(self, state):\n if self.noisy:\n lower, upper = self.x[0], self.x[-1]\n test_x = lower + torch.rand(len(self.x)) * (upper - lower)\n else:\n test_x = self.x\n\n with torch.no_grad(), gpytorch.settings.fast_pred_var():\n pred = self.gp(test_x)\n 
ucb = pred.mean + self.beta() * pred.stddev\n\n max_id = torch.argmax(ucb)\n next_point = test_x[[[max_id]]]\n return next_point, torch.zeros(1)", "def viterbi(p_observations_given_state, p_transition, p_initial):\n p_observations_given_state = numpy.asarray(p_observations_given_state)\n p_transition = numpy.asarray(p_transition)\n p_initial = numpy.asarray(p_initial)\n N, S = p_observations_given_state.shape\n assert p_transition.shape in {(S, S), (N-1, S, S)}\n if p_transition.shape == (S, S):\n p_transition = numpy.array([p_transition for i in range(N-1)])\n assert numpy.allclose(numpy.sum(p_transition, axis=2), 1)\n assert p_initial.shape == (S,)\n assert numpy.allclose(numpy.sum(p_initial), 1)\n\n # convert all probabilities to log probabilities so we can sum instead of\n # multiplying, which better controls numerical error.\n err = numpy.seterr(divide='ignore') # allow log(0) to go to -inf, as desired\n lp_observations_given_state = numpy.log(p_observations_given_state)\n lp_transition = numpy.log(p_transition)\n lp_initial = numpy.log(p_initial)\n numpy.seterr(**err)\n\n states = numpy.arange(S)\n # path[i] always contains the maximum likelihood sequence of states ending at state i\n path = [[i] for i in states]\n # lp_state contains the current log probability of being in the state given the sequence\n # of observations thus far considered.\n lp_state = lp_observations_given_state[0] + lp_initial\n\n for lp_obs, lp_trans in zip(lp_observations_given_state[1:], lp_transition):\n # For each observation after the first timepoint, construct an (S, S)\n # shape array where [si, sj] contains the log probability of going from\n # state si to state sj between time t and t+1.\n # Assume we know for each state si prob(si at time t), the probability\n # of being in that state at that time, then we can calculate the probability\n # of being in any given state sj at time t+1:\n # prob(transition from si at time t to sj at time t+1) = prob(si at t) *\n # prob(si->sj between t and t+1) *\n # prob(observation at t+1 given state sj)\n # prob(j at time t+1) = max_i(prob(i at time t -> j at time t+1))\n #\n # Thus we merely need to keep updating our estimates for the probability\n # of being in each state at each time, and keep a list of the path that\n # lead to each state.\n #\n # The actual code in use is 100% equivalent to the code below; however it\n # is rather more efficient.\n #\n # lp_transition_t = numpy.zeros((s, s), dtype=float)\n # new_path = []\n # lp_state = []\n # for s_to in states:\n # best_from_lp = -numpy.inf\n # for s_from in states:\n # lp_transition_t[s_from, s_to] = lp_state[s_from] + lp_trans[s_from, s_to] + lp_obs[s_to]\n # if lp_transition_t[s_from, s_to] > best_from_lp:\n # best_from = s_from\n # best_from_lp = lp_transition_t[s_from, s_to]\n # lp_state.append(best_from_lp)\n # new_path.append(path[best_from] + [s_to])\n # path = new_path\n lp_transition_t = lp_state[:,numpy.newaxis] + lp_trans + lp_obs[numpy.newaxis,:]\n best_from = numpy.argmax(lp_transition_t, axis=0)\n path = [path[s_from]+[s_to] for s_to, s_from in enumerate(best_from)]\n lp_state = lp_transition_t[best_from, states]\n last_state = numpy.argmax(lp_state)\n return numpy.array(path[last_state])", "def stateOccupationProbabilityGeneration(self):\n self.L = zeros((self.noOfEmmittingStates, self.T))\n\n for j in range(self.noOfEmmittingStates):\n for t in range(self.T):\n self.L[j,t] = (self.alpha[j+1, t+1] * self.beta[j+1, t+1]) / self.observationLikelihood", "def act(self, state, eps=0.):", "def 
env_runner(env, policy, num_local_steps, summary_writer):\n last_state = env.reset()\n last_features = policy.get_initial_features()\n length = 0\n rewards = np.zeros(5)\n \n while True:\n terminal_end = False\n rollout = PartialRollout()\n\n for local_step in range(num_local_steps):\n features = policy.features(last_state, *last_features)\n state, reward, terminal, info = env.step(None)\n\n if len(np.shape(terminal)) > 0:\n reward = np.sum(reward, axis=0) / len(terminal)\n state = state[-1]\n terminal = terminal[-1]\n # total_reward = np.zeros_like(reward[0])\n\n # for i, t in enumerate(terminal[:-1]):\n # if t:\n # total_reward += reward[i]\n # else:\n # total_reward += policy.value(state[i], *features)\n\n # if terminal[-1]:\n # total_reward += reward[-1]\n # total_reward /= len(terminal)\n # else:\n # total_reward /= len(terminal[:-1])\n # likelihood *= 1. / len(terminal)\n\n # reward = total_reward\n # state = state[-1]\n # terminal = terminal[-1]\n\n # collect the experience\n # note that the deepcopies seem to be necessary\n rollout.add(\n copy.deepcopy(last_state), \n copy.deepcopy(reward), \n info['weight'], \n terminal, \n copy.deepcopy(last_features))\n \n length += 1\n rewards += reward\n\n last_state = state\n last_features = features\n\n if info:\n summary = tf.Summary()\n for k, v in info.items():\n summary.value.add(tag=k, simple_value=float(v))\n summary_writer.add_summary(summary, policy.global_step.eval())\n summary_writer.flush()\n\n timestep_limit = env.spec.tags.get(\n 'wrapper_config.TimeLimit.max_episode_steps')\n if terminal or length >= timestep_limit:\n terminal_end = True\n if length >= timestep_limit or not env.metadata.get('semantics.autoreset'):\n last_state = env.reset()\n last_features = policy.get_initial_features()\n print(\"Episode finished. Sum of rewards: {}. 
Length: {}\".format(rewards, length))\n length = 0\n rewards = 0\n break\n\n if not terminal_end:\n rollout.r = policy.value(last_state, *last_features)\n\n # once we have enough experience, yield it\n yield rollout", "def step(self, observation, last_state):\n # We are omitting the details of network inference here.\n # ...\n feature_screen = observation[3]['feature_screen']\n feature_minimap = observation[3]['feature_minimap']\n feature_units = observation[3]['feature_units']\n feature_player = observation[3]['player']\n available_actions = observation[3]['available_actions']\n score_by_category = observation[3]['score_by_category']\n game_loop = observation[3]['game_loop']\n\n unit_type = feature_screen.unit_type\n empty_space = np.where(unit_type == 0)\n empty_space = np.vstack((empty_space[0], empty_space[1])).T\n random_point = random.choice(empty_space)\n #target = [random_point[0], random_point[1]]\n #action = [actions.FunctionCall(_BUILD_SUPPLY_DEPOT, [_NOT_QUEUED, target])]\n policy_logits = None\n new_state = None\n\n spatial_encoder_output = self.spatial_encoder(np.reshape(feature_screen, [1,128,128,27]))\n\n agent_statistics = get_agent_statistics(score_by_category)\n\n home_race = 'Terran'\n away_race = 'Terran'\n race = get_race_onehot(home_race, away_race)\n\n time = get_gameloop_obs(game_loop)\n\n upgrade_value = get_upgrade_obs(feature_units)\n if upgrade_value != -1:\n self.home_upgrade_array[np.where(upgrade_value[0] == 1)] = 1\n self.away_upgrade_array[np.where(upgrade_value[1] == 1)] = 1\n\n embedded_scalar = np.concatenate((agent_statistics, race, time, self.home_upgrade_array, self.away_upgrade_array), axis=0)\n scalar_encoder_output = self.scalar_encoder(np.reshape(embedded_scalar, [1,307]))\n embedded_feature_units = get_entity_obs(feature_units)\n entity_encoder_output = self.entity_encoder(np.reshape(embedded_feature_units, [1,512,464]))\n encoder_input = np.concatenate((spatial_encoder_output, scalar_encoder_output, entity_encoder_output), axis=1)\n\n core_input = np.reshape(encoder_input, [16, 8, 131])\n whole_seq_output, final_memory_state, final_carry_state = self.core(core_input)\n print(whole_seq_output.shape)\n print(final_memory_state.shape)\n print(final_carry_state.shape)\n\n action = [actions.FUNCTIONS.no_op()]\n\n return action, policy_logits, new_state", "def policy_evaluation(self):\n self.V = np.zeros((self.environment.num_rows * self.environment.num_columns,))\n self.environment.reset()\n\n end = False\n while not end:\n delta = 0\n\n for state in range(self.environment.num_rows * self.environment.num_columns):\n v = self.V[state]\n\n # Gets the max value got from any of the different accion\n self.V[state] = np.max([self.calc_value(state, action) * self.policy.get_action_probs(state, [\n self.environment.actions.index(action)]) for action in self.environment.actions])\n # Gets the maximum difference between current and previous values\n delta = max(delta, abs(v - self.V[state]))\n\n # Only ends if the delta is lower than theta (small value)\n end = delta <= self.theta\n\n return self.V", "def forward(self, ps, T):\n exp_values = self.expected_value_calc(ps)\n probs = F.softmax(exp_values/T, dim=-1)\n return probs", "def update(self):\n\n # get states, actions, rewards and total timesteps from memory\n states, actions, R, T = self.memory.get()\n n_ep = len(R)\n\n # compute value estimates for the states\n v = self.critic(states)\n\n # compute advantages (using GAE) and rewards to go\n A, rtg = utils.gae_rtg((R, v, T), self.gam, self.lam)\n\n # 
store the initial version of both the policy and the log probs of the\n # actions for later comparison with the future versions (needed for PPO)\n policy_old = copy.deepcopy(self.policy)\n log_probs_old = policy_old(states).log_prob(actions)\n\n # sample from a batch of experiences\n # (\"_\" subscript indicates \"sampled from\")\n for (v_, A_, rtg_, log_probs_old_), i in utils.sample_batch((v, A, rtg, log_probs_old), self.batch_size, self.policy_updates):\n log_probs_ = self.policy(states).log_prob(actions)[i]\n\n # estimate ratio between the new log probs and the old ones\n r_ = torch.exp(log_probs_ - log_probs_old_)\n\n l_1 = r_ * A_\n l_2 = torch.clamp(r_, 1-self.eps, 1+self.eps) * A_\n\n # TODO: implement entropy\n # TODO: merge policy and critic\n\n # surragate loss function for PPO\n l_clip = -torch.mean(torch.min(l_1, l_2))\n\n # update the policy\n self.policy_optimizer.zero_grad()\n l_clip.backward(retain_graph=True)\n self.policy_optimizer.step()\n\n # sample a batch of value estimates and the corresponding rewards to go\n # to update the value function.\n for (v_, rtg_), _ in utils.sample_batch((v, rtg), self.batch_size, self.v_updates):\n # compute the loss\n critic_loss = F.mse_loss(v_, rtg_)\n\n # update the critic\n self.critic_optimizer.zero_grad()\n critic_loss.backward(retain_graph=True)\n self.critic_optimizer.step()\n\n # clear the memory. PPO is an On-Policy method so we don't need these\n # memories anymore\n self.memory.clear()\n\n # return the loss of the value function for display\n return F.mse_loss(v, rtg)", "def __call__(\n self, policy: nn.Module, state: np.ndarray, t: float = 0.0\n ) -> Tuple[np.ndarray, ...]:\n action = self.action_selector(policy, state)\n if not self.exploration:\n return action\n\n ou_state = self.evolve_state()\n self.sigma = self.max_sigma - (self.max_sigma - self.min_sigma) * min(\n 1.0, t / self.decay_period\n )\n return np.clip(action + ou_state, self.action_min, self.action_max)", "def __init__(self, state_0):\n self.state = state_0\n self.s_dot = 0\n self.hist = []\n self.time = 0.0\n control_frequency = 200 # Hz for attitude control loop\n self.dt = 1.0 / control_frequency\n self.desired_state = 0", "def select_action(policy, state):\n state = torch.from_numpy(state).long().unsqueeze(0)\n state = torch.zeros(3,9).scatter_(0,state,1).view(1,27)\n pr = policy(Variable(state))\n m = torch.distributions.Categorical(pr)\n action = m.sample()\n log_prob = torch.sum(m.log_prob(action))\n return action.data[0], log_prob", "def test_Integrator_Propagator_full(annealing_steps=10):\n from qmlify.propagation import Propagator\n pdf_state, pdf_state_subset, integrator, ani_handler, atom_map, particle = propagator_testprep()\n\n propagator = Propagator(openmm_pdf_state = pdf_state,\n openmm_pdf_state_subset = pdf_state_subset,\n subset_indices_map = atom_map,\n integrator = integrator,\n ani_handler = ani_handler,\n context_cache=None,\n reassign_velocities=True,\n n_restart_attempts=0)\n\n particle_state, _return_dict = propagator.apply(particle.state, n_steps = annealing_steps, reset_integrator=True, apply_pdf_to_context=True)\n\n #assert that the iteration is equal to the total number of iterations\n assert propagator._iteration == propagator._n_iterations\n\n #the length of the state works must be the annealing step length + 1 since the first work is defaulted as 0.\n assert len(propagator.state_works[0]) == annealing_steps + 1\n\n #check to make sure that the particle state is maintained in memory\n assert particle_state == 
particle.state\n\n #the work should be negative\n assert propagator.state_works[0][-1] < 0." ]
[ "0.6379594", "0.6345793", "0.62748617", "0.6227867", "0.61537546", "0.60586053", "0.60436994", "0.60436994", "0.6035179", "0.6025242", "0.6023306", "0.60075766", "0.5958909", "0.59503", "0.5950126", "0.5924466", "0.5919219", "0.5919196", "0.5918052", "0.58889544", "0.5880986", "0.5869029", "0.5863591", "0.58579665", "0.5846109", "0.5838235", "0.5834959", "0.58302546", "0.5826932", "0.58265316", "0.5824154", "0.58237565", "0.58226866", "0.58133435", "0.578452", "0.5780197", "0.5774484", "0.5770011", "0.5740116", "0.57351285", "0.5721551", "0.5718725", "0.5704837", "0.5702901", "0.56812096", "0.5672381", "0.56689405", "0.56626624", "0.5639293", "0.5638851", "0.56237453", "0.5609109", "0.5607532", "0.55941546", "0.5586998", "0.5585317", "0.5585061", "0.5584423", "0.55800444", "0.5559985", "0.5556322", "0.5551897", "0.55462706", "0.554091", "0.5537642", "0.5537009", "0.5533359", "0.55131495", "0.5512101", "0.55118793", "0.55099475", "0.5508985", "0.5495398", "0.5494048", "0.5491399", "0.5485599", "0.54806554", "0.54705006", "0.54701614", "0.54699373", "0.5461279", "0.5459462", "0.54544", "0.5451658", "0.5451374", "0.5447382", "0.54420495", "0.54415065", "0.5436843", "0.54360545", "0.5435316", "0.54333353", "0.543325", "0.54326004", "0.5430108", "0.5428868", "0.5424297", "0.54221886", "0.54202944", "0.54179865" ]
0.70631313
0
Compute expected feature expectations from demonstration trajectories
def find_feature_expectations(state_features, demos):
    feature_exp = np.zeros(state_features.shape[1])

    for demo in demos:
        for state in demo:
            feature_exp += state_features[state]

    # Expected feature expectations: average the accumulated feature counts
    # over the number of demonstrations
    return feature_exp / np.shape(demos)[0]
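A minimal usage sketch for the function above, assuming NumPy is available; the toy state_features matrix and demos trajectories below are hypothetical values chosen only to illustrate the averaging, not part of the dataset record:

import numpy as np

# Hypothetical toy problem: 4 states, 2 features per state.
state_features = np.array([[1.0, 0.0],   # features of state 0
                           [0.0, 1.0],   # features of state 1
                           [1.0, 1.0],   # features of state 2
                           [0.0, 0.0]])  # features of state 3

# Two demonstration trajectories, each a sequence of visited state indices.
demos = np.array([[0, 2, 3],   # demo 1 visits states 0 -> 2 -> 3, contributing [2, 1]
                  [1, 2, 3]])  # demo 2 visits states 1 -> 2 -> 3, contributing [1, 2]

# Summed features [3, 3] averaged over the 2 demos gives [1.5, 1.5].
print(find_feature_expectations(state_features, demos))  # [1.5 1.5]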
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_predictor():", "def test_basic(self):\n data = get()\n metrics = [verif.metric.Within(),\n verif.metric.A(), # Hit\n verif.metric.B(), # FA\n verif.metric.C(), # Miss\n verif.metric.D(), # Correct rejection\n verif.metric.Hit(),\n verif.metric.Threat(),\n verif.metric.Conditional(),\n verif.metric.XConditional(func=np.median),\n ]\n intervals = [verif.interval.Interval(-np.inf, 0, True, True), # [-inf, 0]\n verif.interval.Interval(-np.inf, 1, True, True),\n verif.interval.Interval(-np.inf, 2, True, True),\n ]\n obs = [0, 1.5, 2]\n fcst = [3.1, 1.1, -2.1]\n N = len(obs)*1.0\n\n # Each line is one metric (one number for each threshold)\n expected = [[0/N, 100/N, 100/N], # Within\n [0/N, 0/N, 2/N], # Hit\n [1/N, 1/N, 0/N], # FA\n [1/N, 1/N, 1/N], # Miss\n [1/N, 1/N, 0/N], # Correct rejection\n [0, 0, 2.0/3], # Hit rate\n [0, 0, 2.0/3], # Threat score\n [3.1, 3.1, 0.7], # Average fcst given obs in interval\n [0, 0, 1.5], # Average obs given obs in interval\n ]\n\n for m in range(len(metrics)):\n metric = metrics[m]\n for i in range(len(intervals)):\n value = metric.compute_from_obs_fcst(np.array(obs), np.array(fcst), intervals[i])\n ex = expected[m][i] * 1.0\n if np.isnan(value):\n self.assertTrue(np.isnan(ex))\n else:\n self.assertAlmostEqual(ex, value)", "def test_all_features_with_data(self):\n feature1 = Feature('looktest1')\n feature1.set_percentage(5)\n\n feature2 = Feature('looktest2')\n feature2.activate()\n feature2.add_to_whitelist(3)\n\n feature3 = Feature('looktest3')\n feature3.activate()\n feature3.add_to_blacklist(4)\n feature3.add_to_blacklist(5)\n\n feature4 = Feature('looktest4')\n feature4.activate()\n feature4.add_to_whitelist(3)\n feature4.add_to_whitelist(5)\n feature4.add_to_blacklist(4)\n\n all_features = Feature.all_features(include_data=True)\n self.assertEqual(len(all_features), 4)\n\n for key in ['looktest1', 'looktest2', 'looktest3', 'looktest4']:\n self.assertTrue(key in all_features)\n if not key == 'looktest1':\n self.assertEqual(all_features[key]['percentage'], 100)\n\n self.assertEqual(all_features['looktest1']['percentage'], 5)\n self.assertFalse('whitelist' in all_features['looktest1'])\n self.assertFalse('blacklist' in all_features['looktest1'])\n\n self.assertTrue('whitelist' in all_features['looktest2'])\n self.assertEqual(all_features['looktest2']['whitelist'], [3])\n self.assertFalse('blacklist' in all_features['looktest2'])\n\n self.assertFalse('whitelist' in all_features['looktest3'])\n self.assertTrue('blacklist' in all_features['looktest3'])\n self.assertEqual(all_features['looktest3']['blacklist'], [4, 5])\n\n self.assertTrue('whitelist' in all_features['looktest4'])\n self.assertEqual(all_features['looktest4']['whitelist'], [3, 5])\n self.assertTrue('blacklist' in all_features['looktest4'])\n self.assertEqual(all_features['looktest4']['blacklist'], [4])", "def feature_expectation_from_trajectories(features, trajectories):\n n_states, n_features = features.shape\n\n fe = np.zeros(n_features)\n\n for t in trajectories:\n for s in t.states():\n fe += features[s, :]\n\n return fe / len(trajectories)", "def test_gtf(self):\n #TODO write bed tests", "def test_active_inference_SPM_1b(self):", "def test(self):\n y_list = []\n y_hat_list = []\n for ex_dict in ut.TEST_LIST:\n y_list.append(ex_dict[1])\n y_hat_list.append(self.predict(ex_dict[0]))\n acc = ut.compute_accuracy(y_hat_list, y_list)\n return y_hat_list, acc", "def getExpectations():", "def test_statistics(self):\n output_path = FLAGS.test_tmpdir\n output_name = 'temp'\n\n 
equation_name = 'advection'\n discretization = 'finite_volume'\n\n # create a temporary dataset\n with flagsaver.flagsaver(\n dataset_path=output_path,\n dataset_name=output_name,\n equation_name=equation_name,\n discretization=discretization,\n simulation_grid_size=256,\n output_grid_size=32,\n dataset_type='all_derivatives',\n total_time_steps=10,\n example_num_time_steps=3,\n time_step_interval=5,\n num_seeds=4,\n ):\n create_training_data.main([], runner=beam.runners.DirectRunner())\n\n metadata_path = os.path.join(output_path, output_name + '.metadata.json')\n dataset_metadata = readers.load_metadata(metadata_path)\n low_res_grid = readers.get_output_grid(dataset_metadata)\n\n equation = advection_equations.FiniteVolumeAdvectionDiffusion(\n diffusion_coefficient=0.1)\n data_key = equation.key_definitions['concentration'].exact()\n dataset = readers.initialize_dataset(\n dataset_metadata, ((data_key,),), (low_res_grid,))\n dataset = dataset.repeat(1)\n dataset = dataset.batch(1)\n all_data = np.concatenate(\n [np.ravel(data[0][data_key]) for data in dataset])\n\n expected_mean = np.mean(all_data)\n expected_variance = np.var(all_data, ddof=1)\n\n keys = readers.data_component_keys(dataset_metadata['components'])\n components_dict = dict(zip(keys, dataset_metadata['components']))\n\n component = components_dict[data_key, low_res_grid]\n metadata_mean = component['statistics']['mean']\n metadata_variance = component['statistics']['variance']\n\n np.testing.assert_allclose(metadata_mean, expected_mean, atol=1e-3)\n np.testing.assert_allclose(metadata_variance, expected_variance, atol=1e-3)", "def test_theft_and_stealing(self):", "def test__extract_features(self):\n text_sample = \"I really really love this movie\"\n feature_sample = ['really','love','good']\n feature_score_type = \"presence\"\n model_sample = Model(feature_sample,feature_score_type)\n result_features = model_sample.extract_features(text_sample)\n assert_equal(result_features,{'really':1,'love':1,'good':0})\n feature_score_type = \"term_frequency\"\n model_sample = Model(feature_sample,feature_score_type)\n result_features = model_sample.extract_features(text_sample)\n assert_equal(result_features,{'really':2,'love':1,'good':0})", "def testfeatures(self):\n self.set_wdiff()\n xp,wp=st.crosslinematch(self.xarr, self.farr, self.slines, self.sfluxes,\n self.ws, mdiff=self.mdiff, wdiff=20, sigma=self.sigma, niter=self.niter)\n for x, w in zip(xp, wp):\n if w not in self.wp and w>-1: \n self.xp.append(x)\n self.wp.append(w)\n self.plotFeatures()\n self.redraw_canvas()", "def test__validate_features__0():\n for input_value, expected_output in (\n (None, None),\n ([], None),\n ([GuildFeature.animated_banner], (GuildFeature.animated_banner, )),\n ([GuildFeature.animated_banner.value], (GuildFeature.animated_banner, )),\n (\n [GuildFeature.animated_banner, GuildFeature.animated_icon],\n (GuildFeature.animated_banner, GuildFeature.animated_icon,),\n ),\n ):\n output = validate_features(input_value)\n vampytest.assert_eq(output, expected_output)", "def runTest(exdeflike, indeflike):\n\n with open (\"../data/2016/data/test\", \"r\") as f:\n records = re.split(\"\\n\\n\", f.read().strip()) #separate by double new line\n\n threshold = [0.3, .1] #just a guess for now\n ev = defaultdict(lambda: [0,0,0,0])\n\n for record in records:\n data = [re.split(\"\\t\", d) for d in re.split(\"\\n\", record)]\n try:\n tokens, tags = zip(*data)\n except:\n print data\n pass\n\n for i, token, el, il in test(tokens, exdeflike, indeflike):\n for model in 
range(4):\n result = \"tn\"\n if decide(el, il, model, threshold):\n result = \"tp\" if tags[i][0] == \"B\" else \"fp\"\n elif tags[i][0] == \"B\":\n result = \"fn\"\n ev[result][model] += 1\n\n for model in range(4):\n ev[\"precision\"][model] = ev[\"tp\"][model] / (ev[\"tp\"][model] + ev[\"fp\"][model])\n ev[\"recall\"][model] = ev[\"tp\"][model] / (ev[\"tp\"][model] + ev[\"fn\"][model])\n ev[\"F1\"][model] = harmonic_mean([ev[\"precision\"][model], ev[\"recall\"][model]])\n\n return ev", "def test_num_evals(self):\n\t\tdetails = self.watcher.describe()\t\t\n\t\tself.assertTrue((details.M * details.rf == details.num_evals).all())", "def test_num_evals(self):\n\t\tdetails = self.watcher.describe()\t\t\n\t\tself.assertTrue((details.M * details.rf == details.num_evals).all())", "def test_10_test_model(self, example):\n res = example.calc_model()\n print(example.trips_ij)\n total_trips_target = example.persons_gi.sum()\n total_trips_actual = example.trips_ij.sum()\n np.testing.assert_almost_equal(total_trips_target, total_trips_actual)", "def test_features_property():\n atom = ATOMClassifier(X_bin, y_bin, random_state=1)\n atom.run(\"LR\")\n assert [i == j for i, j in zip(atom.lr.features, atom.features)]", "def test_workbench_scenarios(self):\n result_title = 'Adaptive Numeric Input XBlock'\n basic_scenario = \"<adaptivenumericinput />\"\n test_result = self.xblock.workbench_scenarios()\n self.assertEquals(result_title, test_result[0][0])\n self.assertIn(basic_scenario, test_result[0][1])", "def test_stage_0():\n\tra_1 = readImage(TRAIN_RAW_IMAGE_1)\n\tre_1 = readImage(TRAIN_RESULT_IMAGE_1)\n\n\tra_2 = readImage(TRAIN_RAW_IMAGE_2)\n\tre_2 = readImage(TRAIN_RESULT_IMAGE_2)\n\n\t# Uncomment below if more examples are required.\n\t# ra_3 = readImage(TRAIN_RAW_IMAGE_3)\n\t# re_3 = readImage(TRAIN_RESULT_IMAGE_3)\n\n\t# Uncomment below if the additional features are needed.\n\t# ra_1 += (\n\t# \tlaplace_operator(TRAIN_RAW_IMAGE_1),\\\n\t# \t# k_means(TRAIN_RAW_IMAGE_1)[0],\\\n\t# \t)\n\n\t# Uncomment below if the additional features are needed.\n\t# ra_2 += (\n\t# \tlaplace_operator(TRAIN_RAW_IMAGE_2),\\\n\t# \t# k_means(TRAIN_RAW_IMAGE_2)[0],\\\n\t# \t)\n\n\t# The prediction model is obtained and trained.\n\tengine = get_model((ra_1, ra_2,), (re_1, re_2,), model_type=SVM, percentage=0.1)\n\n\ttest_percentage = float(1) # how many tests\n\n\tra_1 = readImage(TEST_RAW_IMAGE_1)\n\n\t# Uncomment below if the additional features are needed.\n\t# ra_1 += (\n\t# \tlaplace_operator(TEST_RAW_IMAGE_1),\\\n\t# \t# k_means(TEST_RAW_IMAGE_1)[0],\\\n\t# \t)\n\n\tre_1 = readImage(TEST_RESULT_IMAGE_1)\n\n\t# ra_2 = readImage(TEST_RAW_IMAGE_2)\n\t# re_2 = readImage(TEST_RESULT_IMAGE_2)\n\n\tinput_vec = []\n\t# The features are extracted.\n\tinput_vec += buildFeatureArray_2(ra_1[0], ra_1[1], ra_1[2],\\\n\t\tRADIUS_ARRAY,\\\n\t\tadditional_feats=([] if len(ra_1) == 3 else ra_1[3:]))\n\n\tex_no = int(test_percentage * len(input_vec)) # actual number of the test sample\n\n\toutput_vec = []\n\toutput_vec += matrixToArray(re_1[0], lambda el: 1 if el == 255 else 0)\n\n\tprint('Will start predicting...')\n\n\tpredicted_vec = engine.predict(input_vec[:ex_no])\n\n\tcounter = float(0)\n\tfor y, p in zip(output_vec[:ex_no], predicted_vec[:ex_no]):\n\t\tif y == p: counter += 1\n\n\tprint('Accuracy: ' + str(counter/ex_no))\n\n\tpredicted_mat = arrayToMatrix( predicted_vec, len(re_1[0]), len(re_1[0][0]),\\\n\t\tlambda el: 255 if el == 1 else 0)\n\n\t# The predicted segmentation is saved.\n\tsave_rgb_img(\\\n\t 
np.array(predicted_mat).transpose(),\\\n\t np.array(predicted_mat).transpose(),\\\n\t np.array(predicted_mat).transpose(),\\\n\t 'pred.bmp',\\\n\t)", "def test_get_activity():\n # basically create some depth and score arrays\n Example = namedtuple('Example', ('score', 'depth', 'active_expected', 'shift_expected'))\n examples = [\n Example([0, 0, 0, 0, 1, 3, 1, 0, 0], [5, 5, 8, 8, 12, 14, 15, 15, 15], [False] * 7, 0),\n Example([0, 0, 3, 5, 0, 0, 0, 9, 0], [5, 7, 8, 10, 8, 7, 8, 8, 8], [False, True, True, False, False, True, True], 8),\n Example([31, 30, 30, 0, 0, 0, 0, 0, 0], [120, 180, 150, 150, 130, 80, 40, 20, 40], [True] + [False] * 6, 2),\n Example([0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [False] * 7, 0)]\n for i, example in enumerate(examples):\n activity = _get_active(np.array(example.score), np.array(example.depth))\n active, shift = get_activity(np.array(example.score), np.array(example.depth), 4)\n if not all([a == b for a, b in zip(activity, example.active_expected)]):\n raise ValueError('Error in activity calculation for example {}:\\n'\n 'Observed != Expected:\\n{}\\n{}'.format(1+i, activity, example.active_expected))\n if shift != example.shift_expected:\n raise ValueError('Error in window shift calculation for example {}:\\n'\n '{} != {}'.format(1+i, shift, example.shift_expected))\n if active != any(activity):\n raise ValueError('How did you screw this up, man?')", "def _test_examples(self):\n checks = [\n (\n \"ex5_line-of-sight_solution\",\n [r\"RAJA sequential\", r\"RAJA OpenMP\", r\"result -- PASS\"],\n ),\n (\n \"ex6_stencil-offset-layout_solution\",\n [r\"RAJA Views \\(permuted\\)\", r\"result -- PASS\"],\n ),\n (\n \"ex8_tiled-matrix-transpose_solution\",\n [r\"parallel top inner loop\", r\"collapsed inner loops\", r\"result -- PASS\"],\n ),\n (\"kernel-dynamic-tile\", [r\"Running index\", r\"(24,24)\"]),\n (\"plugin-example\", [r\"Launching host kernel for the 10 time\"]),\n (\"tut_batched-matrix-multiply\", [r\"result -- PASS\"]),\n (\"wave-eqn\", [r\"Max Error = 2\", r\"Evolved solution to time\"]),\n ]\n for exe, expected in checks:\n reason = \"test: checking output of {0} for {1}\".format(exe, expected)\n self.run_test(\n exe,\n [],\n expected,\n installed=False,\n purpose=reason,\n skip_missing=True,\n work_dir=self._extra_tests_path,\n )", "def test_on(self, examples):\n stats = defaultdict(int)\n num_correct, num_examples = 0, 0\n for filename, category in examples:\n hypothesis = self.classify_doc(filename)[0]\n if category == 'positive':\n stats['total_real_pos'] += 1\n elif category == 'negative':\n stats['total_real_neg'] += 1\n stats['total_examples'] += 1\n if category == hypothesis:\n if category == 'positive':\n stats['pos_correctly_classified'] += 1\n else:\n stats['neg_correctly_classified'] += 1\n num_correct += 1\n else:\n if category == 'positive' and hypothesis == 'negative':\n stats['pos_classified_as_neg'] += 1\n elif category == 'negative' and hypothesis == 'positive':\n stats['neg_classified_as_pos'] += 1\n num_examples += 1\n\n return ((float(num_correct) / float(num_examples)), stats)", "def test_falsifications(self):\n implications = get_conditional_independencies(asia_example.graph)\n issues = get_falsifications(implications, asia_example.data)\n self.assertEqual(0, len(issues.failures))\n self.assertEqual(len(issues.evidence), len(implications))", "def test_predict_from_examples():\n examples = []\n for i in range(len(DATA)):\n (value, attribute_map) = DATA[i]\n print 'tuple created : \\n1:' + str(value) + \"\\n2:\" + 
str(attribute_map)\n ex = VowpalExample(i, value)\n #Extracting the features from all of the namespaces in the map. \n for (featureNamespace, features) in attribute_map.items():\n #Each \"section\" is \n ex.add_section(featureNamespace, features)\n examples.append(ex)\n train = examples[:-2]\n test = examples[-2:]\n vw = Vowpal(PATH_VW, './vw.%s', {'--passes' : '10' })\n preds = vw.predict_from_examples(train, test)\n for (id, value) in preds:\n print 'prediction for %s is %s' % (id, value)", "def ttest_review(sample_1, sample_2, alpha=.05):\n\n result = stats.ttest_ind(sample_1, sample_2)\n crit_val, p_val = result\n \n ## Creating interpretation based on p-value results.\n\n if p_val < .05:\n print(f'The feature is statistically significant with a p-value of {p_val}.')\n\n else:\n print(f'The feature is not statistically significant with a p-value of {p_val}.')\n \n return p_val", "def run_feature_extraction_tests():\n test_feature_extraction()\n test_distributed_feature_extraction()\n test_multimodel_feature_extraction()\n test_distributed_multimodel_feature_extraction()", "def test_screenip_unit_det(self):\n #\n # '''\n # Dose Equiv. Toxicity:\n #\n # The FI value (kg-diet) is multiplied by the reported NOAEC (mg/kg-diet) and then divided by\n # the test animal's body weight to derive the dose-equivalent chronic toxicity value (mg/kg-bw):\n #\n # Dose Equiv. Toxicity = (NOAEC * FI) / BW\n #\n # NOTE: The user enters the lowest available NOAEC for the mallard duck, for the bobwhite quail,\n # and for any other test species. The model calculates the dose equivalent toxicity values for\n # all of the modeled values (Cells F20-24 and results worksheet) and then selects the lowest dose\n # equivalent toxicity value to represent the chronic toxicity of the chemical to birds.\n # '''\n # try:\n # # result =\n # # self.assertEquals(result, )\n # pass\n # finally:\n # pass\n # return\n #\n #\n # def test_det_duck(self):\n # \"\"\"\n # unittest for function screenip.det_duck:\n # :return:\n # \"\"\"\n # try:\n # # det_duck = (self.noaec_duck * self.fi_bird(1580.)) / (1580. / 1000.)\n # screenip_empty.noaec_duck = pd.Series([1.], dtype='int')\n # screenip_empty.fi_bird = pd.Series([1.], dtype='int')\n # result = screenip_empty.det_duck()\n # npt.assert_array_almost_equal(result, 1000., 4, '', True)\n # finally:\n # pass\n # return\n #\n # def test_det_quail(self):\n # \"\"\"\n # unittest for function screenip.det_quail:\n # :return:\n # \"\"\"\n # try:\n # # det_quail = (self.noaec_quail * self.fi_bird(178.)) / (178. / 1000.)\n # screenip_empty.noaec_quail = pd.Series([1.], dtype='int')\n # screenip_empty.fi_bird = pd.Series([1.], dtype='int')\n # result = screenip_empty.det_quail()\n # npt.assert_array_almost_equal(result, 1000., 4, '', True)\n # finally:\n # pass\n # return\n #\n # def test_det_other_1(self):\n # \"\"\"\n # unittest for function screenip.det_other_1:\n # :return:\n # \"\"\"\n # try:\n # #det_other_1 = (self.noaec_bird_other_1 * self.fi_bird(self.bodyweight_bird_other_1)) / (self.bodyweight_bird_other_1 / 1000.)\n # #det_other_2 = (self.noaec_bird_other_2 * self.fi_bird(self.bodyweight_bird_other_1)) / (self.bodyweight_bird_other_1 / 1000.)\n # screenip_empty.noaec_bird_other_1 = pd.Series([400.]) # mg/kg-diet\n # screenip_empty.bodyweight_bird_other_1 = pd.Series([100]) # grams\n # result = screenip_empty.det_other_1()\n # npt.assert_array_almost_equal(result, 4666, 4)\n # finally:\n # pass\n # return\n #\n # The following tests are configured such that:\n # 1. 
four values are provided for each needed input\n # 2. the four input values generate four values of out_det_* per bird type\n # 3. the inputs per bird type are set so that calculations of out_det_* will result in\n # each bird type having one minimum among the bird types;\n # thus all four calculations result in one minimum per bird type\n\n # create empty pandas dataframes to create empty object for this unittest\n screenip_empty = self.create_screenip_object()\n\n expected_results = pd.Series([4.2174, 4.96125, 7.97237, 10.664648], dtype='float')\n result = pd.Series([], dtype='float')\n\n try:\n screenip_empty.bodyweight_bobwhite_quail = 178.\n screenip_empty.bodyweight_mallard_duck = 1580.\n screenip_empty.noaec_quail = pd.Series([100., 300., 75., 150.], dtype='float')\n screenip_empty.noaec_duck = pd.Series([400., 100., 200., 350.], dtype='float')\n screenip_empty.noaec_bird_other_1 = pd.Series([50., 200., 300., 250.], dtype='float')\n screenip_empty.noaec_bird_other_2 = pd.Series([350., 400., 250., 100.], dtype='float')\n screenip_empty.noaec_bodyweight_bird_other_1 = pd.Series([345.34, 453.54, 649.29, 294.56], dtype='float')\n screenip_empty.noaec_bodyweight_bird_other_2 = pd.Series([123.84, 85.743, 127.884, 176.34], dtype='float')\n screenip_empty.no_of_runs = len(expected_results)\n result = screenip_empty.det()\n npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )\n finally:\n tab = [result, expected_results]\n print(\"\\n\")\n print(inspect.currentframe().f_code.co_name)\n print(tabulate(tab, headers='keys', tablefmt='rst'))\n return", "def test_01_lighting(self):", "def test_findFeatures(self):\n features = self.builder._findChanges(\n self.project, self.builder._FEATURE)\n self.assertEquals(\n features,\n [(5, \"We now support the web.\"),\n (12, \"The widget is more robust.\"),\n (15,\n \"A very long feature which takes many words to describe with \"\n \"any accuracy was introduced so that the line wrapping behavior \"\n \"of the news generating code could be verified.\"),\n (16, \"A simpler feature described on multiple lines was added.\")])", "def test_something(self):\n\n allure.dynamic.title(\"Testing compute_ranks\")\n allure.dynamic.severity(allure.severity_level.NORMAL)\n allure.dynamic.description_html('<h3>Codewars badge:</h3>'\n '<img src=\"https://www.codewars.com/users/myFirstCode'\n '/badges/large\">'\n '<h3>Test Description:</h3>'\n \"<p>Test the function taht organizes a sports league in a \"\n \"round-robin-system. Each team meets all other teams. \"\n \"In your league a win gives a team 2 points, a draw gives \"\n \"both teams 1 point. After some games you have to compute \"\n \"the order of the teams in your league. 
You use the following \"\n \"criteria to arrange the teams:</p>\"\n \"<ul><li>- Points</li>\"\n \"<li>- Scoring differential (the difference between goals \"\n \"scored and those conceded)</li>\"\n \"<li>- Goals scored</li></ul>\")\n\n test_data = [\n (6,\n [[0, 5, 2, 2],\n [1, 4, 0, 2],\n [2, 3, 1, 2],\n [1, 5, 2, 2],\n [2, 0, 1, 1],\n [3, 4, 1, 1],\n [2, 5, 0, 2],\n [3, 1, 1, 1],\n [4, 0, 2, 0]],\n [4, 4, 6, 3, 1, 2]),\n (6,\n [[0, 5, 2, 0],\n [1, 4, 2, 2],\n [2, 3, 1, 3],\n [1, 5, 0, 0],\n [2, 0, 2, 1],\n [3, 4, 3, 1]],\n [2, 3, 4, 1, 5, 6]),\n (4,\n [[0, 3, 1, 1],\n [1, 2, 2, 2],\n [1, 3, 2, 0],\n [2, 0, 2, 0]],\n [3, 1, 1, 3]),\n (10,\n [],\n [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]),\n (8,\n [[0, 7, 2, 0]],\n [1, 2, 2, 2, 2, 2, 2, 8])\n ]\n\n for data in test_data:\n number = data[0]\n games = data[1]\n expected = data[2]\n actual_result = compute_ranks(number, games)\n print_log(number=number,\n games=games,\n expected=expected,\n actual_result=actual_result)\n\n with allure.step(\"Enter a test data and verify the result:\"):\n self.assertEqual(expected, actual_result)", "def test_variational():\n # iris\n #pres = \"Test pour le data set Iris (facile, classique)\"\n #test_from_func_variational(pres, 15, 10, 3, True, Iris)\n\n # breast cancer\n pres = \"Test pour le data set Breast Cancer (facile, classique)\"\n test_from_func_variational(pres, 15, 10, 3, True, Breast_cancer)\n\n # digits\n # pres = \"Test pour le data set Digits (difficile, classique)\"\n # test_from_func(pres, 10, 10, 10, True, Digits, quantum_instance)\n\n # wine\n # pres = \"Test pour le data set Wine (moyen, classique)\"\n # test_from_func(pres, 15, 10, 5, True, Wine, quantum_instance)\n\n # gaussian\n pres = \"Test pour des données gaussiennes (moyen, classique)\"\n for _ in range(1):\n print(\"\\n\")\n print(\"New iteration\")\n test_from_func_variational(pres, 25, 10, 2, True, Gaussian)\n print(\"\\n\")\n\n # small adn strings\n pres = \"Test pour des séquences ADN courtes (difficile, classique)\"\n test_from_func_variational(pres, 10, 15, 14, True, Sequence)\n\n #Quantum data\n pres = \"Test pour des données générées par ordinateur quantique (facile, quantique)\"\n print(pres)\n _, samp_train, samp_test, labels = ad_hoc_data(15, 10, 2, 0.3, True)\n sample_m, sample_p = stock_get(20, 0.3)\n\n labels_me = [-1, 1]\n samp_train_me = {-1: np.array(sample_m[:15]), 1: np.array(sample_p[:15])}\n samp_test_me = {-1: np.array(sample_m[15:]), 1: np.array(sample_p[15:])}\n print(samp_train)\n print(samp_train_me)\n print(samp_test)\n print(samp_test_me)\n\n my_impl_variational(samp_train, samp_test, labels)\n print(\"Pour autres données quantiques\")\n my_impl_variational(samp_train_me, samp_test_me, labels_me)", "def three_experiments_with_trials(family_with_trials, single_with_trials):", "def test(self):\n\t\ttest_rewards = []\n\t\tobs = self.env.reset()\n\t\tdone_test = False\n\t\tfor t in range(self.test_cycles):\n\t\t\t# to render or not to render\n\t\t\tif self.render_test:\n\t\t\t\tenv.render()\n\n\t\t\tcycle_rewards = 0\n\t\t\twhile not done_test:\n\t\t\t\tfeed_dict_test = {self.va_input: obs}\n\t\t\t\taction_test = self.sess.run(self.va_out, feed_dict = feed_dict_test)\n\t\t\t\taction_test = self.sess.run(tf.argmax(action_test))\n\t\t\t\tobs_test, r_test, done_test,_ = env.step(action_test)\n\t\t\t\tcycle_rewards += r_test\n\n\t\t\ttest_rewards.append(cycle_rewards)\n\n\t\treturn test_rewards", "def test_get_scenarios_expanded(self):\n pass", "def test_machine_learning():", "def _expected_inputs():", "def 
test_test(task_dataset, features):\n features = torch.cat(features)\n feat = features[0]\n expected = features.eq(feat).sum().item() / N_SAMPLES\n\n class FakeModule(nn.Module):\n \"\"\"Always returns the same prediction.\"\"\"\n\n def forward(self, reps):\n \"\"\"Just returns the tag.\"\"\"\n assert reps.shape[-1] == N_DIMS_PER_REP\n logits = torch.zeros(len(reps), N_UNIQUE_FEATS)\n logits[:, feat] = 1\n return logits\n\n actual = learning.test(FakeModule(),\n task_dataset,\n device=torch.device('cpu'))\n assert actual == expected", "def test_print_results(self):\n calculated = super().predict_and_print()\n self.assertEqual(calculated, EXP_PRINT_OUTPUT_BASE.format(.18, .1, 0.186, self.test_model.model.train_time) +\n \"Max tree max_depth: 1\\n\"\n \"Number of n_estimators: 1\\n\"\n \"Impurity method: entropy\\n\")", "def test_similar_but_anomalous_variants_for_varinat_analysis(self):\n\n self.testcases[0].job_type = 'some_type1'\n self.testcases[0].project_name = 'project1'\n self.testcases[0].crash_state = 'abcde'\n self.testcases[0].one_time_crasher_flag = False\n self.testcases[0].crash_type = 'crash_type1'\n self.testcases[0].security_flag = True\n self.testcases[1].job_type = 'some_type2'\n self.testcases[1].project_name = 'project1'\n self.testcases[1].crash_state = 'vwxyz'\n self.testcases[1].crash_type = 'crash_type2'\n self.testcases[1].one_time_crasher_flag = False\n self.testcases[1].security_flag = True\n\n self.testcases.append(test_utils.create_generic_testcase())\n self.testcases[2].project_name = 'project1'\n self.testcases[2].crash_type = 'crash_type3'\n self.testcases[2].crash_state = 'x2'\n\n self.testcases.append(test_utils.create_generic_testcase())\n self.testcases[3].project_name = 'project1'\n self.testcases[3].crash_type = 'crash_type4'\n self.testcases[3].crash_state = 'y3'\n\n self.testcases.append(test_utils.create_generic_testcase())\n self.testcases[4].project_name = 'project1'\n self.testcases[4].crash_type = 'crash_type5'\n self.testcases[4].crash_state = 'z4'\n\n self.testcases.append(test_utils.create_generic_testcase())\n self.testcases[5].project_name = 'project1'\n self.testcases[5].crash_type = 'crash_type6'\n self.testcases[5].crash_state = 'w5'\n\n for t in self.testcases:\n t.put()\n\n # testcase2's varinat will be evaluated against testcase1\n self.testcase_variants[0].job_type = 'fake_engine_asan_project1'\n self.testcase_variants[0].testcase_id = self.testcases[0].key.id()\n self.testcase_variants[0].security_flag = True\n self.testcase_variants[1].job_type = 'some_type1'\n self.testcase_variants[1].crash_state = 'abcde'\n self.testcase_variants[1].crash_type = 'crash_type1'\n self.testcase_variants[1].testcase_id = self.testcases[1].key.id()\n self.testcase_variants[1].security_flag = True\n self.testcase_variants.append(test_utils.create_generic_testcase_variant())\n self.testcase_variants.append(test_utils.create_generic_testcase_variant())\n self.testcase_variants.append(test_utils.create_generic_testcase_variant())\n self.testcase_variants.append(test_utils.create_generic_testcase_variant())\n\n for i in range(2, 6):\n self.testcase_variants[i].job_type = 'some_type1'\n self.testcase_variants[i].crash_state = 'abcde'\n self.testcase_variants[i].crash_type = 'crash_type1'\n self.testcase_variants[i].testcase_id = self.testcases[i].key.id()\n self.testcase_variants[i].security_flag = True\n\n for v in self.testcase_variants:\n v.put()\n\n grouper.group_testcases()\n\n for index, t in enumerate(self.testcases):\n self.testcases[index] = 
data_handler.get_testcase_by_id(t.key.id())\n self.assertEqual(self.testcases[index].group_id, 0)\n self.assertTrue(self.testcases[index].is_leader)", "def test_suite():\r\n test(intercept(1, 6, 3, 12) == 3.0)\r\n test(intercept(6, 1, 1, 6) == 7.0)\r\n test(intercept(4, 6, 12, 8) == 5.0)", "def test_model_outcome(predicted, actual, planned):\n if not isinstance(predicted, pd.DataFrame):\n predicted = pd.DataFrame(predicted, columns=[\"PREDICTED_TRIP_DURATION\"])\n if not isinstance(actual, pd.DataFrame):\n actual = pd.DataFrame(actual, columns=[\"ACTUAL_TRIP_DURATION\"])\n if not isinstance(planned, pd.DataFrame):\n planned = pd.DataFrame(planned, columns=[\"PLANNED_TRIP_DURATION\"])\n # Initialise the combined dataframe\n combined = pd.concat([predicted, actual, planned], axis=1)\n # Calculate the actual delay\n actual_delay = combined[\"PLANNED_TRIP_DURATION\"] - combined[\"ACTUAL_TRIP_DURATION\"]\n # Calculate the predicted delay\n predicted_delay = combined[\"PLANNED_TRIP_DURATION\"] - combined[\"PREDICTED_TRIP_DURATION\"]\n # Calculate the difference in delay\n delay_diff = actual_delay - predicted_delay\n # Combine the delays into a single dataframe\n combined_delay = pd.concat([pd.DataFrame(actual_delay, columns=['Actual_Delay']),\n pd.DataFrame(predicted_delay, columns=['Predicted_Delay']),\n pd.DataFrame(delay_diff, columns=['Difference_In_Delay'])], axis=1)\n # Obtain the index of the max and min values of the actual, predicted and difference delays\n actual_max_index = combined_delay[\"Actual_Delay\"].argmax()\n actual_min_index = combined_delay[\"Actual_Delay\"].argmin()\n predicted_max_index = combined_delay[\"Predicted_Delay\"].argmax()\n predicted_min_index = combined_delay[\"Predicted_Delay\"].argmin()\n delay_diff_max_index = combined_delay[\"Difference_In_Delay\"].argmax()\n delay_diff_min_index = combined_delay[\"Difference_In_Delay\"].argmin()\n # Get the Mean Absolute Error\n MAE = metrics.mean_absolute_error(combined[\"ACTUAL_TRIP_DURATION\"], combined[\"PREDICTED_TRIP_DURATION\"])\n # Get the R2 Score\n R2 = metrics.r2_score(combined[\"ACTUAL_TRIP_DURATION\"], combined[\"PREDICTED_TRIP_DURATION\"])\n # Get the Root Mean Squared Error\n RMSE = metrics.mean_squared_error(combined[\"ACTUAL_TRIP_DURATION\"], combined[\"PREDICTED_TRIP_DURATION\"],\n squared=False)\n # Get the Median Absolute Error\n MEDAE = metrics.median_absolute_error(combined[\"ACTUAL_TRIP_DURATION\"], combined[\"PREDICTED_TRIP_DURATION\"])\n # Get the Mean Squared Error Log Value\n MSLE = metrics.mean_squared_log_error(combined[\"ACTUAL_TRIP_DURATION\"], combined[\"PREDICTED_TRIP_DURATION\"])\n # Build Dictionary\n pass_val = {\"combined\": combined,\n \"combined_delay\": combined_delay,\n \"actual_max_index\": actual_max_index,\n \"actual_min_index\": actual_min_index,\n \"predicted_max_index\": predicted_max_index,\n \"predicted_min_index\": predicted_min_index,\n \"delay_diff_max_index\": delay_diff_max_index,\n \"delay_diff_min_index\": delay_diff_min_index,\n \"MAE\": MAE,\n \"R2\": R2,\n \"MEDAE\": MEDAE,\n \"RMSE\": RMSE,\n \"MSLE\": MSLE}\n # Return Dictionary\n return pass_val", "def getTestResults():", "def test_text_classifier_get_testing_samples(self):\n pass", "def test(self):\n bs = verif.metric.Bs()\n bsrel = verif.metric.BsRel()\n bsres = verif.metric.BsRes()\n bsunc = verif.metric.BsUnc()\n bss = verif.metric.Bss()\n obs = [[0],\n [0],\n [0],\n [1],\n [0, 0, 1, 1, 1],\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]\n fcst = [[0],\n [1],\n [0.3],\n [0.1],\n [0.21, 0.21, 
0.21, 0.91, 0.91],\n [0.06, 0.61, 0.45, 0.87, 0.13, 0.61, 0.79, 0.61, 0.06, 0.06, 0.79, 0.61, 0.13, 0.13, 0.79, 0.21, 0.06, 0.55, 0.37, 0.37]]\n ans = {bs: [0, 1, 0.09, 0.81, 0.1457, 0.34928],\n bsrel: [0, 1, 0.09, 0.81, 0.01236667, 0.2076133],\n bsres: [0, 0, 0, 0, 0.1066667, 0.1083333],\n bsunc: [0, 0, 0, 0, 0.24, 0.25],\n bss: [np.nan, np.nan, np.nan, np.nan, 0.3929167, -0.39712]}\n for i in range(len(obs)):\n o = np.array(obs[i])\n f = np.array(fcst[i])\n for key in ans:\n print(key, i)\n calculated = key.compute_from_obs_fcst(o, f)\n expected = ans[key][i]\n if np.isnan(expected):\n self.assertTrue(np.isnan(expected), np.isnan(calculated))\n else:\n self.assertAlmostEqual(expected, calculated, places=5)", "def test_suite():\n test(calc_det([[2, 1],[3, 4]]), 5)", "def test_tanimoto_distance(get_distributions):\n for i, dist_a in enumerate(get_distributions):\n for j, dist_b in enumerate(get_distributions):\n tanimototo = tanimoto_distance(dist_a, dist_b)\n if i == j:\n assert pytest.approx(tanimototo, 0.0001) == 1\n else:\n assert tanimototo < 1", "def test_audio_features(self):\n\n # 1ehPJRt49h6N0LoryqKZXq, 8737: How Far I'll Go (Alessia Cara Version) by Alessia Cara\n # 2fGFaTDbE8aS4f31fM0XE4, 5037: Pop 101 (feat. Anami Vice) by Marianas Trench\n targets = {8737: {'danceability': 0.317,\n 'energy': 0.562,\n 'key': 9,\n 'loudness': -9.609,\n 'mode': 1,\n 'speechiness': 0.395,\n 'acousticness': 0.124,\n 'instrumentalness': 0.000144,\n 'liveness': 0.0667,\n 'valence': 0.127,\n 'tempo': 181.100,\n 'duration_ms': 175507,\n 'time_signature': 4,\n },\n 5037: {'danceability': 0.756,\n 'energy': 0.658,\n 'key': 11,\n 'loudness': -6.128,\n 'mode': 0,\n 'speechiness': 0.202,\n 'acousticness': 0.0581,\n 'instrumentalness': 0,\n 'liveness': 0.0674,\n 'valence': 0.640,\n 'tempo': 120.018,\n 'duration_ms': 247829,\n 'time_signature': 4,\n },\n }\n\n results = {track.i_id: track for track in self.tracks if track.i_id in targets}\n\n for target, expecteds in targets.iteritems():\n result = results[target]\n for key, expected in expecteds.iteritems():\n self.assertEqual(result.__getattr__(key), expected)", "def test(ndigit, elambda, showSamples, showConfusion):\n Data, Label = getData()\n trainX, trainY, testX, testY = splitData(Data, Label, ndigit)\n trainX_mean = np.mean(trainX, axis=0)\n trainX_new = trainX - trainX_mean\n eigenvectors = getEigenVectors(trainX_new, elambda)\n trainX_eigen = trainX_new.dot(eigenvectors)\n testX_new = testX - trainX_mean\n testX_eigen = testX_new.dot(eigenvectors)\n testO = []\n if showSamples:\n correct_samples = []\n correct_samples_nearest = []\n correct_samples_eigen = []\n correct_samples_nearest_eigen = []\n correct_samples_labels = []\n correct_samples_predictions = []\n wrong_samples = []\n wrong_samples_nearest = []\n wrong_samples_eigen = []\n wrong_samples_nearest_eigen = []\n wrong_samples_labels = []\n wrong_samples_predictions = []\n if showConfusion:\n conf = np.zeros((ndigit, ndigit))\n for i in xrange(testX_eigen.shape[0]):\n t = testX_eigen[i]\n j = getNearestSampleIndex(t, trainX_eigen)\n p = int(trainY[j])\n y = int(testY[i])\n if showConfusion:\n conf[p, y] += 1\n if showSamples:\n if p == y:\n if len(correct_samples) < y + 1:\n correct_samples.append(testX[i])\n correct_samples_nearest.append(trainX[j])\n correct_samples_eigen.append(testX_eigen[i])\n correct_samples_nearest_eigen.append(trainX_eigen[j])\n correct_samples_labels.append(y)\n correct_samples_predictions.append(p)\n else:\n if len(wrong_samples) < y + 1:\n 
wrong_samples.append(testX[i])\n wrong_samples_nearest.append(trainX[j])\n wrong_samples_eigen.append(testX_eigen[i])\n wrong_samples_nearest_eigen.append(trainX_eigen[j])\n wrong_samples_labels.append(y)\n wrong_samples_predictions.append(p)\n testO.append(p)\n testO = np.array(testO)\n train0 = []\n for i in xrange(trainX_eigen.shape[0]):\n t = trainX_eigen[i]\n j = getNearestSampleIndex(t, trainX_eigen)\n min_class = trainY[j]\n train0.append(min_class)\n train0 = np.array(train0)\n print \"for digits = %d lambda = %.2f train = %.6f test = %.6f \" % (\n ndigit, elambda, (train0 == trainY).mean(), (testO == testY).mean())\n if showConfusion:\n print conf\n if showSamples:\n displaySamples(correct_samples_labels, correct_samples_predictions,\n correct_samples, correct_samples_nearest,\n correct_samples_eigen, correct_samples_nearest_eigen,\n eigenvectors, trainX_mean, elambda, 'Correct')\n displaySamples(wrong_samples_labels, wrong_samples_predictions,\n wrong_samples, wrong_samples_nearest,\n wrong_samples_eigen, wrong_samples_nearest_eigen,\n eigenvectors, trainX_mean, elambda, 'Wrong')", "def test(theta, features, labels):\n N = labels.size # sample size 12665\n correct = 0\n for i in range(N):\n temp = Spamfilter.sigmoid(Spamfilter.activation(theta, features[i]))\n if (temp > 0.5 and labels[i] == 1):\n correct += 1\n elif(temp < 0.5 and labels[i] == 0):\n correct += 1\n print(correct, \"correct out of\", labels.size)\n print(\"testing error rate: \", 100-(correct/N*100))", "def SampleConditions(testcase, delta):\n if testcase == 0:\n t0 = delta + 1\n t1 = 0\n elif testcase == 1:\n t0 = 0\n t1 = delta + 1\n else:\n t0 = delta + 1\n t1 = delta + 1\n\n # Sample a test case\n passed = False\n while not passed:\n x0 = _xm * rng.uniform(-1, 1)\n x1 = _xm * rng.uniform(-1, 1)\n vm = _vm * rng.uniform(0, 1)\n am = _am * rng.uniform(0, 1)\n v0 = vm * rng.uniform(-1, 1)\n v1 = vm * rng.uniform(-1, 1)\n\n curve1 = interpolator.Compute1DTrajectory(x0, x1, v0, v1, vm, am)\n if not (len(curve1) == 2):\n continue\n t0 = curve1[0].duration\n t1 = curve1[1].duration\n if testcase == 0:\n passed = t0 < delta and t1 >= delta\n elif testcase == 1:\n passed = t0 >= delta and t1 < delta\n else:\n passed = t0 < delta and t1 < delta\n \n return x0, x1, v0, v1, vm, am", "def get_test_case_info():\n m = NNMatrixTrainer()\n return m.get_evaluations()", "def test_reviewData():\n starttime = UTCDateTime('2018-06-18T02:34:20')\n endtime = UTCDateTime('2018-06-18T02:37:20')\n st = rd.getdata('IU', 'TEIG,PAYG', '00', 'BHZ', starttime, endtime, savedat=True,\n filenamepref='Test1_', loadfromfile=True, reloadfile=False)\n\n event_lat = 14.178\n event_lon = -90.670\n\n rd.attach_coords_IRIS(st)\n rd.attach_distaz_IRIS(st, event_lat, event_lon)\n\n fig = rd.recsec(st)\n\n freqs, amps, fig2 = rd.make_multitaper(st, render=False)\n\n fig3 = rd.make_spectrogram(st)\n\n rd.nextpow2(7)\n\n stacc, stvel = rd.getpeaks(st)\n\n rd.fourier_spectra(st)", "def test_calculate_all_metrics_precision():\n pass", "def workbench_scenarios():\n return [\n (\"Oppia Embedding\",\n \"\"\"<vertical_demo>\n <oppia oppiaid=\"0\" src=\"https://www.oppia.org\" width=\"700\" />\n </vertical_demo>\n \"\"\"),\n ]", "def test_1sample(self):\r\n c = AlphaDiversityCalc(observed_otus)\r\n self.assertEqual(c(data_path=self.single_sample_otu_table_fp), [2])", "def test_analytical_vs_numerical():\n pass", "def workbench_scenarios(): \n return [\n (\"TermsXBlock\",\n \"\"\"<terms/>\n \"\"\"),\n (\"Multiple TermsXBlock\",\n \"\"\"<vertical_demo>\n <terms/>\n 
<terms/>\n <terms/>\n </vertical_demo>\n \"\"\"),\n ]", "def test_series_in_features(self):\n assert parse_command({'test{{A,B}}': {'depends_on': 'name{{A,B}}'}}) == [\n ('testA', {'depends_on': 'nameA'}), ('testB', {'depends_on': 'nameB'})]", "def test_get_scenarios(self):\n pass", "def testResults(self):\n problem = problems.simple()\n optimizer = meta.MetaOptimizer(net=dict(\n net=\"CoordinateWiseDeepLSTM\",\n net_options={\n \"layers\": (),\n \"initializer\": \"zeros\"\n }))\n minimize_ops = optimizer.meta_minimize(problem, 5)\n with self.test_session() as sess:\n sess.run(tf.global_variables_initializer())\n cost, final_x = train(sess, minimize_ops, 1, 2)\n\n # Torch results\n torch_cost = 0.7325327\n torch_final_x = 0.8559\n\n self.assertAlmostEqual(cost, torch_cost, places=4)\n self.assertAlmostEqual(final_x[0], torch_final_x, places=4)", "def test_act(self):\n\n spec = leabra.UnitSpec(g_bar_e=0.40, g_bar_l=2.80, g_bar_i=1.00, g_l=1.0,\n e_rev_e=1.00, e_rev_l=0.15, e_rev_i=0.15,\n act_thr=0.25, act_gain=600.00, act_sd=0.01)\n u = leabra.Unit(spec=spec)\n\n for _ in range(15):\n u.add_excitatory(1.0)\n u.calculate_net_in()\n u.cycle('minus')\n\n for _ in range(150):\n u.add_excitatory(1.0)\n u.calculate_net_in()\n u.cycle('minus')\n self.assertTrue(0.85 < u.act <= 0.95)\n\n for _ in range(10):\n u.add_excitatory(0.0)\n u.calculate_net_in()\n u.cycle('minus')\n\n self.assertTrue(u.act < 0.05)", "def test_build_feature_base(self):\n data = pd.DataFrame(pd.read_csv(\"tests/in_data/pro1_sub.csv\"))\n\n X = data.ix[:,1]\n Y = data.ix[:,0]\n model_sample = Model([],\"presence\")\n\n feature_base = model_sample.build_feature_base(X,Y)\n feature_evaluation =\n assert_equal(len(feature_base) > 10, True)", "def test_features(iris):\n assert iris.num_features == 4\n assert iris.feature_names == [\n \"sepal length (cm)\",\n \"sepal width (cm)\",\n \"petal length (cm)\",\n \"petal width (cm)\",\n ]", "def test_category_and_its_feature(self):\n class RunnerBlah(Runner):\n def __init__(self, renv):\n super(RunnerBlah, self).__init__(renv)\n self.register_feature_class('bravo', Feature)\n self.register_feature_class('charlie', Feature)\n self.register_feature_category_class(\n 'alpha', features=['bravo', 'charlie'], mono=True)\n\n renv = create_runtime(RunnerBlah)\n renv.create_runner('runner')\n\n ctrl = renv.feature_ctrl\n\n total_order, _ = ctrl.get_activation_order(['alpha', 'bravo'])\n self.assertEqual(['bravo'], total_order)", "def test(references, corpus, pred):\n sim_vectors = mod.model(references, corpus)\n probs = []\n for block in sim_vectors:\n probs.append(pred.predict_proba(block))\n return [[prob[1] for prob in prob_group] for prob_group in probs]", "def unit_test():\n # Fixed filename\n ROOT = '/home/knmac/projects/vid_time_model/data/EDTCN_results/50Salads/'\\\n 'mid/mid_motionconstraint_nomotion_g0/nepoch_200'\n RUN = 'run_11'\n SPLIT = 'Split_1'\n FNAME = os.path.join(ROOT, RUN, SPLIT+'.mat')\n\n # Load computed results\n content = open(os.path.join(ROOT, RUN, 'trials.txt')).read().splitlines()\n for line in content:\n if SPLIT in line:\n break\n tokens = line.split(' ')\n acc_rec = tokens[2].replace('accuracy:', '').replace(',', '')\n edit_rec = tokens[3].replace('edit_score:', '').replace(',', '')\n f1_rec = tokens[4].replace('overlap_f1:', '').replace(',', '')\n\n # Load data\n data = scipy.io.loadmat(FNAME)\n P, S, Y = data['P'].squeeze(), data['S'].squeeze(), data['Y'].squeeze()\n P = [x.squeeze() for x in P]\n S = S.tolist()\n Y = [x.squeeze() for x in Y]\n\n # Compute 
metrics\n acc = accuracy(P, Y)\n edit = edit_score(P, Y, norm=True, bg_class=0)\n f1 = overlap_f1(P, Y, n_classes=18, bg_class=0)\n _, mAP = mid_mAP(P, Y, S, bg_class=0)\n\n # Print out\n print('Testing metrics...')\n print(' Acc: computed={:.02f} - recorded={}'.format(acc, acc_rec))\n print(' Edit: computed={:.02f} - recorded={}'.format(edit, edit_rec))\n print(' F1@10: computed={:.02f} - recorded={}'.format(f1, f1_rec))\n print(' mAP: computed={:.02f}'.format(mAP))\n return 0", "def test_T4():", "def test_T4():", "def test_recognize_describe(self):\n pass", "def test__validate_features__1():\n for input_value in (\n 12.6,\n ):\n with vampytest.assert_raises(TypeError):\n validate_features(input_value)", "def workbench_scenarios():\n return [\n (\"SummaryXBlock\",\n \"\"\"<summary/>\n \"\"\"),\n (\"Multiple SummaryXBlock\",\n \"\"\"<vertical_demo>\n <summary/>\n <summary/>\n <summary/>\n </vertical_demo>\n \"\"\"),\n ]", "def generate_features_test(stances, dataset, name, feature_list, features_dir):\n h, b, bodyId, headId = [], [], [], []\n\n feature_dict = {'overlap': word_overlap_features,\n 'refuting': refuting_features,\n 'polarity': polarity_features,\n 'hand': hand_features,\n 'word_unigrams_5000_concat_tf_l2_holdout_unlbled_test': word_unigrams_5000_concat_tf_l2_holdout_unlbled_test,\n 'NMF_cos_300_holdout_unlbled_test': NMF_cos_300_holdout_unlbled_test,\n 'NMF_concat_300_holdout_unlbled_test': NMF_concat_300_holdout_unlbled_test,\n 'latent_dirichlet_allocation_25_holdout_unlbled_test': latent_dirichlet_allocation_25_holdout_unlbled_test,\n 'latent_semantic_indexing_gensim_300_concat_holdout_unlbled_test': latent_semantic_indexing_gensim_300_concat_holdout_unlbled_test,\n 'NMF_fit_all_incl_holdout_and_test': NMF_fit_all_incl_holdout_and_test,\n 'latent_dirichlet_allocation_incl_holdout_and_test': latent_dirichlet_allocation_incl_holdout_and_test,\n 'latent_semantic_indexing_gensim_holdout_and_test': latent_semantic_indexing_gensim_holdout_and_test,\n 'NMF_fit_all_concat_300_and_test': NMF_fit_all_concat_300_and_test,\n 'word_ngrams_concat_tf5000_l2_w_holdout_and_test': word_ngrams_concat_tf5000_l2_w_holdout_and_test,\n 'NMF_fit_all': NMF_fit_all,\n 'word_ngrams_concat_tf5000_l2_w_holdout': word_ngrams_concat_tf5000_l2_w_holdout,\n 'latent_dirichlet_allocation': latent_dirichlet_allocation,\n 'latent_semantic_indexing_gensim_test': latent_semantic_indexing_gensim_test,\n 'NMF_fit_all_concat_300': NMF_fit_all_concat_300,\n 'NMF_cos_50': NMF_cos_50,\n 'latent_dirichlet_allocation_25': latent_dirichlet_allocation_25,\n 'latent_semantic_indexing_gensim_300_concat_holdout': latent_semantic_indexing_gensim_300_concat_holdout,\n 'NMF_concat_300_holdout': NMF_concat_300_holdout,\n 'word_unigrams_5000_concat_tf_l2_holdout': word_unigrams_5000_concat_tf_l2_holdout,\n 'ppdb': ppdb,\n 'stanford_ppdb': stanford_ppdb_score,\n 'stanford_ppdb_1sent': stanford_ppdb_score_1sent,\n 'stanford_ppdb_2sent': stanford_ppdb_score_2sent,\n 'stanford_ppdb_3sent': stanford_ppdb_score_3sent,\n 'stanford_sentiment': stanford_sentiment,\n 'stanford_sentiment_1sent': stanford_sentiment_1sent,\n 'stanford_sentiment_2sent': stanford_sentiment_2sent,\n 'stanford_sentiment_3sent': stanford_sentiment_3sent,\n 'stanford_wordsim': stanford_based_verb_noun_sim,\n 'stanford_wordsim_1sent': stanford_based_verb_noun_sim_1sent,\n 'stanford_wordsim_2sent': stanford_based_verb_noun_sim_2sent,\n 'stanford_wordsim_3sent': stanford_based_verb_noun_sim_3sent,\n 'stanford_negation': stanford_negation_features,\n 
'stanford_negation_1sent': stanford_negation_features_1sent,\n 'stanford_negation_2sent': stanford_negation_features_2sent,\n 'stanford_negation_3sent': stanford_negation_features_3sent,\n 'stanford_avg_words_per_sent': stanford_avg_words_per_sent,\n 'stanford_avg_words_per_sent_1sent': stanford_avg_words_per_sent_1sent,\n 'stanford_avg_words_per_sent_2sent': stanford_avg_words_per_sent_2sent,\n 'stanford_avg_words_per_sent_3sent': stanford_avg_words_per_sent_3sent,\n 'hedging': hedging_features,\n 'sen2sen': sen2sen_similarity_max,\n 'wmdsenSen': word_mover_distance_similarity_sentence_min,\n 'wmdsenDoc': word_mover_distance_wholebody,\n 'sdm_sim': sdm_sim,\n 'discuss': discuss_features,\n 'single_flat_LSTM_50d_100': single_flat_LSTM_50d_100,\n 'char_3grams_5000_concat_all_data': char_3grams_5000_concat_all_data,\n 'lexical_features': lexical_features,\n 'max_diff_twitter_uni_bigrams': max_diff_twitter_uni_bigrams,\n 'mpqa_unigrams': mpqa_unigrams,\n 'negated_context_word_12grams_concat_tf5000_l2_all_data': negated_context_word_12grams_concat_tf5000_l2_all_data,\n 'nrc_emo_lex': nrc_emo_lex,\n 'nrc_hashtag_sentiment_unigram': nrc_hashtag_sentiment_unigram,\n 'nrc_hashtag_sentiment_unigram_POS': nrc_hashtag_sentiment_unigram_POS,\n #'POS_features': POS_features,\n 'readability_features': readability_features,\n 'sentiment140_unigrams': sentiment140_unigrams,\n 'structural_features': structural_features,\n 'latent_dirichlet_allocation_300': latent_dirichlet_allocation_300,\n 'NMF_cos_300': NMF_cos_300\n }\n\n stanceCounter = 0\n for stance in stances:\n h.append(stance['Headline'])\n b.append(dataset.articles[stance['Body ID']])\n bodyId.append(stance['Body ID'])\n headId.append(name+str(stanceCounter))\n stanceCounter += 1\n\n X_feat = []\n for feature in feature_list:\n print(\"calculate feature: \" + str(feature))\n feat = gen_or_load_feats(feature_dict[feature], h, b, features_dir+\"/\"+feature+\"_test.\"+name+'.npy', bodyId, feature, headId, fold=name)\n X_feat.append(feat)\n print(len(feat))\n X = np.concatenate(X_feat, axis=1)\n return X", "def test(self):\n self.load()\n bottleneck_features = np.load(self.feature_path)\n test = bottleneck_features['test']\n _, test_targets = load_dataset(self.image_path_test) \n predictions = [np.argmax(self.model.predict(np.expand_dims(feature, axis=0))) for feature in test]\n test_accuracy = 100*np.sum(np.array(predictions) == np.argmax(test_targets, axis=1))/len(predictions)\n print('{}, test accuracy: {:.4f}%'.format(self.name, test_accuracy))\n return test_accuracy", "def test_T01():", "def test_intent_classifier_get_testing_samples(self):\n pass", "def workbench_scenarios():\n return [\n (\"FeedbackXBlock\",\n \"\"\"<vertical_demo>\n <feedback/>\n </vertical_demo>\n \"\"\"),\n ]", "def return_MatchUpTest_r__():\n\n ####################################################################################################################\n # 1. 
Initialise test data\n ####################################################################################################################\n\n values = array([470.5, 720.56, 450.9, 295.6, 315.23,\n 70.5, 70.6, 70.3, 70.7, 70.5,\n 71.5, 71.6, 71.3, 71.7,\n 80.5, 80.6, 80.3, 80.7,\n 150.5, 151.1, 149.8, 150.2, 151.4,\n 140.5, 141.1, 139.8, 140.2,\n 160.5, 161.1, 169.8, 160.2,\n 30.2, 20.4, 28.2, 50.7, 45.6,\n 29.2, 37.4, 28.2, 50.7,\n 28.2, 32.4, 22.2, 53.7, ])\n unc = [Uncertainty(1, array([1.6, 1.5, 1.5, 1.3, 1.5])),\n Uncertainty(1, array([3.1, 3.2, 3.2, 3.1, 3.0])),\n Uncertainty(1, array([3.3, 3.4, 3.1, 3.2])),\n Uncertainty(1, array([2.1, 2.2, 2.2, 2.1])),\n Uncertainty(1, array([5.0, 4.7, 5.1, 5.2, 5.3])),\n Uncertainty(1, array([4.2, 4.3, 4.4, 4.3])),\n Uncertainty(1, array([4.0, 3.7, 4.4, 4.7])),\n Uncertainty(1, array([2.2, 1.7, 2.0, 4.3, 2.6])),\n Uncertainty(1, array([2.3, 1.2, 2.3, 4.4])),\n Uncertainty(1, array([3.2, 2.7, 3.0, 5.3]))]\n ks = array([1.2, 1.7, 1.3, 1.4, 1.3, 3.2, 3.7, 3.3, 3.4])\n unck = [Uncertainty(1, array([0.25, 0.25, 0.25, 0.25, 0.25])),\n Uncertainty(1, array([0.2644, 0.2644, 0.2644, 0.2644]))]\n idx = {\"Nm\": [5, 4],\n \"cNm\": [0, 5, 9],\n \"Im\": [[0, 1], [1, 2]],\n \"sensors\": [-1, 1, 2],\n \"sensor_ms\": [1, 3, 3],\n \"n_sensor\": [0, 1, 1, 2, 1, 1, 2, 1, 1, 2],\n \"n_mu\": [1, 1, 2, 2, 1, 2, 2, 1, 2, 2],\n \"n_cov\": [1, 1, 1, 1, 2, 2, 2, 3, 3, 3],\n \"N_var\": [5, 5, 4, 4, 5, 4, 4, 5, 4, 4],\n \"idx\": [0, 5, 10, 14, 18, 23, 27, 31, 36, 40, 44],\n \"Ia\": [1, 1, 1, 2, 2, 2]}\n a = array([1., 1.3, 0.002, 0.5, 1.1, 0.0005])\n\n ####################################################################################################################\n # 3. Initialise MatchUp object\n ####################################################################################################################\n\n MatchUpTest = MatchUp()\n MatchUpTest.values = values\n MatchUpTest.unc = unc\n MatchUpTest.ks = ks\n MatchUpTest.unck = unck\n MatchUpTest.idx = idx\n MatchUpTest.a = a\n\n return MatchUpTest", "def test():\n\t\treturn [\"vice.multizone\",\n\t\t\t[\n\t\t\t\ttest_from_output(),\n\t\t\t\tmig_matrix_row.test(run = False),\n\t\t\t\tmig_matrix.test(run = False),\n\t\t\t\tmig_specs.test(run = False),\n\t\t\t\tzone_array.test(run = False),\n\t\t\t\t_multizone.test(run = False),\n\t\t\t\tsrc_test(run = False)\n\t\t\t]\n\t\t]", "def test_prop_fluctuation(self):\n tmax = 10.0\n dt = 1.0\n\n ini_rate = 80.0\n\n nsteps = int_r(tmax/dt)\n\n tutor = SimpleNeurons(1, out_fct=lambda i: ini_rate + i*20.0/nsteps - 10.0)\n reward = MockReward(lambda _: 1.0)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=ini_rate, learning_rate=1.0,\n use_tutor_baseline=False)\n\n sim = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim.run(tmax)\n\n drates = (tutor_rule.rates - ini_rate)[:, 0]\n\n fluctuations = (np.arange(nsteps)*20.0/nsteps - 10.0)\n mask = (fluctuations > 0)\n ratio = np.mean(drates[mask] / fluctuations[mask])\n\n self.assertLess(np.max(np.abs(drates - ratio*fluctuations)), 1e-6)", "def evaluate_features(trainFeatures, testFeatures):\n classifier = NaiveBayesClassifier.train(trainFeatures)\n\n #initiates referenceSets and testSets\n referenceSets = collections.defaultdict(set)\n testSets = collections.defaultdict(set)\n\n for i, (features, label) in enumerate(testFeatures):\n referenceSets[label].add(i)\n predicted = classifier.classify(features)\n testSets[predicted].add(i)\n\n print 'train on %d instances, test on 
%d instances' % (len(trainFeatures), len(testFeatures))\n print 'accuracy:', nltk.classify.util.accuracy(classifier, testFeatures)\n print 'pos precision:', precision(referenceSets['pos'], testSets['pos'])\n print 'pos recall:', recall(referenceSets['pos'], testSets['pos'])\n print 'neg precision:',precision(referenceSets['neg'], testSets['neg'])\n print 'neg recall:', recall(referenceSets['neg'], testSets['neg'])\n classifier.show_most_informative_features(50)", "def test_figure4(self):\n\n topics = get_topics('msmarco-passage-dev-subset')\n qrels = get_qrels('msmarco-passage-dev-subset')\n\n self.assertEqual(len(topics), 6980)\n self.assertEqual(len(qrels), 6980)\n\n # Compute the average length of queries:\n avg_qlen = sum([len(topics[t]['title'].split()) for t in topics])/len(topics)\n\n # Compute the average number of relevance judgments per query:\n avg_qrels = sum([len(qrels[t]) for t in topics])/len(topics)\n\n self.assertAlmostEqual(avg_qlen, 5.925, delta=0.001)\n self.assertAlmostEqual(avg_qrels, 1.065, delta=0.001)", "def test_staff_inputs_expressions(self):\r\n problem = self.build_problem(answer=\"1/3\", tolerance=1e-3)\r\n correct_responses = [\"1/3\", \"0.333333\"]\r\n incorrect_responses = []\r\n self.assert_multiple_grade(problem, correct_responses, incorrect_responses)", "def test_040_normalised_goal_difference(self):\n\n def create_premier_league_normalised_goal_diff_fn(fn_team: str):\n team_stat = Stats.n_sample_stats_for_team(cursor=db_in_cursor,\n team=fn_team,\n last_sample_date=self.model_date,\n n_samples=self.num_samples,\n normalize_by_matches=True)\n\n return FeatureModel(input_data=team_stat,\n id=team_stat.team_name,\n feature_model_making_fn=lambda stat: stat.goal_diff\n )\n\n for match_date in played_home_OR_away_before_dates:\n ####\n # Build model up to the day before the match\n ####\n self.model_date = match_date - timedelta(days=1)\n self.num_samples = num_matches_in_season\n\n models: {str: FeatureModel} = FeatureModel.create_models_for_all_teams(\n model_making_fn=create_premier_league_normalised_goal_diff_fn, entities=teams)\n\n self.persist_models(model_gen_date=self.model_date, model_description=self.shortDescription(),\n models=models)\n\n self.make_and_store_predictions_for_date(match_date=match_date, models=models)", "def test_conditional():\n # verify that conditioning increases the likelihood of getting a sample with the specified\n # categorical value", "def test_ex_2_3(self):\n\n wam = WAM()\n wam.execute(self.fig_2_3_instrs)\n aW = wam.deref_reg(5)\n aZ = wam.deref_reg(2)\n wam.execute(self.fig_2_4_instrs)\n aX = wam.deref_reg(5)\n aY = wam.deref_reg(4)\n self.assertEqual(wam.get_term_repr(aW), 'f(a)')\n self.assertEqual(wam.get_term_repr(aX), 'f(a)')\n self.assertEqual(wam.get_term_repr(aY), 'f(f(a))')\n self.assertEqual(wam.get_term_repr(aZ), 'f(f(a))')", "def test_threat(self):\n metric = verif.metric.Threat()\n obs = np.array([0, 1, 2, 3])\n fcst = np.array([0, 3, 1, 2])\n\n # Hits: 1\n # FA: 1\n # Miss: 1\n # CR: 0\n interval = verif.interval.Interval(1.5, np.inf, True, True)\n f_interval = verif.interval.Interval(1.5, np.inf, True, True)\n value = metric.compute_from_obs_fcst(obs, fcst, interval, f_interval)\n self.assertEqual(value, 1.0/3)", "def test_T3():", "def test_T3():", "def test_text_classifier_add_testing_samples(self):\n pass", "def test_all_features(self):\n to_create = ['looktest1', 'looktest2', 'looktest3']\n for f in to_create:\n Feature(f).activate()\n\n all_features = Feature.all_features()\n 
self.assertEqual(len(all_features), len(to_create))\n for f in to_create:\n self.assertTrue(f in all_features)", "def get_predictions():\n\n print(\"OK1\");\n print(\"OK2\");\n return;", "def print_metrics(mva, df_train, df_test,\n y_train, y_test,\n mva_response_train=None, mva_response_test=None,\n w_train=None, w_test=None):\n\n train_prediction = mva.predict(df_train)\n test_prediction = mva.predict(df_test)\n\n if mva_response_train is None:\n mva_response_train = classifiers.evaluate_mva(df_train, mva)\n if mva_response_test is None:\n mva_response_test = classifiers.evaluate_mva(df_test, mva)\n\n print(\"\\nClassification Reports:\")\n print(\"Test sample:\")\n print(classification_report(y_test, test_prediction,\n target_names=[\"background\", \"signal\"]))\n print(\"Training sample:\")\n print(classification_report(y_train, train_prediction,\n target_names=[\"background\", \"signal\"]))\n\n print(\"Confusion matrix:\")\n print(\"Test sample:\")\n print(confusion_matrix(y_test, test_prediction))\n print(\"Training sample:\")\n print(confusion_matrix(y_train, train_prediction))\n print()\n\n print(\"KS Test p-value:\")\n print(\"Signal:\")\n print(ks_2samp(mva_response_train[y_train == 1],\n mva_response_test[y_test == 1],\n None if w_train is None else w_train[y_train == 1],\n None if w_test is None else w_test[y_test == 1])[1])\n print(\"Background:\")\n print(ks_2samp(mva_response_train[y_train == 0],\n mva_response_test[y_test == 0],\n None if w_train is None else w_train[y_train == 0],\n None if w_test is None else w_test[y_test == 0])[1])\n print()\n\n # Try really hard to get the feature importances\n feature_importances = []\n try:\n feature_importances = mva.feature_importances_\n except AttributeError:\n pass\n try: # last step of a pipeline?\n feature_importances = mva.steps[-1][1].feature_importances_\n except AttributeError:\n pass\n try: # grid search?\n feature_importances = mva.best_estimator_.feature_importances_\n except AttributeError:\n pass\n try: # grid search last step of pipeline?\n feature_importances = \\\n mva.steps[-1][1].best_estimator_.feature_importances_\n except AttributeError:\n pass\n try: # grid search of a pipeline?\n feature_importances = \\\n mva.best_estimator_.steps[-1][0].feature_importances_\n except AttributeError:\n pass\n\n if len(feature_importances):\n print(\"Feature importance:\")\n for var, importance in sorted(\n zip(list(df_train), feature_importances),\n key=lambda x: x[1],\n reverse=True):\n print(\"{0:15} {1:.3E}\".format(var, importance))\n else:\n pass\n print()", "def test_expectation():\n\n age = 1e-5\n ass_pars1 = np.array([0, 0, 0, 0, 0, 0, 5., 2., age])\n comp1 = SphereComponent(ass_pars1)\n ass_pars2 = np.array([100., 0, 0, 20, 0, 0, 5., 2., age])\n comp2 = SphereComponent(ass_pars2)\n starcounts = [100,100]\n synth_data = SynthData(pars=[ass_pars1, ass_pars2],\n starcounts=starcounts)\n synth_data.synthesise_everything()\n tabletool.convert_table_astro2cart(synth_data.table)\n\n true_memb_probs = np.zeros((np.sum(starcounts), 2))\n true_memb_probs[:starcounts[0], 0] = 1.\n true_memb_probs[starcounts[0]:, 1] = 1.\n\n # star_means, star_covs = tabletool.buildDataFromTable(synth_data.astr_table)\n # all_lnols = em.getAllLnOverlaps(\n # synth_data.astr_table, [comp1, comp2]\n # )\n\n fitted_memb_probs = em.expectation(\n tabletool.build_data_dict_from_table(synth_data.table),\n [comp1, comp2]\n )\n\n assert np.allclose(true_memb_probs, fitted_memb_probs, atol=1e-10)", "def run_automatic_tester():\n number_of_target_maps = 
len(os.listdir(TargetDetectionTesterSettings.TARGET_DETECTION_REPORT_JSON_FILE_SAVE_PATH))\n overall_true_positive_count = 0\n overall_false_positive_count = 0\n overall_target_count = 0\n\n for index_0 in range(number_of_target_maps):\n\n answer_sheet = json.load(open(os.path.join(TargetDetectionTesterSettings.TARGET_MAP_ANSWER_SHEET_PATH, str(index_0 + 1) + \".json\")))\n answer_list = []\n\n for index_1 in range(len(answer_sheet[\"targets\"])):\n answer_list.append((answer_sheet[\"targets\"][index_1][\"target_center_coordinates\"][0], answer_sheet[\"targets\"][index_1][\"target_center_coordinates\"][1]))\n overall_target_count += len(answer_list)\n\n target_detection_result = json.load(open(os.path.join(TargetDetectionTesterSettings.TARGET_DETECTION_REPORT_JSON_FILE_SAVE_PATH, str(index_0 + 1) + \".json\")))\n result_list = []\n\n for index_2 in range(len(target_detection_result[\"image_processing_results\"])):\n result_list.append((target_detection_result[\"image_processing_results\"][index_2][\"target_location\"][0] + (target_detection_result[\"image_processing_results\"][index_2][\"target_location\"][2] / 2), target_detection_result[\"image_processing_results\"][index_2][\"target_location\"][1] + (target_detection_result[\"image_processing_results\"][index_2][\"target_location\"][3] / 2)))\n\n current_true_positive_count = 0\n current_false_positive_count = 0\n banned_index_list = []\n\n for index_3 in range(len(answer_list)):\n true_positive_found = False\n\n for index_4 in range(len(result_list)):\n is_index_4_banned = False\n\n for index_5 in range(len(banned_index_list)):\n if (index_4 == banned_index_list[index_5]):\n is_index_4_banned = True\n\n if (is_index_4_banned == True):\n continue\n\n correct_target_center_x = answer_list[index_3][0]\n correct_target_center_y = answer_list[index_3][1]\n\n detected_target_center_x = result_list[index_4][0]\n detected_target_center_y = result_list[index_4][1]\n\n if ((abs(correct_target_center_x - detected_target_center_x) <= 20) and (abs(correct_target_center_y - detected_target_center_y) <= 20)):\n current_true_positive_count += 1\n banned_index_list.append(index_4)\n true_positive_found = True\n continue\n\n current_false_positive_count = len(result_list) - current_true_positive_count\n\n overall_true_positive_count += current_true_positive_count\n overall_false_positive_count += current_false_positive_count\n\n percentage = 100 * float(overall_true_positive_count) / (overall_target_count)\n\n TargetDetectionTesterLogger.log(\"--------------------------------------------------\")\n TargetDetectionTesterLogger.log(\"Total True Positive Count: \" + str(overall_true_positive_count))\n TargetDetectionTesterLogger.log(\"Total False Positive Count: \" + str(overall_false_positive_count))\n TargetDetectionTesterLogger.log(\"Percentage of Successfully Detected Targets: \" + str(percentage) + \"%\")\n TargetDetectionTesterLogger.log(\"--------------------------------------------------\")", "def test_example(self, example_dataset, expected_result):\n\n transformer = PreprocessFeatures()\n result = transformer.fit_transform(example_dataset)\n\n assert (result == expected_result).all()", "async def test_state_characteristics(hass: HomeAssistant) -> None:\n now = dt_util.utcnow()\n current_time = datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC)\n start_datetime = datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC)\n characteristics: Sequence[dict[str, Any]] = (\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": 
\"average_linear\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": 10.68,\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"average_step\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": 11.36,\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"average_timeless\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": float(VALUES_NUMERIC[-1]),\n \"value_9\": float(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"change\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": float(0),\n \"value_9\": float(round(VALUES_NUMERIC[-1] - VALUES_NUMERIC[0], 2)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"change_sample\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": float(\n round(\n (VALUES_NUMERIC[-1] - VALUES_NUMERIC[0])\n / (len(VALUES_NUMERIC) - 1),\n 2,\n )\n ),\n \"unit\": \"ยฐC/sample\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"change_second\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": float(\n round(\n (VALUES_NUMERIC[-1] - VALUES_NUMERIC[0])\n / (60 * (len(VALUES_NUMERIC) - 1)),\n 2,\n )\n ),\n \"unit\": \"ยฐC/s\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"count\",\n \"value_0\": 0,\n \"value_1\": 1,\n \"value_9\": 9,\n \"unit\": None,\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"datetime_newest\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": (start_datetime + timedelta(minutes=9)).isoformat(),\n \"value_9\": (start_datetime + timedelta(minutes=9)).isoformat(),\n \"unit\": None,\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"datetime_oldest\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": (start_datetime + timedelta(minutes=9)).isoformat(),\n \"value_9\": (start_datetime + timedelta(minutes=1)).isoformat(),\n \"unit\": None,\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"datetime_value_max\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": (start_datetime + timedelta(minutes=9)).isoformat(),\n \"value_9\": (start_datetime + timedelta(minutes=2)).isoformat(),\n \"unit\": None,\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"datetime_value_min\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": (start_datetime + timedelta(minutes=9)).isoformat(),\n \"value_9\": (start_datetime + timedelta(minutes=5)).isoformat(),\n \"unit\": None,\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"distance_95_percent_of_values\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": float(round(2 * 1.96 * statistics.stdev(VALUES_NUMERIC), 2)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"distance_99_percent_of_values\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": float(round(2 * 2.58 * statistics.stdev(VALUES_NUMERIC), 2)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"distance_absolute\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": float(0),\n \"value_9\": float(max(VALUES_NUMERIC) - min(VALUES_NUMERIC)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"mean\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": float(VALUES_NUMERIC[-1]),\n \"value_9\": float(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)),\n \"unit\": \"ยฐC\",\n },\n {\n 
\"source_sensor_domain\": \"sensor\",\n \"name\": \"median\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": float(VALUES_NUMERIC[-1]),\n \"value_9\": float(round(statistics.median(VALUES_NUMERIC), 2)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"noisiness\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": float(round(sum([3, 4.8, 10.2, 1.2, 5.4, 2.5, 7.3, 8]) / 8, 2)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"percentile\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": 9.2,\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"standard_deviation\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": float(round(statistics.stdev(VALUES_NUMERIC), 2)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"sum\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": float(VALUES_NUMERIC[-1]),\n \"value_9\": float(sum(VALUES_NUMERIC)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"sum_differences\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": float(\n sum(\n [\n abs(20 - 17),\n abs(15.2 - 20),\n abs(5 - 15.2),\n abs(3.8 - 5),\n abs(9.2 - 3.8),\n abs(6.7 - 9.2),\n abs(14 - 6.7),\n abs(6 - 14),\n ]\n )\n ),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"sum_differences_nonnegative\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": float(\n sum(\n [\n 20 - 17,\n 15.2 - 0,\n 5 - 0,\n 3.8 - 0,\n 9.2 - 3.8,\n 6.7 - 0,\n 14 - 6.7,\n 6 - 0,\n ]\n )\n ),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"total\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": float(VALUES_NUMERIC[-1]),\n \"value_9\": float(sum(VALUES_NUMERIC)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"value_max\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": float(VALUES_NUMERIC[-1]),\n \"value_9\": float(max(VALUES_NUMERIC)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"value_min\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": float(VALUES_NUMERIC[-1]),\n \"value_9\": float(min(VALUES_NUMERIC)),\n \"unit\": \"ยฐC\",\n },\n {\n \"source_sensor_domain\": \"sensor\",\n \"name\": \"variance\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": float(round(statistics.variance(VALUES_NUMERIC), 2)),\n \"unit\": \"ยฐCยฒ\",\n },\n {\n \"source_sensor_domain\": \"binary_sensor\",\n \"name\": \"average_step\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": STATE_UNKNOWN,\n \"value_9\": 50.0,\n \"unit\": \"%\",\n },\n {\n \"source_sensor_domain\": \"binary_sensor\",\n \"name\": \"average_timeless\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": 100.0,\n \"value_9\": float(\n round(100 / len(VALUES_BINARY) * VALUES_BINARY.count(\"on\"), 2)\n ),\n \"unit\": \"%\",\n },\n {\n \"source_sensor_domain\": \"binary_sensor\",\n \"name\": \"count\",\n \"value_0\": 0,\n \"value_1\": 1,\n \"value_9\": len(VALUES_BINARY),\n \"unit\": None,\n },\n {\n \"source_sensor_domain\": \"binary_sensor\",\n \"name\": \"count_on\",\n \"value_0\": 0,\n \"value_1\": 1,\n \"value_9\": VALUES_BINARY.count(\"on\"),\n \"unit\": None,\n },\n {\n \"source_sensor_domain\": \"binary_sensor\",\n \"name\": \"count_off\",\n \"value_0\": 0,\n \"value_1\": 0,\n \"value_9\": VALUES_BINARY.count(\"off\"),\n \"unit\": 
None,\n },\n {\n \"source_sensor_domain\": \"binary_sensor\",\n \"name\": \"datetime_newest\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": (start_datetime + timedelta(minutes=9)).isoformat(),\n \"value_9\": (start_datetime + timedelta(minutes=9)).isoformat(),\n \"unit\": None,\n },\n {\n \"source_sensor_domain\": \"binary_sensor\",\n \"name\": \"datetime_oldest\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": (start_datetime + timedelta(minutes=9)).isoformat(),\n \"value_9\": (start_datetime + timedelta(minutes=1)).isoformat(),\n \"unit\": None,\n },\n {\n \"source_sensor_domain\": \"binary_sensor\",\n \"name\": \"mean\",\n \"value_0\": STATE_UNKNOWN,\n \"value_1\": 100.0,\n \"value_9\": float(\n round(100 / len(VALUES_BINARY) * VALUES_BINARY.count(\"on\"), 2)\n ),\n \"unit\": \"%\",\n },\n )\n sensors_config = []\n for characteristic in characteristics:\n sensors_config.append(\n {\n \"platform\": \"statistics\",\n \"name\": f\"test_{characteristic['source_sensor_domain']}_{characteristic['name']}\",\n \"entity_id\": f\"{characteristic['source_sensor_domain']}.test_monitored\",\n \"state_characteristic\": characteristic[\"name\"],\n \"max_age\": {\"minutes\": 8}, # 9 values spaces by one minute\n }\n )\n\n with freeze_time(current_time) as freezer:\n assert await async_setup_component(\n hass,\n \"sensor\",\n {\"sensor\": sensors_config},\n )\n await hass.async_block_till_done()\n\n # With all values in buffer\n\n for i in range(len(VALUES_NUMERIC)):\n current_time += timedelta(minutes=1)\n freezer.move_to(current_time)\n async_fire_time_changed(hass, current_time)\n hass.states.async_set(\n \"sensor.test_monitored\",\n str(VALUES_NUMERIC[i]),\n {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS},\n )\n hass.states.async_set(\n \"binary_sensor.test_monitored\",\n str(VALUES_BINARY[i]),\n {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS},\n )\n await hass.async_block_till_done()\n\n for characteristic in characteristics:\n state = hass.states.get(\n f\"sensor.test_{characteristic['source_sensor_domain']}_{characteristic['name']}\"\n )\n assert state is not None, (\n \"no state object for characteristic \"\n f\"'{characteristic['source_sensor_domain']}/{characteristic['name']}' \"\n \"(buffer filled)\"\n )\n assert state.state == str(characteristic[\"value_9\"]), (\n \"value mismatch for characteristic \"\n f\"'{characteristic['source_sensor_domain']}/{characteristic['name']}' \"\n \"(buffer filled) - \"\n f\"assert {state.state} == {str(characteristic['value_9'])}\"\n )\n assert (\n state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == characteristic[\"unit\"]\n ), f\"unit mismatch for characteristic '{characteristic['name']}'\"\n\n # With single value in buffer\n\n current_time += timedelta(minutes=8)\n freezer.move_to(current_time)\n async_fire_time_changed(hass, current_time)\n await hass.async_block_till_done()\n\n for characteristic in characteristics:\n state = hass.states.get(\n f\"sensor.test_{characteristic['source_sensor_domain']}_{characteristic['name']}\"\n )\n assert state is not None, (\n \"no state object for characteristic \"\n f\"'{characteristic['source_sensor_domain']}/{characteristic['name']}' \"\n \"(one stored value)\"\n )\n assert state.state == str(characteristic[\"value_1\"]), (\n \"value mismatch for characteristic \"\n f\"'{characteristic['source_sensor_domain']}/{characteristic['name']}' \"\n \"(one stored value) - \"\n f\"assert {state.state} == {str(characteristic['value_1'])}\"\n )\n\n # With empty buffer\n\n current_time += timedelta(minutes=1)\n 
freezer.move_to(current_time)\n async_fire_time_changed(hass, current_time)\n await hass.async_block_till_done()\n\n for characteristic in characteristics:\n state = hass.states.get(\n f\"sensor.test_{characteristic['source_sensor_domain']}_{characteristic['name']}\"\n )\n assert state is not None, (\n \"no state object for characteristic \"\n f\"'{characteristic['source_sensor_domain']}/{characteristic['name']}' \"\n \"(buffer empty)\"\n )\n assert state.state == str(characteristic[\"value_0\"]), (\n \"value mismatch for characteristic \"\n f\"'{characteristic['source_sensor_domain']}/{characteristic['name']}' \"\n \"(buffer empty) - \"\n f\"assert {state.state} == {str(characteristic['value_0'])}\"\n )", "def part2a_0():\n xs = exampleInput\n phi = Counter({('-BEGIN-', '-FEAT-'): 1.0, ('-FEAT-', 'Beautiful'): 1.0, ('-FEAT-', 'PREV:-BEGIN-'): 1.0, ('-FEAT-', 'NEXT:2'): 1.0, ('-FEAT-', '-CAPITALIZED-'): 1.0, ('-FEAT-', '-POST-CAPITALIZED-'): 0.0})\n phi_ = submission.nerFeatureFunction(0, '-BEGIN-', '-FEAT-', xs)\n grader.requireIsTrue( Counters.approximateEquals(phi, phi_) )\n\n phi = Counter({('-FEAT-', '-SIZE-'): 1.0, ('-SIZE-', 'PREV:Beautiful'): 1.0, ('-SIZE-', 'NEXT:bedroom'): 1.0, ('-SIZE-', '-PRE-CAPITALIZED-'): 1.0, ('-SIZE-', '2'): 1.0, ('-SIZE-', '-POST-CAPITALIZED-'): 0.0, ('-SIZE-', '-CAPITALIZED-'): 0.0})\n phi_ = submission.nerFeatureFunction(1, '-FEAT-', '-SIZE-', xs)\n grader.requireIsTrue( Counters.approximateEquals(phi, phi_) )\n \n phi = Counter({('-SIZE-', '-SIZE-'): 1.0, ('-SIZE-', 'PREV:2'): 1.0, ('-SIZE-', 'bedroom'): 1.0, ('-SIZE-', 'NEXT:-END-'): 1.0, ('-SIZE-', '-CAPITALIZED-'): 0.0, ('-SIZE-', '-PRE-CAPITALIZED-'): 0.0})\n phi_ = submission.nerFeatureFunction(2, '-SIZE-', '-SIZE-', xs)\n grader.requireIsTrue( Counters.approximateEquals(phi, phi_) )", "def test_getOrderedFeatures(self):\n print 'Running %s ...' 
% getName()\n \n s1 = self.sequenceListingFixture.create_sequence_instance(self.sequenceListing) \n \n# test that source feature is at index 0 when feature table has only 1 feature \n source_feature = next((f for f in s1.feature_set.all() if f.featureKey == 'source'), None)\n ordered_features = s1.getOrderedFeatures()\n self.assertTrue(source_feature)\n self.assertEqual(0, ordered_features.index(source_feature))\n \n# add feature\n f1_1 = Feature.objects.create(sequence=s1, \n featureKey='misc_feature', \n location='4')\n \n ordered_features_after_f1_1 = s1.getOrderedFeatures()\n \n self.assertEqual(0, ordered_features_after_f1_1.index(source_feature))\n self.assertEqual(1, ordered_features_after_f1_1.index(f1_1))\n \n # add feature\n f1_2 = Feature.objects.create(sequence=s1, \n featureKey='misc_feature', \n location='2')\n \n ordered_features_after_f1_2 = s1.getOrderedFeatures()\n \n self.assertEqual(0, ordered_features_after_f1_2.index(source_feature))\n self.assertEqual(1, ordered_features_after_f1_2.index(f1_2))\n self.assertEqual(2, ordered_features_after_f1_2.index(f1_1))\n \n # add feature\n f1_3 = Feature.objects.create(sequence=s1, \n featureKey='variation', \n location='9')\n \n ordered_features_after_f1_3 = s1.getOrderedFeatures()\n \n self.assertEqual(0, ordered_features_after_f1_3.index(source_feature))\n self.assertEqual(1, ordered_features_after_f1_3.index(f1_2))\n self.assertEqual(2, ordered_features_after_f1_3.index(f1_1))\n self.assertEqual(3, ordered_features_after_f1_3.index(f1_3))\n \n # add feature\n f1_4 = Feature.objects.create(sequence=s1, \n featureKey='allele', \n location='9')\n \n ordered_features_after_f1_4 = s1.getOrderedFeatures()\n \n self.assertEqual(0, ordered_features_after_f1_4.index(source_feature))\n self.assertEqual(1, ordered_features_after_f1_4.index(f1_2))\n self.assertEqual(2, ordered_features_after_f1_4.index(f1_1))\n self.assertEqual(3, ordered_features_after_f1_4.index(f1_4))\n self.assertEqual(4, ordered_features_after_f1_4.index(f1_3))\n \n # add feature\n f1_5 = Feature.objects.create(sequence=s1, \n featureKey='iDNA', \n location='9')\n \n ordered_features_after_f1_5 = s1.getOrderedFeatures()\n \n self.assertEqual(0, ordered_features_after_f1_5.index(source_feature))\n self.assertEqual(1, ordered_features_after_f1_5.index(f1_2))\n self.assertEqual(2, ordered_features_after_f1_5.index(f1_1))\n self.assertEqual(3, ordered_features_after_f1_5.index(f1_4))\n self.assertEqual(4, ordered_features_after_f1_5.index(f1_5))\n self.assertEqual(5, ordered_features_after_f1_5.index(f1_3))\n \n # add feature this will be ordered before 'allele', because \n# capital letters are lower than lower case in ASCII\n f1_6 = Feature.objects.create(sequence=s1, \n featureKey='CDS', \n location='9..17')\n \n ordered_features_after_f1_6 = s1.getOrderedFeatures()\n \n self.assertEqual(0, ordered_features_after_f1_6.index(source_feature))\n self.assertEqual(1, ordered_features_after_f1_6.index(f1_2))\n self.assertEqual(2, ordered_features_after_f1_6.index(f1_1))\n self.assertEqual(3, ordered_features_after_f1_6.index(f1_6))\n self.assertEqual(4, ordered_features_after_f1_6.index(f1_4))\n self.assertEqual(5, ordered_features_after_f1_6.index(f1_5))\n self.assertEqual(6, ordered_features_after_f1_6.index(f1_3))", "def testdata_matcher(fname1='easy1.png', fname2='easy2.png'):\n import utool as ut\n #import vtool as vt\n from vtool import image as gtool\n from vtool import features as feattool\n fpath1 = ut.grab_test_imgpath(fname1)\n fpath2 = 
ut.grab_test_imgpath(fname2)\n featkw = dict(rotation_invariance=True)\n kpts1, vecs1 = feattool.extract_features(fpath1, **featkw)\n kpts2, vecs2 = feattool.extract_features(fpath2, **featkw)\n #if featkw['rotation_invariance']:\n # print('ori stats 1 ' + ut.get_stats_str(vt.get_oris(kpts2)))\n # print('ori stats 2 ' + ut.get_stats_str(vt.get_oris(kpts1)))\n rchip1 = gtool.imread(fpath1)\n rchip2 = gtool.imread(fpath2)\n #chip1_shape = vt.gtool.open_image_size(fpath1)\n chip2_shape = gtool.open_image_size(fpath2)\n dlen_sqrd2 = chip2_shape[0] ** 2 + chip2_shape[1]\n testtup = (rchip1, rchip2, kpts1, vecs1, kpts2, vecs2, dlen_sqrd2)\n return testtup" ]
[ "0.6707237", "0.6620154", "0.6500564", "0.64482635", "0.6439859", "0.6421214", "0.6377413", "0.6242838", "0.62241733", "0.61963874", "0.61854374", "0.6150502", "0.6111971", "0.6109932", "0.60865235", "0.60865235", "0.6078514", "0.6074583", "0.6064137", "0.6050908", "0.6041128", "0.60258275", "0.60203993", "0.6020138", "0.6010881", "0.5997143", "0.59935963", "0.5989582", "0.59797925", "0.59725654", "0.59557086", "0.5945495", "0.59303653", "0.59287685", "0.5913477", "0.58935803", "0.5889088", "0.58858997", "0.58795506", "0.5867328", "0.58632845", "0.5829552", "0.5823277", "0.58228266", "0.58146936", "0.58117497", "0.58101255", "0.57919765", "0.5783417", "0.5773172", "0.5770025", "0.57687324", "0.5765184", "0.57647175", "0.57622564", "0.5761878", "0.576005", "0.5758379", "0.5756319", "0.57539964", "0.57514554", "0.57382494", "0.57358253", "0.573518", "0.5734418", "0.5731275", "0.5730125", "0.57269377", "0.57269377", "0.5721491", "0.57183963", "0.5711678", "0.570806", "0.5707125", "0.57064307", "0.57063687", "0.5700248", "0.56958914", "0.5695655", "0.5692286", "0.56905675", "0.5688233", "0.5686158", "0.5680265", "0.56780255", "0.56761104", "0.5675834", "0.56748646", "0.56748646", "0.56732774", "0.56687135", "0.56654555", "0.5663966", "0.5662738", "0.56487465", "0.56478757", "0.56397253", "0.56369865", "0.5634425", "0.5625525" ]
0.6980132
0
Compute a MaxEnt reward function from demonstration trajectories
def maxEntIRL(trans_mat, state_features, demos, seed_weights, n_epochs, horizon, learning_rate):

    feature_exp = find_feature_expectations(state_features, demos)

    n_states = np.shape(trans_mat)[0]
    n_actions = np.shape(trans_mat)[1]
    n_features = np.shape(state_features)[1]
    r_weights = np.zeros(n_features) + seed_weights

    # Probability distribution over initial states, estimated from the demonstration trajectories
    start_state_count = np.zeros(n_states)
    for demo in demos:
        start_state_count[demo[0]] += 1
    p_start_dist = start_state_count / np.shape(demos)[0]

    # Iterate
    for epoch in range(n_epochs):
        # print("epoch: {}".format(epoch))

        # Calculate Max Ent Policy
        policy = calcMaxEntPolicy(trans_mat, horizon, r_weights, state_features)

        # Calculate Expected State Frequency
        expected_svf = calcExpectedStateFreq(trans_mat, horizon, p_start_dist, policy)

        # Update reward weights using the MaxEnt gradient: demonstrated feature
        # expectations minus expected feature counts under the current policy
        gradient = feature_exp - expected_svf.T.dot(state_features)
        r_weights += learning_rate * gradient

        print(epoch, np.linalg.norm(gradient))
        print(policy)
        print(policy.argmax(axis=1))

    return r_weights
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _dense_reward(self) -> float:\n y = 1\n target_goal_dists = []\n for target_shape in self.__debris_shapes:\n target_pos = target_shape.shape_body.position\n goal_pos = (target_pos[0], y) # Top of screen.\n dist = np.linalg.norm(target_pos - goal_pos)\n if target_pos[1] > 0.88:\n dist = 0\n target_goal_dists.append(dist)\n target_goal_dists = np.mean(target_goal_dists)\n return -1.0 * target_goal_dists", "def reward_calc(state, action, setpoint):\r\n return max(-np.square(state - setpoint) - action, -150)", "def reward_func(self, state):\n if abs(state['theta']) < 1:\n return 2.0\n\n elif abs(state['theta']) < 3:\n return 0.0\n\n elif abs(state['theta']) > 30:\n return -100.0\n return -2.0", "def getReward(self):\n# def evaluateFitness(self):\n fitness = 0.0\n distance = self.env.getDistance()\n speed = self.env.getSpeed()\n theta = self.env.getOrientation()\n\n ## implementation 101\n timeBonus = (self.maxTime - self.t)/self.maxTime\n alpha = 1.0/((1+distance)*(1+fabs(theta))*(speed+1));\n if distance < 0.5*self.env.init_distance :\n if(distance < self.env.vicinity_distance and\n abs(theta) < self.env.vicinity_orientation and\n speed < self.env.vicinity_speed ):\n fitness = 1 + timeBonus; \n else:\n fitness = alpha;\n else: fitness = 0\n self.lastFitness = fitness\n if fitness > self.bestFitness : \n self.bestFitness = fitness \n\n return fitness", "def reward_function(self):\r\n def R(state, decision, nodes):\r\n return -1.0/1000*nodes['G'].get_preds_value(state)*(decision['G:R_1']+decision['G:L'])\r\n \r\n return R", "def _preprocess_experience(self):\n observed_inputs = []\n observed_reward = []\n predicted_outputs = []\n distance_from_reward = []\n next_state = []\n # process inputs and outputs to train the net\n for episode in self.examples:\n episode_match, example_reward = episode\n last_step = True\n for n, step in enumerate(reversed(episode_match)):\n this_state = state_from_hash(step.state_t)\n next_state.append(state_from_hash(step.action_t))\n observed_inputs.append(np.hstack((this_state,\n this_state != next_state[-1]))\n .flatten())\n distance_from_reward.append(n)\n # now we have to evaluate max_{s'}[Q(a',s')]\n # let's see all possible actions two steps ahead\n two_ahead = []\n for possible_action in self.state_space[step.action_t].actions:\n possible_action = state_from_hash(possible_action)\n two_ahead.append(np.hstack((next_state[-1],\n next_state[-1] != possible_action))\n .flatten())\n if not two_ahead:\n # if it's a terminal state, no two-ahead, so set the max to 0\n max_next_state = 0\n else:\n # evaluate Q on the two-ahead actions\n two_ahead = np.array(two_ahead)\n two_ahead[two_ahead == 2] = -1\n max_next_state = self.sess.run(\n self.output,\n feed_dict={self.input: two_ahead}).flatten()\n\n # calc the maximum\n max_next_state = np.max(max_next_state)\n predicted_outputs.append(max_next_state)\n if last_step:\n # because we start from last step, `last_step` will be true\n observed_reward.append(example_reward)\n # then set it to false so non-last steps get reward 0\n last_step = False\n else:\n observed_reward.append(0)\n # Q-network output from the inputs\n predicted_outputs = self.discount * np.vstack(predicted_outputs).flatten()\n observed_inputs = np.array(observed_inputs)\n # possible max value in a state is 2, set all 2's to -1's\n observed_inputs[observed_inputs == 2] = -1\n observed_reward = np.vstack(observed_reward).flatten()\n return observed_inputs, observed_reward, predicted_outputs, distance_from_reward", "def predict_fn(future_action, 
state):\n model = get_model()\n rewards = model((state, future_action))\n return {\"reward\": rewards}", "def _compute_reward(self):\n last_score = self.episode_qualities[-2]\n new_score = self.episode_qualities[-1]\n reward = new_score - last_score\n return reward", "def learn(self, state, action, reward, next_state):\r\n\r\n \"\"\"Please Fill Your Code Here.\r\n \"\"\"\r\n self.Q[state][action] = self.Q[state][action] + self.alpha * (reward + self.gamma * max(self.Q[next_state]) - self.Q[state][action])\r\n\r\n return 0", "def reward(input):\n state = np.array([input[0], input[1]])\n action = input[2]\n action = np.clip(action, -2.0, 2.0)\n costs = angle_normalize(state[0])**2 + .1 * state[1]**2 + .001 * (action**2)\n\n return - costs", "def _get_reward(self):\n if self.status():\n return self.current_step/self.ep_length # the reward is proportional to the duration \n else:\n return 0", "def get_reward(self, observations, actions):\n\n #initialize and reshape as needed, for batch mode\n self.reward_dict = {}\n if(len(observations.shape)==1):\n observations = np.expand_dims(observations, axis = 0)\n actions = np.expand_dims(actions, axis = 0)\n batch_mode = False\n else:\n batch_mode = True\n\n #get vars\n xvel = observations[:, 9].copy()\n body_angle = observations[:, 2].copy()\n front_leg = observations[:, 6].copy()\n front_shin = observations[:, 7].copy()\n front_foot = observations[:, 8].copy()\n zeros = np.zeros((observations.shape[0],)).copy()\n\n # ranges\n leg_range = 0.2\n shin_range = 0\n foot_range = 0\n penalty_factor = 10\n\n #calc rew\n self.reward_dict['run'] = xvel\n\n front_leg_rew = zeros.copy()\n front_leg_rew[front_leg>leg_range] = -penalty_factor\n self.reward_dict['leg'] = front_leg_rew\n\n front_shin_rew = zeros.copy()\n front_shin_rew[front_shin>shin_range] = -penalty_factor\n self.reward_dict['shin'] = front_shin_rew\n\n front_foot_rew = zeros.copy()\n front_foot_rew[front_foot>foot_range] = -penalty_factor\n self.reward_dict['foot'] = front_foot_rew\n\n # total reward\n self.reward_dict['r_total'] = self.reward_dict['run'] + self.reward_dict['leg'] + self.reward_dict['shin'] + self.reward_dict['foot']\n\n #return\n dones = zeros.copy()\n if(not batch_mode):\n return self.reward_dict['r_total'][0], dones[0]\n return self.reward_dict['r_total'], dones", "def __generate_reward_function(self):\n K = -3\n self.reward = np.array([[10, 0, K],\n [0, 2, 0],\n [K, 0, 10]])", "def get_reward(self):\n \n pos_error = np.sum(abs(self.sim.pose[:3] - self.target_pos[:3]))\n pos_error = np.log(pos_error)\n z_error = abs(self.sim.pose[2] - self.target_pos[2])\n velocity_error = np.dot(np.subtract(1, np.tanh(self.sim.pose[:3])), self.sim.v)\n reward = 1. - pos_error - 0.02 * z_error\n #reward = 1 - z_error - xy_erro, r/800 - ((1-z_error)*z_v/100) - angv/20\n reward = np.clip(reward, -2, None)\n\n #reward = np.maximum(np.minimum(reward, max_reward), min_reward)\n\n return reward", "def goal(self) -> Goal:\n return MaxReward()", "def _reward(self):\n\n return 1-self.step_count/ self.max_steps", "def _get_reward(self, action):\n HIRE_COST = 1 # TODO 7/29/20 - Determine significance of this value\n\n # Lookup the state representation using the cur_state index. 
Then we\n # can get the candidate productivity score.\n obs = self.observation_function[self.cur_state]\n prod_score = obs[1]\n r = action*(prod_score - HIRE_COST)\n return r", "def finish_episode(tribes, learners, optimizers, gamma, cuda): \n \n num_learners = len(learners)\n total_norms = [0 for i in range(num_learners)]\n policy_losses = [[] for i in range(num_learners)]\n losses = [[] for i in range(num_learners)]\n T_reward = []\n\n \n for i in range(num_learners):\n\n R = 0\n saved_actions = learners[i].saved_actions\n \n for t in tribes:\n if t.name is learners[i].tribe:\n \n # Based on team culture, calculate the team reward for the agent \n culture = t.culture['name']\n \n if culture is 'cooperative':\n T_reward = t.tribal_awards()\n elif culture is 'individualist':\n T_reward = t.tribal_awards()\n elif culture is 'no_fragging':\n T_reward = t.tribal_awards(US_hits = learners[i].US_hits)\n elif culture is 'pacifist':\n T_reward = t.tribal_awards(tag_hist = learners[i].tag_hist)\n elif culture is 'pacifist_exile':\n T_reward = t.tribal_awards(tag_hist = learners[i].tag_hist, \\\n in_banned_hist=learners[i].in_banned_hist)\n elif culture is 'pacifist_follower':\n T_reward = t.tribal_awards(tag_hist = learners[i].tag_hist, \\\n in_target_hist=learners[i].in_target_hist)\n elif culture is 'warlike':\n T_reward = t.tribal_awards(US_hits = learners[i].US_hits,THEM_hits = learners[i].THEM_hits)\n else:\n T_reward = t.tribal_awards()\n \n # For debug only\n # print('Agent{} receives tribal award from Tribe{}'.format(i,t.name))\n # print (T_reward)\n # print (learners[i].rewards)\n \n # Do not implement actor-critic for now\n # value_losses = []\n \n rewards = deque()\n\n for r,T in zip(learners[i].rewards[::-1],T_reward[::-1]):\n # The agent is incentivized to cooperate by an award of 30% of what the tribe takes\n # in by all its members\n R = r + T + gamma * R\n rewards.appendleft(R)\n \n rewards = list(rewards)\n rewards = torch.Tensor(rewards)\n if cuda:\n rewards = rewards.cuda()\n\n # z-score rewards\n rewards = (rewards - rewards.mean()) / (1.1e-7+rewards.std())\n \n #Debug \n #print (rewards) \n \n \"\"\"\n Do not implement actor-critic for now!!!\n for (log_prob, state_value), r in zip(saved_actions, rewards):\n reward = r - state_value.data[0]\n policy_losses.append(-log_prob * Variable(reward))\n r = torch.Tensor([r])\n if cuda:\n r = r.cuda()\n value_losses.append(torch.nn.functional.smooth_l1_loss(state_value,\n Variable(r)))\n\n optimizer.zero_grad()\n loss = torch.stack(policy_losses).sum() + torch.stack(value_losses).sum()\n loss.backward() \n \n \n \"\"\"\n for log_prob, r in zip(saved_actions, rewards):\n r = torch.Tensor([r])\n if cuda:\n r = r.cuda()\n policy_losses[i].append(-log_prob * Variable(r))\n\n optimizers[i].zero_grad()\n losses[i] = torch.stack(policy_losses[i]).sum()\n losses[i].backward()\n \n # Gradient Clipping Update: prevent exploding gradient\n total_norms[i] = torch.nn.utils.clip_grad_norm_(learners[i].parameters(), 8000)\n \n optimizers[i].step()\n learners[i].clear_history() # clear an agent's history at the end of episode\n\n\n return total_norms", "def max_expected_future_reward(q_table: np.ndarray, state: int,\n actions: List[int] = None) -> float:\n if actions is None:\n actions = list(range(q_table.shape[1]))\n return np.max(q_table[state, actions])", "def update(self, state, action, nextState, reward):\n \"\"\"Description:\n Use second equation in slide 71 of MDP\n Adjest weight of active features depend on tranistion \n \"\"\"\n \"\"\" YOUR 
CODE HERE \"\"\"\n feat = self.featExtractor.getFeatures(state, action)\n\n # if weight is empty, then weight will need to initial to 1 for all features\n # According to which Extractor user choose, weight counter will have equal number of keys.\n if len(self.weight) == 0:\n feat = self.featExtractor.getFeatures(state, action)\n self.weight.incrementAll(feat.keys(), 1)\n \n maxQns = self.getValue(nextState)\n if maxQns == None:\n maxQns = 0\n Qsa = self.getQValue(state, action)\n difference = ( reward + self.discountRate * maxQns ) - Qsa\n \n for key in self.weight.keys():\n self.weight[key] += (self.alpha * difference * feat[key])\n \n \n \"\"\" END CODE \"\"\"", "def compute_intrinsic_reward(self, next_obs):\r\n next_obs = torch.tensor(next_obs, dtype=torch.float, device=self.device)\r\n #next_obs = torch.FloatTensor(next_obs).to(self.device)\r\n\r\n target_next_feature = self.rnd.target(next_obs)\r\n predict_next_feature = self.rnd.predictor(next_obs)\r\n intrinsic_reward = (target_next_feature - predict_next_feature).pow(2).mean(1) ### MSE --- Issues\r\n #intrinsic_reward = (target_next_feature - predict_next_feature).pow(2).sum(1) / 2\r\n\r\n return intrinsic_reward.data.cpu().numpy()", "def get_reward_function(self):\n R_fn = np.zeros(self.n_states)\n R_fn[0] = 1.0\n\n return R_fn", "def loss_function(agent, trajectories):\n # All ALL_CAPS variables are constants.\n\n # QUESTIOM: The trajectories already have behavior_logits, why is the need\n # to calculate the target_logits?\n # trajectories shape: list of trajectory\n # target_logits: ArgsActionLogits\n target_logits, baselines = agent.unroll(trajectories)\n\n trajectories = U.stack_namedtuple(trajectories) \n trajectories = U.namedtuple_zip(trajectories) \n\n loss_actor_critic = 0.\n if True:\n rewards = torch.tensor(trajectories.reward, dtype=torch.float32, device=device)\n print(\"trajectories.reward\", rewards) if debug else None \n print(\"trajectories.reward.shape\", rewards.shape) if debug else None\n\n # use normalize\n if False:\n scale_dim = 1\n rewards = (rewards - torch.mean(rewards, dim=scale_dim, keepdim=True)) / (torch.std(rewards, dim=scale_dim, keepdim=True) + 1e-9)\n\n print(\"trajectories.reward\", rewards) if debug else None \n print(\"trajectories.reward.shape\", rewards.shape) if debug else None\n\n lambda_loss = td_lambda_loss(baselines[0], rewards, trajectories)\n print(\"lambda_loss:\", lambda_loss) if 1 else None\n loss_actor_critic += (10. 
* lambda_loss)\n\n # we add the split_vtrace_pg_loss\n pg_loss = split_vtrace_pg_loss(target_logits, baselines[0], rewards, trajectories)\n print(\"pg_loss:\", pg_loss) if 1 else None\n loss_actor_critic += (1.0 * pg_loss)\n\n UPGO_WEIGHT = 1.0\n loss_upgo = UPGO_WEIGHT * split_upgo_loss(target_logits, baselines[0], trajectories)\n print(\"loss_upgo:\", loss_upgo) if debug else None\n\n # note: we want to maximize the entropy\n # so we gradient descent the -entropy\n # Original AlphaStar pseudocode is wrong\n # AlphaStar: loss_ent = entropy_loss(trajectories.behavior_logits, trajectories.masks)\n loss_ent = 3 * (- entropy_loss_for_all_arguments(target_logits, trajectories.masks))\n print(\"loss_ent:\", loss_ent) if 1 else None\n\n #loss_all = target_logits.action_type.sum()\n loss_all = loss_actor_critic + loss_ent # + loss_upgo\n\n loss_list = [lambda_loss, pg_loss, loss_upgo, loss_ent]\n\n return loss_all, loss_list", "def gae(done, rewards, values, n_envs, steps_per_env, gamma, gae_lambda, device):\n advantages = torch.zeros((n_envs, steps_per_env, 1), dtype=torch.float, device=device)\n last_advantage = 0\n for state in reversed(range(steps_per_env)):\n error = rewards[:, state] + gamma * values[:, state + 1] * (~done[:, state]) - values[:, state]\n last_advantage = (error + gamma * gae_lambda * last_advantage * (~done[:, state]))\n\n advantages[:, state] = last_advantage\n\n return advantages", "def _get_reward(self, normalized_state, normalized_unconstrained_action, normalized_constrained_action):\n denormalized_unconstrained_charge_rate_in_W = self.denormalize_network_output(normalized_unconstrained_action)\n denormalized_constrained_charge_rate_in_W = self.denormalize_network_output(normalized_constrained_action)\n denormalized_state = normalized_state * self.energy_system.stm_train_subsequent_states_stds + self.energy_system.stm_train_subsequent_states_means\n\n cost_of_net_drawn_electricity = self._get_cost_of_net_drawn_electricity_in_euros(denormalized_state, denormalized_constrained_charge_rate_in_W)\n charge_rate_punishment = self._get_punishment_for_excessive_charge_rate(denormalized_unconstrained_charge_rate_in_W)\n soc_punishment = self._get_punishment_for_impossible_resulting_soc(denormalized_state, denormalized_unconstrained_charge_rate_in_W) \n reward = - cost_of_net_drawn_electricity - charge_rate_punishment - soc_punishment\n #tf.summary.scalar('cost_of_net_drawn_electricity in euros', cost_of_net_drawn_electricity) \n #tf.summary.scalar('reward', reward)\n\n return reward, cost_of_net_drawn_electricity", "def run_episode(\n initial_state: tf.Tensor, model: tf.keras.Model, max_steps: int\n) -> Tuple[tf.Tensor, tf.Tensor, tf.Tensor]:\n\n # action_probs = tf.TensorArray(dtype=tf.float32, size=0, dynamic_size=True)\n actions = tf.TensorArray(dtype=tf.float32, size=0, dynamic_size=True)\n action_probs = tf.TensorArray(dtype=tf.float32, size=0, dynamic_size=True)\n values = tf.TensorArray(dtype=tf.float32, size=0, dynamic_size=True)\n rewards = tf.TensorArray(dtype=tf.float32, size=0, dynamic_size=True)\n\n initial_state_shape = initial_state.shape\n state = initial_state\n\n for t in tf.range(max_steps):\n # Convert state into a batched tensor (batch size = 1)\n state = tf.expand_dims(state, 0)\n\n # Run the model and to get action probabilities and critic value\n # action_logits_t, value = model(state)\n a_mu, a_sigma, value = model(state)\n a_sigma = a_sigma + 0.01\n # tf.print(\"value:\", value)\n\n # Sample next action from the action probability distribution\n 
action_rand = tf.random.normal([1], a_mu, a_sigma, tf.float32)\n action_probs_t = tf.compat.v1.distributions.Normal(a_mu, a_sigma).prob(\n action_rand\n )\n action = tf.math.tanh(action_rand) # R -> [-1,1]\n \"\"\"\n tf.print(\"a_mu:\", a_mu)\n tf.print(\"a_sigma:\", a_sigma)\n tf.print(\"action_rand:\", action_rand)\n tf.print(\"action_probs_t:\", action_probs_t)\n \"\"\"\n\n # Store critic values\n values = values.write(t, tf.squeeze(value))\n\n # Store log probability of the action chosen\n actions = actions.write(t, action)\n action_probs = action_probs.write(t, action_probs_t)\n\n # Apply action to the environment to get next state and reward\n state, reward, done = tf_env_step(action)\n state.set_shape(initial_state_shape)\n\n # Store reward\n rewards = rewards.write(t, reward)\n\n if tf.cast(done, tf.bool):\n break\n\n actions = actions.stack() # list of action-mean\n action_probs = action_probs.stack() # list of action-sigma\n values = values.stack()\n rewards = rewards.stack()\n\n return actions, action_probs, values, rewards", "def objective(trial):\n %time\n env = gym.make('Delivery-v0')\n alpha = trial.suggest_discrete_uniform('alpha', 0.3,0.9,0.3)\n gamma = trial.suggest_discrete_uniform('gamma', 0.6, 1,0.1)\n epsilon = trial.suggest_discrete_uniform('epsilon', 0.01, 0.11, 0.04)\n episodes = 1000000\n \n # For plotting metrics\n all_epochs = []\n all_penalties = []\n rewards = []\n \n #Initialize Q table of 22500 x 8 size (22500 states and 8 actions) with all zeroes\n q_table = np.zeros([env.observation_space.n, env.action_space.n]) \n \n for i in range(1, episodes+1):\n state = env.reset()\n episode_rewards = []\n\n epochs, penalties, reward, = 0, 0, 0\n done = False\n\n while not done:\n if random.uniform(0, 1) < epsilon:\n action = env.action_space.sample() # Explore action space randomly\n else:\n action = np.argmax(q_table[state]) # Exploit learned values by choosing optimal values\n\n next_state, reward, done, info = env.step(action) \n\n old_value = q_table[state, action]\n next_max = np.max(q_table[next_state])\n\n new_value = (1 - alpha) * old_value + alpha * (reward + gamma * next_max)\n q_table[state, action] = new_value\n\n if reward == -10:\n penalties += 1\n \n\n state = next_state\n episode_rewards.append(reward)\n epochs += 1\n \n if done == True:\n break \n if epochs == 1000:\n break \n rewards.append(np.sum(episode_rewards))\n \n last_reward = np.mean(rewards)\n # trial.report(-1 * last_reward)\n\n return -1 * last_reward", "def get_reward(self):\n\t\t# returns the reward for current state\n\n\t\t#temporary line for testing:\n\t\t#return self.reward_idea() # also not working yet\n\n\t\tcost = self.cost_function()\n\t\tconstraints_violation = self.get_constraints_violation()\n\t\t#old_aug_cost = self.augmented_cost\n\t\t#new_aug_cost = self.get_augmented_cost(cost,constraints_violation)\n\t\t#reward = old_aug_cost - new_aug_cost # reward formula\n\t\t#self.augmented_cost = new_aug_cost # update augmented_cost\n\t\treward = -self.get_augmented_cost(cost,constraints_violation)\n\t\t#print(\"***\\nDEBUG cost: \" +str(cost)+\" constraints_violation: \"+str(constraints_violation))\n\t\t#print(\"DEBUG reward: \"+str(reward))\n\t\t'''\n\t\t#old idea that is probably bad and not necessary:\n\t\tif(self.step_count == 0):\n\t\t\t# old_aug_cost doesn't exist in first step... 
ACTUALLY IT DOES!\n\t\t\tprint(\"DEBUG step_count == 0, reward would be \"+str(reward))\n\t\t\tprint(\"DEBUG old_aug_cost: \"+str(old_aug_cost) + \" new_aug_cost: \"+str(new_aug_cost) )\n\t\t\treturn 0\n\t\t'''\n\n\t\treturn reward", "def evaluate(game, player):\n weights = [2, 200, 2000, 20000]\n reward = 0\n opponent = get_opponent(player)\n for length in range(2, 6):\n reward += weights[length - 2] * get_num_series(game, player, length)\n reward -= weights[length - 2] * get_num_series(game, opponent, length)\n return reward", "def reward_calc(self, reward_traj,V,V_end):\n r_all = np.concatenate((reward_traj,[V_end]),-1)\n V_all = V #np.concatenate((V,[V_end]),-1)\n delta = r_all[:-1] + self.gamma * V_all[1:] - V_all[:-1]\n \n adv = Generalized_Adv_Estimator.discounted_sum(delta,self.gamma*self.lam)\n rtg = adv + V_all[:-1]\n\n adv = adv.astype('float32')\n rtg = rtg.astype('float32')\n\n return adv, rtg", "def _get_reward(self, terminal):\n if not terminal:\n return 0\n\n folded_design, _ = fold(self.design.primary)\n hamming_distance = hamming(folded_design, self.target.dot_bracket)\n if 0 < hamming_distance < self._env_config.mutation_threshold:\n hamming_distance = self._local_improvement(folded_design)\n\n normalized_hamming_distance = hamming_distance / len(self.target)\n\n # For hparam optimization\n episode_info = EpisodeInfo(\n target_id=self.target.id,\n time=time.time(),\n normalized_hamming_distance=normalized_hamming_distance,\n )\n self.episodes_info.append(episode_info)\n\n return (1 - normalized_hamming_distance) ** self._env_config.reward_exponent", "def loss_function(self, q_vals, next_q_vals, rewards, actions, double_q_vals=None):\n with self.graph.as_default():\n with tf.name_scope('loss'):\n \"\"\"\n Calculate the target value(s)\n \"\"\"\n if double_q_vals is not None:\n # Select maximizing action using online network\n max_index = tf.argmax(double_q_vals, axis=1, output_type=tf.int32)\n indices = tf.stack([tf.range(0,self.batch_size), max_index], axis=-1)\n # Evaluate Q using target network\n next_q_acted = tf.gather_nd(next_q_vals, indices)\n else:\n # Select the maximum value of the next_q_vals: max_a Q(s_t+1,a)\n next_q_acted = tf.reduce_max(next_q_vals, axis=1)\n # y = r + gamma * max Q(s_t+1)\n target = tf.add_n([rewards, tf.scalar_mul(self.gamma, next_q_acted)], name='target_values')\n \"\"\"\n Retrieve the Q-value(s) of the given actions\n \"\"\"\n # Q(s_t,a_t)\n indices = tf.stack([tf.range(0,self.batch_size), actions], axis=-1)\n q_acted = tf.gather_nd(q_vals, indices)\n \"\"\"\n Calculate the loss: squared TD-error\n \"\"\"\n # This is the TD-error: y - Q(s_t,a_t)\n diff = tf.subtract(target, q_acted, name='TD_errors')\n # reduce-mean averages the negative and positive td-errors\n td_loss = tf.square(diff, name='squared_TD_errors')\n loss = tf.reduce_mean(td_loss)\n # Squared_TD_errors is the mean-squared-loss we want to minimize in training\n\n return loss, diff", "def get_reward(self, dags, entropies,inputs,targets):\n if not isinstance(entropies, np.ndarray):\n entropies = entropies.data.cpu().numpy()\n\n score=self.get_score(inputs,targets,dags)\n #score=1-self.get_loss(inputs,targets,dags)\n print(score.item())\n R = utils.to_item(score.data)\n\n if self.args.entropy_mode == 'reward':\n rewards = R + self.args.entropy_coeff * entropies.mean()\n elif self.args.entropy_mode == 'regularizer':\n rewards = R * np.ones_like(entropies)\n else:\n raise NotImplementedError(f'Unkown entropy mode: {self.args.entropy_mode}')\n\n return rewards", "def 
fake_reward_fn(obs, acts, next_obs, steps):\n pos, vel = obs[:, 0], obs[:, 1]\n return vel * 100 + pos", "def _compute_reward_(self):\n if self._target_type == \"position\":\n dist = np.linalg.norm(self._target_diff_, ord=2)\n if self._reward_type == \"linear\":\n reward_dist = -dist\n elif self._reward_type == \"precision\":\n reward_dist = -dist +\\\n np.exp( -dist**2 / 0.01)\n elif self._reward_type == \"sparse\":\n if dist < 0.05:\n reward_dist = 0\n else:\n reward_dist = -0.1\n\n elif self._target_type == \"angle\":\n dist = np.linalg.norm(self._target_diff_, ord=1)\n if self._reward_type == \"linear\":\n reward_dist = -dist\n elif self._reward_type == \"precision\":\n reward_dist = -dist +\\\n np.exp(-dist ** 2 / 0.01)\n elif self._reward_type == \"sparse\":\n raise NotImplementedError\n\n # TODO: doublecheck whether '0' or '-1' should be used as the index\n reward_vel = -self._vel_penalty * np.square(self._qd_[-1, self._joint_indices]).sum()\n\n #self.info['reward_dist'] = reward_dist\n #self.info['reward_vel'] = reward_vel\n\n return (reward_dist + reward_vel) * self._dt / 0.008", "def evaluate(agent_model, num_episodes=50):\n # This function will only work for a single Environment\n env = agent_model.get_env()\n all_episode_rewards = []\n max_action = []\n max_reward = -1000000000\n for i in range(num_episodes):\n episode_rewards = []\n done = False\n obs = env.reset()\n\n while not done:\n # _states are only useful when using LSTM policies\n\n action, _states = agent_model.predict(obs)\n # here, action, rewards and dones are arrays\n # because we are using vectorized env\n obs, reward, done, info = env.step(action)\n\n episode_rewards.append(reward)\n\n if reward > max_reward:\n max_action = action\n max_reward = reward\n\n\n\n all_episode_rewards.append(sum(episode_rewards))\n\n print(\"BEST ACTION: {}\".format(max_action))\n print(\"BEST REWARD: {}\".format(max_reward))\n\n mean_episode_reward = np.mean(all_episode_rewards)\n print(\"Mean reward:\", mean_episode_reward, \"Num episodes:\", num_episodes)\n\n return mean_episode_reward, all_episode_rewards", "def evaluate(self, environment, max_reward=1.0):\n episode_reward = 0.0\n state = environment.reset()\n\n for step_idx in range(self.max_episode_steps):\n reward, action_idx, new_state, is_done = environment.step(state, self)\n \n state = new_state\n episode_reward += reward\n\n if is_done or episode_reward >= max_reward:\n break\n\n self.fitness = episode_reward\n return episode_reward", "def _reward(self, action: Action) -> float:\n raise NotImplementedError", "def actor_critic_f(env, estimator_policy, estimator_value, num_episodes, discount_factor=1.0):\n\n # Keeps track of useful statistics\n stats = plotting.EpisodeStats(\n episode_lengths=np.zeros(num_episodes),\n episode_rewards=np.zeros(num_episodes)) \n\n Transition = collections.namedtuple(\"Transition\", [\"state\", \"action\", \"reward\", \"next_state\", \"done\"])\n\n max_return = 0\n\n for i in range(num_episodes):\n # Reset the environment and pick the fisrst action\n\n # set initial state (x, y, theta, x', y', theta')\n state = np.array([+7, 10, 0, 0, 20, 1.5], dtype=np.float32).reshape(6, 1)\n # state = np.array([+9, 1, 0, 0, 20, 0], dtype=np.float32).reshape(6, 1)\n action = np.array([0, 0], dtype=np.float32).reshape(2, 1)\n reward = 0\n env._reset(state)\n\n total_return = 0\n\n # One step in the environment\n for t in itertools.count():\n print \"{}-#{:03d} \".format(t, i+1),\n if t > 1000:\n break\n\n env._render({\n \"max_return\": max_return,\n 
\"total_return\": total_return\n })\n\n # Take a step\n mdp_state = form_mdp_state(state, action, reward)\n action = estimator_policy.predict(mdp_state)\n # action[0, 0] = 10\n # action[1, 0] = 1.9\n next_state, reward, done, _ = env.step(action)\n\n if total_return + reward < 0:\n reward = -500\n\n # Update statistics (minus 1 reward per step)\n total_return += reward\n\n if total_return > max_return:\n max_return = total_return\n\n # Calculate TD Target\n next_mdp_state = form_mdp_state(next_state, action, reward)\n value = estimator_value.predict(mdp_state)\n value_next = estimator_value.predict(next_mdp_state)\n td_target = reward + discount_factor * value_next\n td_error = td_target - value\n\n # Update the value estimator\n estimator_value.update(mdp_state, td_target)\n\n # Update the policy estimator (use td-error as advantage estimate)\n estimator_policy.update(mdp_state, td_error, action)\n\n # Print out which step we're on, useful for debugging.\n print \"action = [{:.2f}, {:.2f}]\".format(action[0,0], action[1,0]),\n print \"{:9.3f} (\\33[93m{:9.3f}\\33[0m)\".format(reward, total_return),\n print \"td_target (value) = {:5.2f} + {:5.2f} * {:5.2f} = {:5.2f}, value = {:5.2f}, td_error (policy) = {:5.2f}\".format(\n reward, discount_factor, value_next, td_target, value, td_error)\n\n if done or total_return < 0:\n break\n\n state = next_state\n \n stats.episode_rewards[i] = total_return\n stats.episode_lengths[i] = t\n\n return stats", "def _compute_reward(self):\n reward = 0.0\n return reward", "def act(self, observation):\n if np.random.random() >= self.epsilon:\n return np.argmax(self.expvalue)\n else:\n return np.random.randint(0, 9)", "def reward_estim(self, s):\n r_exp = s.dot(self.r)\n return r_exp", "def test_prop_reward(self):\n tmax = 10.0\n dt = 1.0\n\n reward_scale = 5.0\n\n ini_rate = 80.0\n\n tutor = SimpleNeurons(1, out_fct=lambda _: ini_rate+20.0)\n reward = MockReward(lambda t: 1.0 if t < tmax/2 else -1)\n tutor_rule = ReinforcementTutorRule(tutor, reward, tau=0,\n constrain_rates=False, ini_rate=ini_rate, learning_rate=1.0,\n use_tutor_baseline=False)\n\n sim1 = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim1.run(tmax)\n\n drates1 = tutor_rule.rates - ini_rate\n\n tutor_rule.reset_rates()\n reward.reward_fct = lambda t: reward_scale if t < tmax/2 else -reward_scale\n\n sim2 = simulation.Simulation(tutor, reward, tutor_rule, dt=dt)\n sim2.run(tmax)\n\n drates2 = tutor_rule.rates - ini_rate\n\n self.assertLess(np.max(np.abs(reward_scale*drates1 - drates2)), 1e-6)", "def get_reward(self):\n #original reward function: reward = 1.-.3*(abs(self.sim.pose[:3] - self.target_pos)).sum()\n thrusts = self.sim.get_propeler_thrust(self.sim.prop_wind_speed)\n linear_forces = self.sim.get_linear_forces(thrusts)\n distance = np.linalg.norm(self.target_pos - self.sim.pose[:3])\n #speed = math.sqrt(np.square(self.sim.find_body_velocity()).sum())\n #with 300x300x300m env, the max distance from one corner to another is 519\n max_distance = 519\n #Focus quadcopter on not crashing but first rewarding an upward linear force until at the height of the target\n if self.sim.pose[2] < self.target_pos[2]:\n #velocity_discount = 1/speed\n reward = np.tanh(linear_forces[2])\n #after getting to the correct z-coordinate, move to the correct y-coordinate\n elif self.sim.pose[1] < self.target_pos[1]:\n #velocity_discount = 1/speed\n reward = 1 + np.tanh(linear_forces[1])\n #finally, after getting rewards for the x and y coordinates, give reward for distance\n #at this stage, the drone 
will have overshot the x and y coordinates, but it would be in a better area to\n #start searching for the x coordinate\n elif distance > 1 and self.sim.pose[2] > self.target_pos[2] and self.sim.pose[1] > self.target_pos[1] :\n reward = 2 + (1-math.pow((distance/300),.04))\n elif distance < 1:\n self.success = True\n reward = 100\n #possible reward for hover: np.exp(-np.square(linear_forces[2]))\n return reward", "def get_reward(self, observations, actions):\n\n #initialize and reshape as needed, for batch mode\n self.reward_dict = {}\n if len(observations.shape)==1:\n observations = np.expand_dims(observations, axis = 0)\n actions = np.expand_dims(actions, axis = 0)\n batch_mode = False\n else:\n batch_mode = True\n\n # obs:\n # self.obs_dict['robot_pos'], #24\n # self.obs_dict['object_position'], #3\n # self.obs_dict['object_orientation'], #3\n # self.obs_dict['object_velp'], #3\n # self.obs_dict['object_velr'], #3\n # self.obs_dict['desired_orientation'], #3\n\n #get vars\n obj_pos = observations[:, (24):(24)+3]\n obj_orientation = observations[:,(24+3):(24+3)+3]\n desired_orientation = observations[:,-3:]\n obj_height = observations[:,24+2]\n zeros = np.zeros(obj_height.shape)\n\n #orientation\n angle_diffs = np.linalg.norm(obj_orientation - desired_orientation, axis=1)\n\n #fall\n is_fall = zeros.copy()\n is_fall[obj_height < -0.1] = 1\n\n #done based on is_fall\n dones = (is_fall==1) if not self.startup else zeros\n\n #rewards\n self.reward_dict['ori_dist'] = -7*angle_diffs\n self.reward_dict['drop_penalty'] = -1000*is_fall\n self.reward_dict['r_total'] = self.reward_dict['ori_dist'] + self.reward_dict['drop_penalty']\n\n #return\n if not batch_mode:\n return self.reward_dict['r_total'][0], dones[0]\n return self.reward_dict['r_total'], dones", "def rtest_predictoutcome():\n\n #define cohort size\n npatients = 2\n\n #init healthy patients\n simulator = AbbcEnvironment(patients=npatients)\n\n #simulate healthy patients for long term in short term increments\n nstep = int(long_term/short_term)\n\n #define action taken : -1 means patients will be simulated as healthy\n action = np.repeat(-1, npatients)\n\n #init episode list\n episode = [simulator.state]\n\n #main simulation loop to generate episodes\n for step in range(nstep):\n episode += simulator.take_action(action=action, simtime=short_term)\n\n #episode length is 1+2*nstep consisting of intit state (5xnpat) followed by\n # next state and reward (1xnpat) repeating each time step.\n #print(episode)\n #print(len(episode))\n\n #---semi gradient temporal difference (0) algorithm ---\n #init hyperparameters\n alpha = .1 #learning rate\n #init Value function model\n agent = AbbcAgent(discount=1.0)\n #loop over episodes\n for patient in range(npatients):\n #state = [nstep]\n #state += episode[0][:,patient] #get inital state\n state = np.append(episode[0][:,patient],nstep).reshape((6,1)) #get inital state\n\n print(state)\n #loop over time steps in episode\n for k in range(1,nstep+1):\n #get next state and reward\n #nextstate = [nstep-k]\n #nextstate = episode[k*2-1][:,patient]\n nextstate = np.append(episode[k*2-1][:,patient],nstep-k).reshape((6,1))\n\n reward = episode[k*2][patient]\n\n #get magnitude for forces\n magnitude = alpha * (reward + agent.discount * agent.get_value(nextstate)\n - agent.get_value(state))\n #compute forces\n forces = computeforces(agent.prednet, state, 0, \"iden\")\n\n #update model\n for layer in forces:\n index = layer[\"layer\"]\n agent.prednet[index][\"weight\"] += magnitude * layer[\"fweight\"]\n 
agent.prednet[index][\"bias\"] += magnitude * layer[\"fbias\"]\n\n state = np.copy(nextstate)\n\n\n #make predictions\n state = np.append(episode[0][:,patient],nstep).reshape((6,1)) #get inital state\n print(agent.get_value(state))\n\n #Value function approximates outcome return at time horizon.\n assert(False)\n\n ##define action taken\n #action = np.repeat(2, npatients)\n ##main simulation loop\n #for step in range(nstep):\n # _, drugy_reward[step,:] = simulator.take_action(action=action, simtime=short_term)", "def att_neg_reward(state, election_results, electoral_votes, attack_list):\n return -538/51", "def update_policy(self, minibatch_size):\n \n steps = self.rewards.shape[0]\n batch_size = self.rewards.shape[0] * self.rewards.shape[1]\n #steps = 500\n #batch_size = 500\n #print(steps)\n #print(batch_size)\n \n # Compute advantages\n '''\n with torch.no_grad():\n if self.gae:\n advantages = torch.zeros_like(self.rewards).to(self.training_device)\n lastgaelam = 0\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n nextvalues = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t + 1]\n nextvalues = self.state_values[t + 1]\n delta = self.rewards[t] + self.gamma * nextvalues * nextnonterminal - self.state_values[t]\n advantages[t] = lastgaelam = delta + self.gamma * self.gae_lambda * nextnonterminal * lastgaelam\n returns = advantages + self.state_values\n else:\n returns = torch.zeros_like(self.rewards).to(self.training_device)\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n next_return = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t+1]\n next_return = returns[t+1]\n returns[t] = self.rewards[t] + self.gamma * nextnonterminal * next_return\n advantages = returns - self.state_values\n ''' \n returns = torch.zeros_like(self.rewards).to(self.training_device)\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n next_return = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t+1]\n next_return = returns[t+1]\n returns[t] = self.rewards[t] + self.gamma * nextnonterminal * next_return\n advantages = returns - self.state_values\n \n\n # flatten the batch\n #b_obs = self.states.reshape((-1,) + self.state_space)\n #print(self.states.shape)\n b_obs = self.states.reshape((-1,4)).detach()\n b_logprobs = self.action_probs.reshape(-1,1).detach()\n b_actions = self.actions.reshape((-1,)).detach()\n b_advantages = advantages.reshape(-1,1)\n b_returns = returns.reshape(-1,1)\n b_values = self.state_values.reshape(-1,1)\n \n # Optimize policy and value network for K epochs, run optimization in minibatches\n \n inds = np.arange(batch_size)\n for i_epoch_pi in range(self.epochs):\n np.random.shuffle(inds)\n for start in range(0, batch_size, minibatch_size):\n end = start + minibatch_size\n minibatch_ind = inds[start:end]\n mb_advantages = b_advantages[minibatch_ind]\n if self.norm_adv:\n mb_advantages = (mb_advantages - mb_advantages.mean()) / (mb_advantages.std() + 1e-8)\n \n #_, newlogproba, entropy = self.get_action(b_obs[minibatch_ind], b_actions[minibatch_ind])\n newlogproba, entropy = self.evaluate(b_obs[minibatch_ind], b_actions[minibatch_ind])\n #ratio = (newlogproba - b_logprobs[minibatch_ind]).exp()\n ratio = torch.exp((newlogproba - b_logprobs[minibatch_ind].detach()))\n \n # Stats\n approx_kl = (b_logprobs[minibatch_ind] - newlogproba).mean()\n\n # Policy loss\n pg_loss1 = -mb_advantages * ratio\n 
pg_loss2 = -mb_advantages * torch.clamp(ratio, 1 - self.clip_epsilon, 1 + self.clip_epsilon)\n pg_loss = torch.max(pg_loss1, pg_loss2).mean()\n entropy_loss = entropy.mean()\n\n # Value loss\n _, new_values = self.policy.forward(b_obs[minibatch_ind])\n if self.clip_vloss:\n \n v_loss_unclipped = self.MseLoss(new_values,b_returns[minibatch_ind])\n #v_loss_unclipped = ((new_values - b_returns[minibatch_ind]) ** 2)\n v_clipped = b_values[minibatch_ind] + torch.clamp(new_values - b_values[minibatch_ind],\n -self.clip_epsilon, self.clip_epsilon)\n #v_loss_clipped = (v_clipped - b_returns[minibatch_ind]) ** 2\n v_loss_clipped = self.MseLoss(v_clipped,b_returns[minibatch_ind])\n v_loss_max = torch.max(v_loss_unclipped, v_loss_clipped)\n #v_loss = 0.5 * v_loss_max.mean()\n v_loss = 0.5 * v_loss_max\n else:\n #v_loss = 0.5 * ((new_values - b_returns[minibatch_ind]) ** 2).mean()\n v_loss = self.MseLoss(new_values,b_returns[minibatch_ind])\n\n loss = pg_loss + v_loss * self.vf_coeff - self.ent_coeff * entropy_loss\n\n self.optimizer.zero_grad()\n loss.backward()\n torch.nn.utils.clip_grad_norm_(self.policy.parameters(), self.max_grad_norm)\n self.optimizer.step()\n # Copy new weights into old policy:\n self.old_policy.load_state_dict(self.policy.state_dict())", "def update_model(\n self, experience: TensorTuple, demos: TensorTuple\n ) -> TensorTuple: # type: ignore\n exp_states, exp_actions, exp_rewards, exp_next_states, exp_dones = experience\n demo_states, demo_actions, demo_rewards, demo_next_states, demo_dones = demos\n\n states = torch.cat((exp_states, demo_states), dim=0)\n actions = torch.cat((exp_actions, demo_actions), dim=0)\n rewards = torch.cat((exp_rewards, demo_rewards), dim=0)\n next_states = torch.cat((exp_next_states, demo_next_states), dim=0)\n dones = torch.cat((exp_dones, demo_dones), dim=0)\n\n # G_t = r + gamma * v(s_{t+1}) if state != Terminal\n # = r otherwise\n masks = 1 - dones\n next_actions = self.actor_target(next_states)\n next_values = self.critic_target(torch.cat((next_states, next_actions), dim=-1))\n curr_returns = rewards + (self.hyper_params.gamma * next_values * masks)\n curr_returns = curr_returns.to(self.device)\n\n # critic loss\n gradient_clip_ac = self.hyper_params.gradient_clip_ac\n gradient_clip_cr = self.hyper_params.gradient_clip_cr\n\n values = self.critic(torch.cat((states, actions), dim=-1))\n critic_loss = F.mse_loss(values, curr_returns)\n\n # train critic\n self.critic_optim.zero_grad()\n critic_loss.backward()\n clip_grad_norm_(self.critic.parameters(), gradient_clip_cr)\n self.critic_optim.step()\n\n # policy loss\n actions = self.actor(states)\n policy_loss = -self.critic(torch.cat((states, actions), dim=-1)).mean()\n\n # bc loss\n pred_actions = self.actor(demo_states)\n qf_mask = torch.gt(\n self.critic(torch.cat((demo_states, demo_actions), dim=-1)),\n self.critic(torch.cat((demo_states, pred_actions), dim=-1)),\n ).to(self.device)\n qf_mask = qf_mask.float()\n n_qf_mask = int(qf_mask.sum().item())\n\n if n_qf_mask == 0:\n bc_loss = torch.zeros(1, device=self.device)\n else:\n bc_loss = (\n torch.mul(pred_actions, qf_mask) - torch.mul(demo_actions, qf_mask)\n ).pow(2).sum() / n_qf_mask\n\n # train actor: pg loss + BC loss\n actor_loss = (\n self.hyper_params.lambda1 * policy_loss\n + self.hyper_params.lambda2 * bc_loss\n )\n self.actor_optim.zero_grad()\n actor_loss.backward()\n clip_grad_norm_(self.actor.parameters(), gradient_clip_ac)\n self.actor_optim.step()\n\n # update target networks\n common_utils.soft_update(self.actor, 
self.actor_target, self.hyper_params.tau)\n common_utils.soft_update(self.critic, self.critic_target, self.hyper_params.tau)\n\n return actor_loss.item(), critic_loss.item(), n_qf_mask", "def get_current_reward(self, state):\n if state == 1:\n return 1.0 + self.rng.normal(scale=self.terminal_reward_stdev)\n else:\n return 0.0", "def update_reward(state, reward, max_reward, alpha=1, c=100, gamma=0.9):\n\t\t# update number of actions done so far to this state\n\t\tactions[state] = actions.get(state, 0.0) + 1.0\n\t\t# compute learning rate\n\t\talpha *= c / (c + actions[state])\n\t\trewards[state] = rewards.get(state, 0.0) + alpha*(reward+gamma*max_reward-rewards.get(state, 0.0))", "def learn(self, obs, action, reward, next_obs, terminal):\n pred_value = self.model(obs).gather(1, action)\n # model for selection actions.\n greedy_action = self.model(next_obs).max(dim=1, keepdim=True)[1]\n with torch.no_grad():\n # target_model for evaluation.\n max_v = self.target_model(next_obs).gather(1, greedy_action)\n target = reward + (1 - terminal) * self.gamma * max_v\n self.optimizer.zero_grad()\n loss = self.mse_loss(pred_value, target)\n loss.backward()\n self.optimizer.step()\n return loss.item()", "def loss(self, states, actions, next_states, rewards, discount_rate=.99):\n # TODO: implement this\n actions = tf.cast(actions, tf.int64)\n a = tf.stack([tf.range(states.shape[0],dtype=tf.int64), actions], axis=1)\n qVals = tf.gather_nd(self.call(states), a) # [batch_size] q-values for each action\n nextVals = tf.reduce_max(self.call(next_states), axis=1) # max of q-values [batch_size, num_actions] across num_actions\n targetVals = rewards + (discount_rate*nextVals)\n loss = tf.reduce_sum(tf.math.square(qVals - targetVals))\n return loss", "def compute_reward(self, obs, action, state):\n pass", "def compute_targets(rollout, action_space, last_r=0.0, gamma=0.9, lambda_=1.0):\n\n rollout = compute_advantages(rollout, last_r, gamma=gamma, lambda_=lambda_)\n rollout[\"adv_targets\"] = np.zeros((rollout.count, action_space.n))\n rollout[\"adv_targets\"][np.arange(rollout.count), rollout[\"actions\"]] = \\\n rollout[\"advantages\"]\n rollout[\"value_targets\"] = rollout[\"rewards\"].copy()\n rollout[\"value_targets\"][:-1] += gamma * rollout[\"vf_preds\"][1:]\n return rollout", "def get_reward(self):\n return self.calc_reward(self.sim.pose[:3], self.sim.v)", "def reward_func(sample_solution=None):\r\n\r\n # make sample_solution of shape [sourceL x batch_size x input_dim]\r\n sample_solution = tf.stack(sample_solution,0)\r\n\r\n sample_solution_tilted = tf.concat((tf.expand_dims(sample_solution[-1],0),\r\n sample_solution[:-1]),0)\r\n # get the reward based on the route lengths\r\n\r\n\r\n route_lens_decoded = tf.reduce_sum(tf.pow(tf.reduce_sum(tf.pow(\\\r\n (sample_solution_tilted - sample_solution) ,2), 2) , .5), 0)\r\n return route_lens_decoded", "def _compute_reward(self): \n reward = -1\n return reward", "def rmax(env, gamma, m, R_max, epsilon, num_episodes, max_step = 6):\n\n Q = np.ones((env.nS, env.nA)) * R_max / (1 - gamma)\n R = np.zeros((env.nS, env.nA))\n nSA = np.zeros((env.nS, env.nA))\n nSASP = np.zeros((env.nS, env.nA, env.nS))\n ########################################################\n # YOUR CODE HERE #\n ########################################################\n\n # Generate episodes\n average_scores = []\n accum = 0.0\n term = int(np.log(1 / (epsilon * (1 - gamma))) / (1 - gamma))\n for i in xrange(num_episodes):\n S = env.reset()\n done = False\n episode_reward = 0.0\n n_steps = 0\n\n 
while not done:\n\n if n_steps >= max_step:\n break\n\n A = np.argmax([Q[S,a] for a in range(env.nA)])\n\n # Make an action\n nextS, reward, done, _ = env.step(A)\n episode_reward += reward\n\n # R-Max\n if nSA[S, A] < m:\n nSA[S, A] += 1\n R[S, A] += reward\n nSASP[S, A, nextS] += 1\n\n if nSA[S, A] == m:\n for j in range(term):\n for S_bar in range(env.nS):\n for A_bar in range(env.nA):\n if nSA[S_bar, A_bar] >= m:\n N = float(nSA[S_bar, A_bar])\n T_hat = nSASP[S_bar, A_bar, :] / N\n R_hat = R[S_bar, A_bar] / N\n Q[S_bar, A_bar] = R_hat\n Q[S_bar, A_bar] += gamma * np.sum(T_hat * np.max(Q, axis=1))\n\n\n # Update Q-value\n S = nextS\n n_steps += 1\n\n accum += episode_reward\n average_scores.append(accum/(i+1))\n\n plt.plot(average_scores[:10000], label=\"m=%d\"%(m))\n\n ########################################################\n # END YOUR CODE #\n ########################################################\n return Q", "def logistic_regression(y, tx, initial_w, max_iters, gamma, debug = False):\n losses, ws = gradient_descent(y, tx, initial_w, max_iters, gamma, loss_f = model_logistic.loss, grad_f = model_logistic.grad, debug = debug)\n return get_last_ans(ws, losses)", "def actor_loss(q: torch.Tensor, target: ActorCritic, reward: torch.Tensor, next_state: torch.Tensor, next_action: torch.Tensor, done: bool, gamma: float):\n with torch.no_grad():\n next_action = target.p(next_state)\n next_q = target.q(next_state, next_action)\n backup = reward + gamma * (1 - done) * next_q\n q_loss = ((q - backup)**2).mean()\n return q_loss", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n def expHelper(gameState, deepness, agent):\n if agent >= gameState.getNumAgents():\n agent = 0\n deepness += 1\n if (deepness==self.depth or gameState.isWin() or gameState.isLose()):\n return self.evaluationFunction(gameState)\n elif (agent == 0):\n return maxFinder(gameState, deepness, agent)\n else:\n return expFinder(gameState, deepness, agent)\n \n def maxFinder(gameState, deepness, agent):\n output = [\"meow\", -float(\"inf\")]\n pacActions = gameState.getLegalActions(agent)\n if not pacActions:\n return self.evaluationFunction(gameState)\n for action in pacActions:\n currState = gameState.generateSuccessor(agent, action)\n currValue = expHelper(currState, deepness, agent+1)\n if type(currValue) is list:\n testVal = currValue[1]\n else:\n testVal = currValue\n if testVal > output[1]:\n output = [action, testVal] \n return output\n \n def expFinder(gameState, deepness, agent):\n output = [\"meow\", 0]\n ghostActions = gameState.getLegalActions(agent)\n if not ghostActions:\n return self.evaluationFunction(gameState)\n probability = 1.0/len(ghostActions)\n for action in ghostActions:\n currState = gameState.generateSuccessor(agent, action)\n currValue = expHelper(currState, deepness, agent+1)\n if type(currValue) is list:\n val = currValue[1]\n else:\n val = currValue\n output[0] = action\n output[1] += val * probability\n return output\n \n outputList = expHelper(gameState, 0, 0)\n return outputList[0]", "def reward(self,\n state: float) -> float:\n raise NotImplementedError", "def metrics_to_reward(self, accuracy, ge_at_10_percent, ge_at_50_percent, ge_no_to_0, trainable_params):\n max_reward = 3 # 2 from below line, 1 from ge_no_to_0\n\n # R = 0-0.5 + 0-1 + 0-0.5\n reward = (\n accuracy * .5 # 0-0.5\n + (128 - min(ge_at_10_percent, 128)) / 128 # 0-1\n + (128 - min(ge_at_50_percent, 128)) / (128 * 2) # 0-0.5\n )\n\n # The network was successful in the key recovery within the set amount of traces\n 
if ge_no_to_0 is not None and not math.isnan(ge_no_to_0):\n traces_per_attack = self.hyper_parameters.TRACES_PER_ATTACK + 1 # also reward ge of 0 in the max |traces|\n reward += (traces_per_attack - ge_no_to_0) / traces_per_attack # R += 0-1\n\n if self.reward_small:\n max_trainable_params = getattr(self.hyper_parameters, 'MAX_TRAINABLE_PARAMS_FOR_REWARD', 20_000_000)\n\n reward += max(0, (max_trainable_params - trainable_params) / max_trainable_params) # R += 0-1\n max_reward += 1\n\n return reward / max_reward", "def generate_returns(episode, gamma=0.9):\n len_episode = len(episode) # T = length of current episode\n epi_returns = np.zeros(len_episode)\n ############################\n # YOUR IMPLEMENTATION HERE #\n # HINT: Representing immediate reward as a vector and\n # using a vector of powers of gamma along with `np.dot` will\n # make this much easier to implement in a few lines of code.\n # You don't need to use this approach however and use whatever works for you. #\n\n\n episode_array = np.array(episode)\n master_reward_vector = episode_array[:, 2] # vector containing all the rewards from the episode, [r1, r2, ..., rT]\n master_gamma_vector = np.power(gamma, np.arange(1, len(master_reward_vector)+1)) # [gamma^0, gamma^1, gamma^2, ..., gamma^T]\n # print(\"master_reward_vector {0}\".format(master_reward_vector))\n # print(\"master_gamma_vector {0}\".format(master_gamma_vector))\n \n for i in range (0, len_episode):\n reward_vector = master_reward_vector[i:] # vector containing the last i rewards, [rk-i, ..., rk-1, rk]\n gamma_vector = master_gamma_vector[0:len_episode-i] # vector containing the first i powers of gamma, [gamma^0, gamma^1, ..., gamma^i]\n # print(\"reward_vector {0}, dim {1}\".format(reward_vector, reward_vector.shape))\n # print(\"gamma_vector {0}, dim {1}\".format(gamma_vector, gamma_vector.shape))\n epi_returns[i] = np.dot(gamma_vector, reward_vector) # [rk-i, ..., rk-1, rk] DOT [gamma^0, gamma^1, ..., gamma^i]\n # print(\"epi_returns[{0}] {1}\".format(i, epi_returns[i]))\n \n ############################\n # print(\"epi_returns{0}\".format(epi_returns))\n return epi_returns", "def get_loss(self, trajectories):\n # Use self.agent to replay the trajectories computation on the batch of trajectories\n replayed = replay_agent(self.agent, trajectories)\n\n info = trajectories.info\n trajectories = trajectories.trajectories\n\n # Compute the cumulated future reward\n reward = trajectories[\"_observation/reward\"]\n mask = trajectories.mask()\n reward = reward * mask\n max_length = trajectories.lengths.max().item()\n cumulated_reward = torch.zeros_like(reward)\n cumulated_reward[:, max_length - 1] = reward[:, max_length - 1]\n for t in range(max_length - 2, -1, -1):\n cumulated_reward[:, t] = (\n reward[:, t]\n + self.config[\"discount_factor\"] * cumulated_reward[:, t + 1]\n )\n\n # Compute reinforce loss\n action_probabilities = replayed[\"action_probabilities\"]\n action_distribution = torch.distributions.Categorical(action_probabilities)\n baseline = replayed[\"baseline\"].squeeze(-1)\n log_proba = action_distribution.log_prob(trajectories[\"action/action\"])\n reinforce_loss = log_proba * (cumulated_reward - baseline).detach()\n reinforce_loss = (reinforce_loss * mask).sum(1) / mask.sum(1)\n avg_reinforce_loss = reinforce_loss.mean()\n\n # Compute entropy loss\n entropy = action_distribution.entropy()\n entropy = (entropy * mask).sum(1) / mask.sum(1)\n avg_entropy = entropy.mean()\n\n # Compute baseline loss\n baseline_loss = (baseline - cumulated_reward) ** 2\n 
baseline_loss = (baseline_loss * mask).sum(1) / mask.sum(1)\n avg_baseline_loss = baseline_loss.mean()\n\n return DictTensor(\n {\n \"avg_reward\": cumulated_reward[:, 0].mean(),\n \"baseline_loss\": avg_baseline_loss,\n \"reinforce_loss\": avg_reinforce_loss,\n \"entropy_loss\": avg_entropy,\n }\n )", "def update(experience_buffer, returns):\n rewards = np.array(experience_buffer[2])\n discount_rewards = rewards * (FLAGS.GAMMA ** np.arange(len(rewards)))\n current_return = discount_rewards.sum()\n returns.append(current_return)\n returns = returns[-100:] # Get recent 100 returns.\n baseline = sum(returns) / len(returns) # Baseline is the average of 100 returns.\n sess.run(train_op, {observation_: experience_buffer[0],\n action_: experience_buffer[1],\n advantage_: current_return - baseline}) \n return returns", "def max_diffs(state):\n # your code here\n return best_action(state, pig_actions, Q_pig, win_diff)", "def target_m_dqn(model, target_network, states, next_states, actions,rewards, terminals, \n cumulative_gamma,tau,alpha,clip_value_min):\n \n #----------------------------------------\n q_state_values = jax.vmap(target_network, in_axes=(0))(states).q_values\n q_state_values = jnp.squeeze(q_state_values)\n \n next_q_values = jax.vmap(target_network, in_axes=(0))(next_states).q_values\n next_q_values = jnp.squeeze(next_q_values)\n #----------------------------------------\n\n tau_log_pi_next = stable_scaled_log_softmax(next_q_values, tau, axis=1)\n pi_target = stable_softmax(next_q_values,tau, axis=1)\n replay_log_policy = stable_scaled_log_softmax(q_state_values, tau, axis=1)\n\n #----------------------------------------\n \n replay_next_qt_softmax = jnp.sum((next_q_values-tau_log_pi_next)*pi_target,axis=1)\n\n replay_action_one_hot = nn.one_hot(actions, q_state_values.shape[-1])\n tau_log_pi_a = jnp.sum(replay_log_policy * replay_action_one_hot, axis=1)\n\n #a_max=1\n tau_log_pi_a = jnp.clip(tau_log_pi_a, a_min=clip_value_min,a_max=1)\n\n munchausen_term = alpha * tau_log_pi_a\n modified_bellman = (rewards + munchausen_term +cumulative_gamma * replay_next_qt_softmax *\n (1. 
- jnp.float32(terminals)))\n \n return jax.lax.stop_gradient(modified_bellman)", "def run_maxent(episode, weights):\n script = episode['script']\n for i, line in enumerate(script):\n line = line['text']\n episode['script'][i]['maxent_score'] = most_probable_class(line, weights)[0]", "def _reward(self, a):\r\n\r\n xrel = self._body_coord()[0] - self.goal\r\n dist = np.sum(xrel ** 2)\r\n return (\r\n - self.cx * dist / (np.sqrt(dist) + 1) - self.cu * np.sum(a ** 2)\r\n )", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n \" Max value \"\n def max_value(gameState, depth, alpha, beta):\n \n action = gameState.getLegalActions(0) \n if gameState.isWin() or gameState.isLose() or depth == self.depth:\n return (self.evaluationFunction(gameState), None)\n \n #initialize v to - infinity\n v = -(float(\"inf\"))\n for i in action:\n next_state = gameState.generateSuccessor(0, i)\n value, action = min_value(next_state, 1, depth, alpha, beta)\n \n if (v < value):\n v = value\n take_action = i\n\n if (v > beta):\n return (v, take_action)\n\n alpha = max(alpha, v)\n\n return (v, take_action)\n\n \n def min_value(gameState, agent, depth, alpha, beta):\n \n ghost_action = gameState.getLegalActions(agent) \n if len(ghost_action) == 0:\n return (self.evaluationFunction(gameState), None)\n\n #initialize v to +infinity\n v = float(\"inf\")\n \n\n for i in ghost_action:\n next_state = gameState.generateSuccessor(agent, i)\n ghost_no = gameState.getNumAgents() \n if (agent == ghost_no - 1):\n new_depth= depth+1\n value, action = max_value(next_state, new_depth, alpha, beta)\n else:\n new_agent= agent+1\n value, action = min_value(next_state, new_agent, depth, alpha, beta)\n \n if (value < v):\n v = value\n take_action = i\n\n if (v < alpha):\n return (v, take_action)\n\n beta = min(beta, v)\n\n return (v, take_action)\n\n alpha = -(float(\"inf\"))\n beta = float(\"inf\")\n final_value, final_action = max_value(gameState, 0, alpha, beta)\n return final_action", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n \n #print \"\\n\\n\\n\"\n \n def MaxValue(gameState, currentDepth, agentNumber):\n if currentDepth is self.depth or gameState.isWin() or gameState.isLose():\n #print \"\\t\", self.evaluationFunction(gameState)\n return (self.evaluationFunction(gameState), Directions.STOP)\n \n largestValue = float(\"-inf\")\n bestAction = Directions.STOP\n legalActions = gameState.getLegalActions(agentNumber)\n legalActions.sort()\n for action in legalActions:\n if action is Directions.STOP:\n continue\n successor = gameState.generateSuccessor(agentNumber, action)\n nextAgentNumber = (agentNumber + 1) % gameState.getNumAgents()\n if nextAgentNumber is 0:\n successorValue= MaxValue(successor, currentDepth + 1, nextAgentNumber)[0]\n else:\n successorValue = ExpValue(successor, currentDepth, (agentNumber + 1) % gameState.getNumAgents())[0]\n if(successorValue > largestValue):\n largestValue = successorValue\n bestAction = action\n return (largestValue, bestAction)\n \n def ExpValue(gameState, currentDepth, agentNumber):\n if currentDepth is self.depth or gameState.isWin() or gameState.isLose():\n #print \"\\t\", self.evaluationFunction(gameState)\n return (self.evaluationFunction(gameState), Directions.STOP)\n \n totalValue = 0\n legalActions = gameState.getLegalActions(agentNumber)\n legalActions.sort()\n for action in legalActions:\n successor = gameState.generateSuccessor(agentNumber, action)\n nextAgentNumber = (agentNumber + 1) % gameState.getNumAgents()\n if nextAgentNumber is 0:\n 
successorValue = MaxValue(successor, currentDepth + 1, nextAgentNumber)[0]\n else:\n successorValue = ExpValue(successor, currentDepth, nextAgentNumber)[0]\n totalValue += successorValue\n return (totalValue/len(legalActions), Directions.STOP)\n \n result= MaxValue(gameState, 0, 0)\n resultActionToTake =result[1]\n \n #print gameState.getLegalActions(0)\n #print 'AlphaBeta value for depth ', self.depth,' ',result[0]\n import time\n\n #print \"SCORE picked \", result[0]\n #time.sleep(1)\n #print 'This should always be true... ', resultActionToTake in gameState.getLegalActions(0)\n return resultActionToTake", "def compute_td_loss(states, actions, rewards, is_done,\r\n agent, target_network,\r\n gamma=0.99,\r\n device=device):\r\n states = torch.tensor(states, device=device, dtype=torch.float) # shape: [batch_size, *state_shape]\r\n\r\n # for some torch reason should not make actions a tensor\r\n actions = torch.tensor(actions, device=device, dtype=torch.long) # shape: [batch_size]\r\n rewards = torch.tensor(rewards, device=device, dtype=torch.float) # shape: [batch_size]\r\n # shape: [batch_size, *state_shape]\r\n is_done = torch.tensor(\r\n is_done.astype('float32'),\r\n device=device,\r\n dtype=torch.float\r\n ) # shape: [batch_size]\r\n is_not_done = 1 - is_done\r\n min_history_size = (len(actions) - 1) // 2\r\n\r\n agent_memories = agent.get_initial_state(1)\r\n target_memories = target_network.get_initial_state(1)\r\n\r\n agent_qvalues = []\r\n target_qvalues = []\r\n\r\n for t in range(len(actions)):\r\n\r\n agent_memories, predicted_agent_qvalues = agent(agent_memories,\r\n states[t].unsqueeze(0))\r\n target_memories, predicted_target_qvalues = target_network(target_memories,\r\n states[t].unsqueeze(0))\r\n\r\n if t >= min_history_size:\r\n agent_qvalues.append(predicted_agent_qvalues)\r\n target_qvalues.append(predicted_target_qvalues)\r\n\r\n if is_done[t]:\r\n agent_memories = agent.get_initial_state(1)\r\n target_memories = target_network.get_initial_state(1)\r\n\r\n agent_qvalues = torch.stack(agent_qvalues, dim=1).squeeze()\r\n target_qvalues = torch.stack(target_qvalues, dim=1).squeeze()[1:,:]\r\n\r\n agent_next_qvalues = agent_qvalues[1:,:]\r\n best_actions = torch.argmax(agent_next_qvalues, dim=1)\r\n\r\n predicted_qvalues_for_actions = agent_qvalues[range(\r\n len(actions[min_history_size:])), actions[min_history_size:]][:-1]\r\n next_state_values = target_qvalues[range(\r\n len(best_actions)), best_actions]\r\n\r\n target_qvalues_for_actions = rewards[min_history_size:-1] \\\r\n + gamma * is_not_done[min_history_size:-1] * next_state_values\r\n\r\n\r\n loss = torch.mean((predicted_qvalues_for_actions -\r\n target_qvalues_for_actions.detach()) ** 2)\r\n\r\n return loss", "def logistic_regression(y, tx, initial_w, max_iters, gamma):\r\n y_resize = (1+y)/2 #rescales target so that -1 values are changed to 0 \r\n w_list = [initial_w]\r\n loss_list = []\r\n w = initial_w\r\n \r\n for n_iter in range(max_iters):\r\n grad = calculate_gradient_LR(y_resize, tx, w)\r\n w = w - gamma * grad\r\n loss = compute_loss_LG(y_resize, tx, w)\r\n w_list.append(w)\r\n loss_list.append(loss)\r\n return w_list[-1],loss_list[-1]", "def _build_target_quantile_values_op(self):\n batch_size = tf.shape(self._replay.rewards)[0]\n ###### Munchausen-specific\n replay_action_one_hot = tf.one_hot(\n self._replay.actions, self.num_actions, 1., 0., name='action_one_hot')\n # tau * ln pi_k+1 (s')\n replay_next_log_policy = utils.stable_scaled_log_softmax(\n self._replay_next_target_q_values, self.tau, 
axis=1)\n # tau * ln pi_k+1(s)\n replay_log_policy = utils.stable_scaled_log_softmax(\n self._replay_target_q_values, self.tau, axis=1)\n replay_next_policy = utils.stable_softmax( # pi_k+1(s')\n self._replay_next_target_q_values, self.tau, axis=1)\n\n tau_log_pi_a = tf.reduce_sum( # ln pi_k+1(a|s)\n replay_log_policy * replay_action_one_hot, axis=1)\n\n tau_log_pi_a = tf.clip_by_value(\n tau_log_pi_a, clip_value_min=self.clip_value_min, clip_value_max=0)\n\n munchuasen_term = self.alpha * tau_log_pi_a\n #########\n\n # Shape of rewards: (num_tau_prime_samples x batch_size) x 1.\n rewards = self._replay.rewards[:, None] + munchuasen_term[Ellipsis, None]\n rewards = tf.tile(rewards, [self.num_tau_prime_samples, 1])\n\n is_terminal_multiplier = 1. - tf.cast(self._replay.terminals, tf.float32)\n # Incorporate terminal state to discount factor.\n # size of gamma_with_terminal: (num_tau_prime_samples x batch_size) x 1.\n gamma_with_terminal = self.cumulative_gamma * is_terminal_multiplier\n gamma_with_terminal = tf.tile(gamma_with_terminal[:, None],\n [self.num_tau_prime_samples, 1])\n\n # shape: (batch_size * num_tau_prime_samples) x num_actions\n replay_next_policy_ = tf.tile(replay_next_policy,\n [self.num_tau_prime_samples, 1])\n replay_next_log_policy_ = tf.tile(replay_next_log_policy,\n [self.num_tau_prime_samples, 1])\n\n # shape: (batch_size * num_tau_prime_samples) x 1\n replay_quantile_values = tf.reshape(\n self._replay_net_target_quantile_values,\n [batch_size * self.num_tau_prime_samples, self.num_actions])\n\n # shape: (batch_size * num_tau_prime_samples) x num_actions\n weighted_logits = (\n replay_next_policy_ * (replay_quantile_values\n - replay_next_log_policy_))\n\n # shape: (batch_size * num_tau_prime_samples) x 1\n target_quantile_values = tf.reduce_sum(weighted_logits, axis=1,\n keepdims=True)\n\n return rewards + gamma_with_terminal * target_quantile_values", "def step_maxL_gradient_descent(y, tx, w, gamma):\n loss=loss_maxL(y, tx, w)\n grad=calculate_maxL_gradient(y,tx,w)\n # update w by gradient\n w=w-gamma*grad\n return w, loss", "def calculateTarget(self, qValuesNewState, reward, isFinal):\n if isFinal:\n return reward\n else :\n return reward + self.discountFactor * self.getMaxQ(qValuesNewState)", "def _compute_reward(self, observations, done):\n raise NotImplementedError()", "def _compute_reward(self, observations, done):\n raise NotImplementedError()", "def _compute_reward(self, observations, done):\n raise NotImplementedError()", "def _compute_reward(self, observations, done):\n raise NotImplementedError()", "def _compute_reward(self, observations, done):\n raise NotImplementedError()", "def _compute_reward(self, observations, done):\n raise NotImplementedError()", "def compute_advantages(rollout, last_r, gamma=0.9, lambda_=1.0, use_gae=True):\n\n traj = {}\n trajsize = len(rollout[\"actions\"])\n for key in rollout:\n traj[key] = np.stack(rollout[key])\n\n if use_gae:\n assert \"vf_preds\" in rollout, \"Values not found!\"\n vpred_t = np.concatenate([rollout[\"vf_preds\"], np.array([last_r])])\n delta_t = traj[\"rewards\"] + gamma * vpred_t[1:] - vpred_t[:-1]\n # This formula for the advantage comes\n # \"Generalized Advantage Estimation\": https://arxiv.org/abs/1506.02438\n traj[\"advantages\"] = discount(delta_t, gamma * lambda_)\n traj[\"value_targets\"] = (\n traj[\"advantages\"] + traj[\"vf_preds\"]).copy().astype(np.float32)\n else:\n rewards_plus_v = np.concatenate(\n [rollout[\"rewards\"], np.array([last_r])])\n traj[\"advantages\"] = 
discount(rewards_plus_v, gamma)[:-1]\n # TODO(ekl): support using a critic without GAE\n traj[\"value_targets\"] = np.zeros_like(traj[\"advantages\"])\n\n traj[\"advantages\"] = traj[\"advantages\"].copy().astype(np.float32)\n\n assert all(val.shape[0] == trajsize for val in traj.values()), \\\n \"Rollout stacked incorrectly!\"\n return SampleBatch(traj)", "def update(self, state, action, nextState, reward):\n \"\"\"Description:\n Use Q-Learning algoritm in slide 58 of MDP\n \"\"\"\n \"\"\" YOUR CODE HERE \"\"\"\n maxQns = self.getValue(nextState) # get max q-value of next state\n if maxQns == None:\n maxQns = 0\n Qsa = self.getQValue(state, action) #self.qValues[(state, action)]\n difference = reward + self.discountRate * maxQns - Qsa\n self.qValues[(state, action)] += self.alpha * difference\n \n self.vitCount[(state, action)] += 1\n \"\"\" END CODE \"\"\"", "def reward_amt(value, reward_vec, adj, softmax_inv_temp, discount, start_prob=None):\n n = len(reward_vec)\n softmax_value = np.exp(softmax_inv_temp * value) / np.sum(np.exp(softmax_inv_temp*value))\n policy = adj * softmax_value.reshape(1, -1)\n policy = util.l1_normalize_rows(policy)\n sr = np.linalg.pinv(np.eye(n) - discount * policy)\n value = np.dot(sr, reward_vec.reshape(-1, 1)).reshape(-1)\n if start_prob is None:\n start_prob = np.ones(n)*1. / n\n else:\n start_prob = start_prob.reshape(n)\n return np.sum(value * start_prob)", "def evaluate(env, model):\n episode_rewards = []\n for _ in range(10):\n reward_sum = 0\n done = False\n obs = env.reset()\n while not done:\n action, _states = model.predict(obs)\n obs, reward, done, info = env.step(action)\n reward_sum += reward\n episode_rewards.append(reward_sum)\n return np.mean(episode_rewards)", "def act(self, observation):\n if np.random.random() < self.epsilon:\n return np.random.randint(0,9)\n else:\n return np.argmax(self.values)", "def act(self, observation):\n if np.random.random() < self.epsilon:\n return np.random.randint(0,9)\n else:\n return np.argmax(self.values)", "def compute_reward(self, obs, action, state):\n return self._reward_func(obs, action), state", "def get_target_distribution(\n next_states, rewards, mask, gamma, target_estimator, support\n):\n bsz = rewards.shape[0]\n bsz_ = next_states.shape[0]\n bin_no = support.shape[0]\n v_min, v_max = support[0].item(), support[-1].item()\n delta_z = (v_max - v_min) / (bin_no - 1)\n\n probs = target_estimator(next_states, probs=True)\n qs = torch.mul(probs, support.expand_as(probs))\n argmax_a = qs.sum(2).max(1)[1].unsqueeze(1).unsqueeze(1)\n action_mask = argmax_a.expand(bsz_, 1, bin_no)\n _qa_probs = probs.gather(1, action_mask).squeeze()\n\n # Next-states batch can be smaller so we scatter qa_probs in\n # a tensor the size of the full batch with each row summing to 1\n qa_probs = torch.eye(bsz, bin_no, device=_qa_probs.device)\n qa_probs.masked_scatter_(mask.expand_as(qa_probs), _qa_probs)\n\n # Mask gamma and reshape it torgether with rewards to fit p(x,a).\n rewards = rewards.expand_as(qa_probs)\n gamma = (mask.float() * gamma).expand_as(qa_probs)\n\n # Compute projection of the application of the Bellman operator.\n bellman_op = rewards + gamma * support.unsqueeze(0).expand_as(rewards)\n bellman_op = torch.clamp(bellman_op, v_min, v_max)\n\n # Compute categorical indices for distributing the probability\n m = torch.zeros(bsz, bin_no, device=qa_probs.device)\n b = (bellman_op - v_min) / delta_z\n l = b.floor().long()\n u = b.ceil().long()\n\n # Fix disappearing probability mass when l = b = u (b is int)\n l[(u 
> 0) * (l == u)] -= 1\n u[(l < (bin_no - 1)) * (l == u)] += 1\n\n # Distribute probability\n \"\"\"\n for i in range(bsz):\n for j in range(self.bin_no):\n uidx = u[i][j]\n lidx = l[i][j]\n m[i][lidx] = m[i][lidx] + qa_probs[i][j] * (uidx - b[i][j])\n m[i][uidx] = m[i][uidx] + qa_probs[i][j] * (b[i][j] - lidx)\n for i in range(bsz):\n m[i].index_add_(0, l[i], qa_probs[i] * (u[i].float() - b[i]))\n m[i].index_add_(0, u[i], qa_probs[i] * (b[i] - l[i].float()))\n \"\"\"\n # Optimized by https://github.com/tudor-berariu\n offset = (\n torch.linspace(0, ((bsz - 1) * bin_no), bsz, device=qa_probs.device)\n .long()\n .unsqueeze(1)\n .expand(bsz, bin_no)\n )\n\n m.view(-1).index_add_(\n 0, (l + offset).view(-1), (qa_probs * (u.float() - b)).view(-1)\n )\n m.view(-1).index_add_(\n 0, (u + offset).view(-1), (qa_probs * (b - l.float())).view(-1)\n )\n return m, probs", "def _sparse_reward(self) -> float:\n # `score_on_end_of_traj` is supposed to be called at the end of a\n # trajectory but we use it here since it gives us exactly the reward\n # we're looking for.\n return self.score_on_end_of_traj()", "def calculate_return(list_of_reward, gamma):\n G = 0\n for r in reversed(list_of_reward):\n G = gamma * G + r\n\n return G", "def compute_intrinsic_reward(self, state, action, next_state, use_cuda, train=False):\n if use_cuda:\n fn = lambda x: x.cuda()\n device = \"gpu\"\n else:\n fn = lambda x: x.cpu()\n device = \"cpu\"\n if not self.predictor_dev == device:\n self.predictor_model = fn(self.predictor_model)\n self.predictor_dev = device\n if not self.target_dev == device:\n self.target_model = fn(self.target_model)\n self.target_dev = device\n\n target_feature = self.target_model(next_state)\n predict_feature = self.predictor_model(next_state)\n\n forward_loss = ((target_feature - predict_feature) ** 2).sum(-1).mean()\n self.loss = forward_loss\n\n if train:\n self.optimizer.zero_grad()\n self.loss.backward(retain_graph=True)\n torch.nn.utils.clip_grad_norm_(self.predictor_model.parameters(), 0.5)\n self.optimizer.step()\n\n return self.eta * forward_loss", "def return_state_utility(v, T, u, reward, gamma):\n action_array = np.zeros(4)\n for action in range(0, 4):\n action_array[action] = np.sum(np.multiply(u, np.dot(v, T[:, :, action])))\n return reward + gamma * np.max(action_array)", "def gen_outcome(alpha, delta, beta, win_counters, attempt_counters, h_features_win, h_features_att):\n return logistic(alpha+delta+beta+np.sum(np.log(1+np.array(attempt_counters))*np.array(h_features_att))+\\\n np.sum(np.log(1+np.array(win_counters))*np.array(h_features_win)))", "def logistic_regression_gradient_descent(y, tx, initial_w, max_iters, gamma):\n\tw = initial_w\n\n\tfor iter in range(max_iters):\n\t\tw = learning_by_gradient_descent(y, tx, w, gamma)\n\n\treturn w", "def reg_logistic_regression(y, tx, lambda_ , initial_w, max_iters, gamma):\n \n # Define parameters to store w and loss\n ws = [initial_w]\n losses = []\n w = initial_w\n y = (y + 1) / 2 # [-1, 1] -> [0, 1]\n \n for n_iter in range(max_iters):\n # computes gradient and loss\n\n grad = compute_gradient_log(y, tx, w)+2*lambda_*np.linalg.norm(w)\n loss = compute_loss_log(y, tx, w)+ lambda_*(np.linalg.norm(w)**2)\n\n #updates w\n\n w = w - gamma * grad\n # store w and loss\n\n ws.append(w)\n losses.append(loss)\n #print(\"regularised logistic regression: Gradient Descent({bi}/{ti}): loss={l}\".format(\n # bi=n_iter, ti=max_iters - 1, l=loss, w0=w[0], w1=w[1]), end=\"\\r\")\n return w, loss", "def getAction(self, gameState):\n \"*** YOUR CODE HERE 
***\"\n def expValue(gameState, agent, depth):\n expVal = 0\n successors = [gameState.generateSuccessor(agent, action) for action in gameState.getLegalActions(agent)]\n if agent == gameState.getNumAgents() - 1:\n depth += 1\n agent = 0\n else: \n agent += 1\n p = 1.0 / len(successors)\n for successor in successors:\n expVal += p * value(successor, agent, depth)\n return expVal\n\n def maxValue(gameState, agent, depth):\n maxVal = -float('inf')\n successors = [gameState.generateSuccessor(agent, action) for action in gameState.getLegalActions(agent)]\n agent = 1\n for successor in successors:\n maxVal = max(maxVal, value(successor, agent, depth))\n return maxVal\n\n def value(gameState, agent, depth):\n if gameState.isWin() or gameState.isLose() or depth == self.depth:\n return self.evaluationFunction(gameState)\n if agent == 0:\n return maxValue(gameState, agent, depth)\n else:\n return expValue(gameState, agent, depth)\n\n legalMoves = gameState.getLegalActions(0)\n successors = [gameState.generateSuccessor(0, action) for action in legalMoves]\n scores = [value(successor, 1, 0) for successor in successors]\n bestScore = max(scores)\n bestIndices = [index for index in range(len(scores)) if scores[index] == bestScore]\n chosenIndex = bestIndices[0]\n return legalMoves[chosenIndex]" ]
[ "0.6783421", "0.6575619", "0.646106", "0.64347804", "0.63848096", "0.63675916", "0.63455665", "0.6339959", "0.63341564", "0.6327008", "0.63198864", "0.6301124", "0.62738556", "0.627058", "0.62632287", "0.6244055", "0.62065756", "0.61828387", "0.61823547", "0.61772585", "0.6173983", "0.61669296", "0.61538064", "0.61484027", "0.6127061", "0.61233616", "0.61059535", "0.60943824", "0.6087218", "0.60869104", "0.60853606", "0.6082793", "0.60758567", "0.6071549", "0.606899", "0.60601974", "0.60476196", "0.6043982", "0.6031975", "0.6028424", "0.6019558", "0.6011241", "0.5985463", "0.5979874", "0.5972838", "0.5971176", "0.5965352", "0.5965314", "0.5963718", "0.5962597", "0.59464157", "0.5941079", "0.59399045", "0.5938429", "0.59179205", "0.5915579", "0.59105825", "0.59069866", "0.5906959", "0.59020007", "0.5893103", "0.5884849", "0.58827627", "0.5871812", "0.5870475", "0.5868657", "0.58661735", "0.5864056", "0.585915", "0.5851948", "0.58369744", "0.5836456", "0.5832366", "0.5824907", "0.5821709", "0.58169746", "0.5815967", "0.58089924", "0.58056897", "0.58056897", "0.58056897", "0.58056897", "0.58056897", "0.58056897", "0.58033884", "0.58021575", "0.579606", "0.579111", "0.57874966", "0.57874966", "0.5778526", "0.57743335", "0.5770822", "0.5764475", "0.5760264", "0.5755405", "0.5750724", "0.5747549", "0.57460856", "0.57399774" ]
0.65270555
2
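The three trailing fields of each record tie together: two of the negative_scores above (0.6783421 and 0.6575619) exceed the document_score of 0.65270555, which is consistent with a zero-indexed document_rank of 2. A minimal sketch of that consistency check, assuming NumPy and assuming (this is not a documented convention) that the rank counts the negatives that out-score the positive document:

import numpy as np

# Values copied from the record above; only the first few negatives shown.
document_score = 0.65270555
negative_scores = np.array([0.6783421, 0.6575619, 0.646106])

# Hypothetical rank convention: number of negatives scoring above the document.
rank = int((negative_scores > document_score).sum())
print(rank)  # -> 2, agreeing with the document_rank field of this record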
Unfolds a tensor T into a matrix, taking the dimension "dim" of T as the first dimension of the matrix and flattening all the other dimensions into the second dimension of the matrix. dim starts from 0.
def unfold(T, dim):\n Tm=np.moveaxis(T, dim, 0)\n return Tm.reshape(T.shape[dim],-1)
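A quick usage sketch for this unfold (assuming numpy is imported as np, as the body implies): for a tensor of shape (2, 3, 4), unfolding along dim=1 moves that axis to the front and flattens the rest, giving a 3 x 8 matrix.

import numpy as np

T = np.arange(24).reshape(2, 3, 4)  # small 3-way tensor
M = unfold(T, 1)                    # dimension 1 becomes the rows
print(M.shape)                      # (3, 8): T.shape[1] rows, 2*4 = 8 columns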
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def flatten(tensor):\n C = tensor.size(1)\n # new axis order\n axis_order = (1, 0) + tuple(range(2, tensor.dim()))\n # Transpose: (N, C, D, H, W) -> (C, N, D, H, W)\n transposed = tensor.permute(axis_order)\n # Flatten: (C, N, D, H, W) -> (C, N * D * H * W)\n return transposed.contiguous().view(C, -1)", "def _flatten(self, inputT, size):\n return tf.reshape(inputT, (-1, size))", "def flatten(tensor):\n # number of channels\n C = tensor.size(1)\n # new axis order\n axis_order = (1, 0) + tuple(range(2, tensor.dim()))\n # Transpose: (N, C, D, H, W) -> (C, N, D, H, W)\n transposed = tensor.permute(axis_order)\n # Flatten: (C, N, D, H, W) -> (C, N * D * H * W)\n return transposed.contiguous().view(C, -1)", "def flatten(x_tensor):\n import numpy as np\n #print(x_tensor)\n\n shape = x_tensor.get_shape().as_list() # a list: [None, height, width, channels]\n dim = np.prod(shape[1:]) # dim = prod(height,width,channels) \n flattened_tensor = tf.reshape(x_tensor, [-1, dim]) # -1 means \"all\"\n #print(flattened_tensor)\n return flattened_tensor", "def flatten(x_tensor):\n old_shape = x_tensor.get_shape().as_list()\n new_shape = [-1, old_shape[1] * old_shape[2] * old_shape[3]]\n return tf.reshape(x_tensor, new_shape)", "def flatten(x_tensor):\n # TODO: Implement Function\n \n shape = x_tensor.get_shape().as_list()\n reshape = tf.reshape(x_tensor, (-1, shape[1] * shape[2] * shape[3]))\n \n return reshape", "def flatten(x_tensor):\n # TODO: Implement Function\n b, w, h, d = x_tensor.get_shape().as_list()\n img_size = w * h * d\n return tf.reshape(x_tensor, [-1, img_size])", "def flatten(x):\n all_dims_exc_first = np.prod([v.value for v in x.get_shape()[1:]])\n o = tf.reshape(x, [-1, all_dims_exc_first])\n return o", "def reshape_to_matrix(input_tensor):\n ndims = input_tensor.shape.ndims\n if ndims < 2:\n raise ValueError(\"Input tensor must have at least rank 2. Shape = %s\" %\n (input_tensor.shape))\n if ndims == 2:\n return input_tensor\n\n width = input_tensor.shape[-1]\n output_tensor = tf.reshape(input_tensor, [-1, width])\n return output_tensor", "def reshape_to_matrix(input_tensor):\n ndims = input_tensor.shape.ndims\n if ndims < 2:\n raise ValueError(\"Input tensor must have at least rank 2. Shape = %s\" %\n (input_tensor.shape))\n if ndims == 2:\n return input_tensor\n\n width = input_tensor.shape[-1]\n output_tensor = tf.reshape(input_tensor, [-1, width])\n return output_tensor", "def reshape_to_matrix(self, input_tensor):\n ndims = input_tensor.shape.ndims\n if ndims < 2:\n raise ValueError(\"Input tensor must have at least rank 2.\"\n \"Shape = %s\" % (input_tensor.shape))\n if ndims == 2:\n return input_tensor\n\n width = input_tensor.shape[-1]\n output_tensor = tf.reshape(input_tensor, [-1, width])\n return output_tensor", "def reshape_to_matrix(input_tensor):\n ndims = input_tensor.shape.ndims\n if ndims < 2:\n raise ValueError(\"Input tensor must have at least rank 2. 
Shape = %s\" %\n (input_tensor.shape))\n if ndims == 2:\n return input_tensor\n \n width = input_tensor.shape[-1]\n output_tensor = tf.reshape(input_tensor, [-1, width])\n return output_tensor", "def layer_flatten(x_tensor):\n return tf.reshape(\n x_tensor,\n [-1, (x_tensor.shape[1] * x_tensor.shape[2] * x_tensor.shape[3]).value]\n )", "def flatten(x_tensor):\n # TODO: Implement Function\n return tf.contrib.layers.flatten(x_tensor)", "def unfold(tensor, mode):\n return np.moveaxis(tensor, mode, 0).reshape((tensor.shape[mode], -1))", "def reshape_0(tensor):\n row = tf.shape(tensor)[0]\n og_shape = tensor.get_shape().as_list()\n shape_list = [row, og_shape[1], og_shape[2], 1]\n out = tf.reshape(tensor, shape_list)\n return out", "def reshape(tensor):\n row = tf.shape(tensor)[0]\n shape_list = [row, -1]\n out = tf.reshape(tensor, shape_list)\n return out", "def _unmerge_beam_dim(tensor: Any, batch_size: int, beam_size: int) ->Any:\n if not isinstance(tensor, torch.Tensor):\n return tensor\n shape = list(tensor.size())\n new_shape = [batch_size] + [beam_size] + shape[1:]\n return tensor.view(tuple(new_shape))", "def flatten(x, name=\"flatten\"):\n all_dims_exc_first = np.prod([v.value for v in x.get_shape()[1:]])\n o = tf.reshape(x, [-1, all_dims_exc_first], name=name)\n return o", "def flatten(x, ndim=1, name='Flatten'):\n with tf.name_scope(name, values=[x]):\n shape = x.get_shape()\n total_dim = len(shape)\n\n if total_dim == ndim:\n return x\n elif total_dim < ndim:\n raise ValueError('Attempt to flatten \"x\" to %r dimensions, but \"x\" '\n 'only has %r dimensions.' % (ndim, total_dim))\n\n if shape.is_fully_defined():\n # all the dimensions are fixed, thus we can use the static shape.\n shape = shape.as_list()[:ndim - 1] + [-1]\n else:\n # the shape is dynamic, so we have to generate a dynamic flatten\n # shape.\n shape = tf.concat(0, [tf.shape(x)[:ndim - 1], [-1]])\n\n return tf.reshape(x, shape)", "def flatten(x_tensor):\n with tf.name_scope('input_reshape'):\n x = x_tensor.get_shape().as_list()[1]\n y = x_tensor.get_shape().as_list()[2]\n z = x_tensor.get_shape().as_list()[3]\n image_shaped_input = tf.reshape(x_tensor, [-1, x*y*z])\n return image_shaped_input", "def tensor_train_matrix(tensor, rank, svd=\"truncated_svd\", verbose=False):\n order = tl.ndim(tensor)\n n_input = order // 2 # (n_output = n_input)\n\n if tl.ndim(tensor) != n_input * 2:\n msg = \"The tensor should have as many dimensions for inputs and outputs, i.e. 
order should be even \"\n msg += f\"but got a tensor of order tl.ndim(tensor)={order} which is odd.\"\n raise ValueError(msg)\n\n in_shape = tl.shape(tensor)[:n_input]\n out_shape = tl.shape(tensor)[n_input:]\n\n if n_input == 1:\n # A TTM with a single factor is just a matrix...\n return TTMatrix([tensor.reshape(1, in_shape[0], out_shape[0], 1)])\n\n new_idx = list(\n [\n idx\n for tuple_ in zip(range(n_input), range(n_input, 2 * n_input))\n for idx in tuple_\n ]\n )\n new_shape = list([a * b for (a, b) in zip(in_shape, out_shape)])\n tensor = tl.reshape(tl.transpose(tensor, new_idx), new_shape)\n\n factors = tensor_train(tensor, rank, svd=svd, verbose=verbose).factors\n for i in range(len(factors)):\n factors[i] = tl.reshape(\n factors[i], (factors[i].shape[0], in_shape[i], out_shape[i], -1)\n )\n\n return TTMatrix(factors)", "def flatten_reshape(variable):\n dim = 1\n for d in variable.get_shape()[1:].as_list():\n dim *= d\n return tf.reshape(variable, shape=[-1, dim])", "def infer_leading_dims(tensor, dim):\n lead_dim = tensor.dim() - dim\n assert lead_dim in (0, 1, 2)\n if lead_dim == 2:\n T, B = tensor.shape[:2]\n else:\n T = 1\n B = 1 if lead_dim == 0 else tensor.shape[0]\n shape = tensor.shape[lead_dim:]\n return lead_dim, T, B, shape", "def _unsqueeze_ft(tensor):\n return tensor.unsqueeze(0).unsqueeze(-1)", "def unsqueeze_ft(tensor):\n return tensor.unsqueeze(0).unsqueeze(-1)", "def batch_flatten(this,x):\n shape = x.get_shape().as_list()[1:]\n if None not in shape:\n return tf.reshape(x, [-1, int(np.prod(shape))])\n return tf.reshape(x, tf.stack([tf.shape(x)[0], -1]))", "def batch_flatten(this,x):\n shape = x.get_shape().as_list()[1:]\n if None not in shape:\n return tf.reshape(x, [-1, int(np.prod(shape))])\n return tf.reshape(x, tf.stack([tf.shape(x)[0], -1]))", "def batch_flatten(x):\n shape = x.get_shape().as_list()[1:]\n if None not in shape:\n return tf.reshape(x, [-1, int(np.prod(shape))])\n return tf.reshape(x, tf.stack([tf.shape(x)[0], -1]))", "def batch_flatten(x):\n shape = x.get_shape().as_list()[1:]\n if None not in shape:\n return tf.reshape(x, [-1, int(np.prod(shape))])\n return tf.reshape(x, tf.stack([tf.shape(x)[0], -1]))", "def flatten(self, x_tensor):\n shape = x_tensor.get_shape().as_list()\n assert len(shape) >= 4, 'shape of image is not correct'\n single_image_dimension = shape[1] * shape[2] * shape[3]\n x_tensor = tf.reshape(x_tensor, [-1, single_image_dimension])\n return x_tensor\n # return tf.contrib.layers.flatten(x_tensor)", "def flatten(x):\n return reshape(x, (x.shape[0], -1))", "def beinflumatred(infl_mat):\n shape_mat = np.shape(infl_mat)\n len_mat = shape_mat[0] * shape_mat[1]\n reduced_infl_mat = np.zeros((len_mat, len_mat))\n counter = 0\n for i in range(0, shape_mat[0]):\n for j in range(0, shape_mat[1]):\n reduced_infl_mat[counter, :] = \\\n np.reshape(infl_mat[i, j, :, :], len_mat)\n counter += 1\n return reduced_infl_mat", "def ttm(t, m, k):\n\n dim_list = [] # initialize a list to save dimension index to transpose the tensor reshapped from 2D matrix\n shape_list = [] # initialize a list to save the dimensions to reshape 2D matrix back to tensor\n total_dim = len(t.shape)\n for i in range(total_dim):\n dim_list.append((k - i) % total_dim)\n shape_list.append(t.shape[(k - i) % total_dim])\n dim_order = tuple(dim_list)\n shape_list[0] = m.shape[0]\n\n t_unfold = unfold_axis(t, k)\n t_mul = np.matmul(m, t_unfold)\n t_mul = np.reshape(t_mul,tuple(shape_list))\n t_mul = np.transpose(t_mul, dim_order)\n\n return t_mul", "def 
restore_leading_dims(tensors, lead_dim, T=1, B=1):\n is_seq = isinstance(tensors, (tuple, list))\n tensors = tensors if is_seq else (tensors,)\n if lead_dim == 2: # (Put T dim.)\n tensors = tuple(t.view((T, B) + t.shape[1:]) for t in tensors)\n if lead_dim == 0: # (Remove B=1 dim.)\n assert B == 1\n tensors = tuple(t.squeeze(0) for t in tensors)\n return tensors if is_seq else tensors[0]", "def lift_to_dimension(A,dim):\n\n current_dim = len(A.shape)\n if current_dim>dim:\n raise ValueError('Can only add dimensions, but not remove them')\n\n if current_dim==dim:\n return A\n else:\n return A.reshape([1]*(dim-current_dim)+list(A.shape))", "def _merge_beam_dim(tensor: Any) ->Any:\n if not isinstance(tensor, torch.Tensor):\n return tensor\n shape = list(tensor.size())\n shape[0] *= shape[1]\n shape.pop(1)\n return tensor.view(tuple(shape))", "def _flatten(prev_layer):\n\n with tf.name_scope('flatten'):\n shape = int(np.prod(prev_layer.get_shape()[1:]))\n return tf.reshape(prev_layer, [-1, shape])", "def flatten(X):\n N = X.shape[-1]\n flat = np.zeros((N, 3072))\n for idx, i in enumerate(range(N)):\n # if not idx:\n # print(X[:,:,:,i].reshape(3072))\n flat[i] = X[:,:,:,i].reshape(3072)\n return flat", "def _flat(D):\n if issparse(D):\n raise ValueError(\"Cannot flatten sparse matrix.\")\n d_flat = np.array(D).flatten()\n return d_flat", "def gram_matrix(input_tensor):\r\n\r\n temp = tf.squeeze(input_tensor)\r\n return tf.matmul(temp, tf.transpose(temp))", "def _flatten_batch(self, matrix_tups):\n out_vecs = []\n for t in matrix_tups:\n for v in t:\n new_shape = (v.shape[0],)\n if len(v.shape) > 1:\n new_shape = new_shape + (np.prod(v.shape[1:]),)\n out_vecs.append(v.reshape(new_shape))\n return jnp.concatenate(out_vecs, axis=1)", "def tucker_to_unfolded(tucker_tensor, mode=0, skip_factor=None, transpose_factors=False):\n return unfold(tucker_to_tensor(tucker_tensor, skip_factor=skip_factor, transpose_factors=transpose_factors), mode)", "def squeeze_expand_dim(tensor, axis):\n tensor = torch.squeeze(tensor)\n if len(list(tensor.size())) < 4:\n return tensor.unsqueeze(axis)\n else:\n return tensor", "def matrix_units(dim):\n return [_np.reshape(unit_vector(a, dim**2), (dim, dim)) for a in range(dim**2)]", "def flatten_matrix(matrix):\n\n vector = matrix.flatten(1)\n vector = vector.reshape(1, len(vector))\n return vector", "def batch_diag_part(in_tensor, batch_size):\n tensor_list = tf.split(split_dim=0, num_split=batch_size, value=in_tensor)\n tensor_list = [tf.expand_dims(tf.diag_part(tf.squeeze(t, [0])), 0) for t in tensor_list]\n return tf.concat(0, tensor_list)", "def expand_dim_for_tensor_list(tensor_list, dim_array):\n res_tensor_list = []\n for tensor in tensor_list:\n res_tensor = tensor\n for dim in dim_array:\n res_tensor = tf.expand_dims(res_tensor, dim)\n res_tensor_list.append(res_tensor)\n\n return res_tensor_list", "def unfold_axis(data, k):\n\n target_dim = k\n total_dim = len(data.shape)\n\n dim_list = []\n for i in range(total_dim):\n dim_list.append((target_dim - i) % total_dim)\n dim_order = tuple(dim_list)\n\n data_unfold = np.transpose(data,dim_order)\n data_unfold = np.reshape(data_unfold,[data.shape[k],int(data.size/data.shape[k])])\n return data_unfold", "def _unroll_block_matrix(mat1: tf.Tensor) -> tf.Tensor:\n n_dim, m1, n1 = mat1.get_shape().as_list()\n mat1_rsh = tf.reshape(mat1, [n_dim, m1, 1, n1])\n mat2 = tf.eye(n_dim, dtype=tf.float64)\n mat2_rsh = tf.reshape(mat2, [n_dim, 1, n_dim, 1])\n return tf.reshape(mat1_rsh * mat2_rsh, [n_dim * m1, n_dim * n1])", "def 
_expand_dims(st, axis):\n if not isinstance(st, structured_tensor.StructuredTensor):\n return tf.expand_dims(st, axis)\n nn_axis = _expand_dims_nonnegative_axis(axis, st.rank)\n if st.rank == 0:\n return _expand_dims_scalar(st)\n if nn_axis == 0:\n # Here, we can add a dimension 1 at the front.\n nrows = st.nrows()\n return st.partition_outer_dimension(\n RowPartition.from_uniform_row_length(nrows, nrows))\n elif nn_axis == 1:\n # Again, by partitioning the first dimension into vectors of length 1,\n # we can solve this problem.\n nrows = st.nrows()\n return st.partition_outer_dimension(\n RowPartition.from_uniform_row_length(\n tf.constant(1, dtype=nrows.dtype), nrows))\n else:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Unimplemented: non-negative axis > 1 for _expand_dims\")", "def transpose(x: torch.Tensor, dims):\n _dims = list(dims)\n for i in range(len(_dims)):\n if _dims[i] != i:\n x = x.transpose(i, _dims[i])\n j = _dims.index(i)\n _dims[i], _dims[j] = i, _dims[i]\n return x", "def test_flatten3D():\n with tf.Session().as_default():\n tensor2d = tf.constant([[1, 2, 3]], dtype=tf.float32)\n with pytest.raises(AssertionError):\n output = flatten3D(tensor2d)\n\n tensor3d = tf.constant([[[1, 2, 3]]], dtype=tf.float32)\n assert tensor3d == flatten3D(tensor3d)\n\n init_shape = (3, 17, 23, 3, 5)\n expected_shape = (3, 17, 23*3*5)\n\n tensor5d = tf.constant(np.arange(0, np.prod(init_shape)).reshape(init_shape), tf.int32)\n assert tensor5d.eval().shape == init_shape\n output = flatten3D(tensor5d)\n assert output.eval().shape == expected_shape", "def flatten(inputs, is_batched=True, scope=None):\n with tf.name_scope(scope, 'flatten'):\n shape = get_shape(inputs)\n if is_batched:\n num_units = np.prod(shape[1:])\n return tf.reshape(inputs, [-1, num_units])\n else:\n num_units = np.prod(shape)\n return tf.reshape(inputs, [num_units])", "def mean_flat(tensor):\n return tensor.mean(dim=list(range(1, len(tensor.shape))))", "def tile(x: Tensor, count: int, dim=0) -> Tensor:\n if isinstance(x, tuple):\n h, c = x\n return tile(h, count, dim=dim), tile(c, count, dim=dim)\n\n perm = list(range(len(x.size())))\n if dim != 0:\n perm[0], perm[dim] = perm[dim], perm[0]\n x = x.permute(perm).contiguous()\n out_size = list(x.size())\n out_size[0] *= count\n batch = x.size(0)\n x = (\n x.view(batch, -1)\n .transpose(0, 1)\n .repeat(count, 1)\n .transpose(0, 1)\n .contiguous()\n .view(*out_size)\n )\n if dim != 0:\n x = x.permute(perm).contiguous()\n return x", "def tf_flatten(x):\n return tf.contrib.layers.flatten(x)", "def inflate(tensor, times, dim):\n repeat_dims = [1] * tensor.dim()\n repeat_dims[dim] = times\n return tensor.repeat(*repeat_dims)", "def flatten(x):\n return x.view(x.size(0), -1)", "def unstack(S):\n\tB, I, J, C, M, _ = S.shape\n\tT = S.reshape(B, I*J, C*M*M).permute(0,2,1)\n\treturn F.fold(T, (I*M, J*M), M, stride=M)", "def tensor_ring(input_tensor, rank, mode=0, verbose=False):\n rank = validate_tr_rank(tl.shape(input_tensor), rank=rank)\n n_dim = len(input_tensor.shape)\n\n # Change order\n if mode:\n order = tuple(range(mode, n_dim)) + tuple(range(mode))\n input_tensor = tl.transpose(input_tensor, order)\n rank = rank[mode:] + rank[:mode]\n\n tensor_size = input_tensor.shape\n\n factors = [None] * n_dim\n\n # Getting the first factor\n unfolding = tl.reshape(input_tensor, (tensor_size[0], -1))\n\n n_row, n_column = unfolding.shape\n if rank[0] * rank[1] > min(n_row, n_column):\n raise ValueError(\n f\"rank[{mode}] * rank[{mode + 1}] = {rank[0] * rank[1]} is 
 larger than \"\n            f\"first matricization dimension {n_row}×{n_column}.\\n\"\n            \"Failed to compute first factor with specified rank. \"\n            \"Reduce specified ranks or change first matricization `mode`.\"\n        )\n\n    # SVD of unfolding matrix\n    U, S, V = tl.partial_svd(unfolding, rank[0] * rank[1])\n\n    # Get first TR factor\n    factor = tl.reshape(U, (tensor_size[0], rank[0], rank[1]))\n    factors[0] = tl.transpose(factor, (1, 0, 2))\n    if verbose is True:\n        print(\"TR factor \" + str(mode) + \" computed with shape \" + str(factor.shape))\n\n    # Get new unfolding matrix for the remaining factors\n    unfolding = tl.reshape(S, (-1, 1)) * V\n    unfolding = tl.reshape(unfolding, (rank[0], rank[1], -1))\n    unfolding = tl.transpose(unfolding, (1, 2, 0))\n\n    # Getting the TR factors up to n_dim - 1\n    for k in range(1, n_dim - 1):\n\n        # Reshape the unfolding matrix of the remaining factors\n        n_row = int(rank[k] * tensor_size[k])\n        unfolding = tl.reshape(unfolding, (n_row, -1))\n\n        # SVD of unfolding matrix\n        n_row, n_column = unfolding.shape\n        current_rank = min(n_row, n_column, rank[k + 1])\n        U, S, V = tl.partial_svd(unfolding, current_rank)\n        rank[k + 1] = current_rank\n\n        # Get kth TR factor\n        factors[k] = tl.reshape(U, (rank[k], tensor_size[k], rank[k + 1]))\n\n        if verbose is True:\n            print(\n                \"TR factor \"\n                + str((mode + k) % n_dim)\n                + \" computed with shape \"\n                + str(factors[k].shape)\n            )\n\n        # Get new unfolding matrix for the remaining factors\n        unfolding = tl.reshape(S, (-1, 1)) * V\n\n    # Getting the last factor\n    prev_rank = unfolding.shape[0]\n    factors[-1] = tl.reshape(unfolding, (prev_rank, -1, rank[0]))\n\n    if verbose is True:\n        print(\n            \"TR factor \"\n            + str((mode - 1) % n_dim)\n            + \" computed with shape \"\n            + str(factors[-1].shape)\n        )\n\n    # Reorder factors to match input\n    if mode:\n        factors = factors[-mode:] + factors[:-mode]\n\n    return TRTensor(factors)", "def reshape_from_matrix(output_tensor, orig_shape_list):\n  if len(orig_shape_list) == 2:\n    return output_tensor\n\n  output_shape = get_shape_list(output_tensor)\n\n  orig_dims = orig_shape_list[0:-1]\n  width = output_shape[-1]\n\n  return tf.reshape(output_tensor, orig_dims + [width])", "def flatten_inputs(self, inputs):\n        ndim = inputs.ndim\n        if ndim == 2:\n            return inputs\n        elif ndim == 4:\n            # Maybe add a check\n            inputs_reshaped = inputs.ravel().reshape((self._learning_batch_size,\n                                                      self._input_size)).T\n            return inputs_reshaped\n        else:\n            raise Exception('Wrong inputs dimension : it should be a matrix or a 4D tensor')", "def Stirling2Matrix(dim): \r\n    mat_space = MatrixSpace(CombinatorialScalarRing(),dim)\r\n    l = list()\r\n    for row in range(dim):\r\n        l.append(_stirling2_row(row,dim))\r\n    return mat_space(l)", "def reshape_from_matrix(output_tensor, orig_shape_list):\n  if len(orig_shape_list) == 2:\n    return output_tensor\n\n  output_shape = get_shape_list(output_tensor)\n\n  orig_dims = orig_shape_list[0:-1]\n  width = output_shape[-1]\n\n  return tf.reshape(output_tensor, orig_dims + [width])", "def tdim(dim):\n    transformers = [Quantize()]\n    tdim = TransformedDimension(Compose(transformers, dim.type), dim)\n    return tdim", "def transform(tensor):\n    L, W, D = tensor.shape\n    return tensor.transpose(1, 0, 2).reshape(W, L*D).mean(axis=0)", "def unstack_batch(tensor_dict):\n    # # extract tensor from tuple. 
TODO: figure out where box tuple comes from?\n for key in tensor_dict.keys():\n if key == \"gt_boxes\":\n tensor_dict[\"gt_boxes\"] = tensor_dict[\"gt_boxes\"][0]\n unbatched_tensor_dict = {key: tf.unstack(tensor) for key, tensor in tensor_dict.items()}\n # remove padding along 'num_boxes' dimension of the gt tensors\n num_gt_list = unbatched_tensor_dict[\"num_gt_boxes\"]\n unbatched_unpadded_tensor_dict = {}\n for key in unbatched_tensor_dict:\n if key == \"num_gt_boxes\":\n continue\n unpadded_tensor_list = []\n for num_gt, padded_tensor in zip(num_gt_list, unbatched_tensor_dict[key]):\n tensor_shape = shape_utils.combined_static_and_dynamic_shape(padded_tensor)\n slice_begin = tf.zeros(len(tensor_shape), dtype=tf.int32)\n slice_size = tf.stack([num_gt] + [-1 if dim is None else dim for dim in tensor_shape[1:]])\n unpadded_tensor = tf.slice(padded_tensor, slice_begin, slice_size)\n unpadded_tensor_list.append(unpadded_tensor)\n unbatched_unpadded_tensor_dict[key] = unpadded_tensor_list\n return unbatched_unpadded_tensor_dict", "def _flatten_and_concat(x, batch_shape, dtype):\n # For convenience.\n if x is None:\n return x\n\n def _reshape_part(part):\n part = tf.cast(part, dtype)\n new_shape = ps.concat(\n [batch_shape, [-1]],\n axis=-1,\n )\n return tf.reshape(part, ps.cast(new_shape, tf.int32))\n\n x = tf.nest.map_structure(_reshape_part, x)\n return tf.concat(tf.nest.flatten(x), axis=-1)", "def flatten_layers(data):\n return data.reshape((data.shape[0], data.shape[1], -1))", "def _flatten_outer_dims(logits):\n rank = array_ops.rank(logits)\n last_dim_size = array_ops.slice(\n array_ops.shape(logits), [math_ops.subtract(rank, 1)], [1])\n output = array_ops.reshape(logits, array_ops.concat([[-1], last_dim_size], 0))\n\n # Set output shape if known.\n if not context.executing_eagerly():\n shape = logits.get_shape()\n if shape is not None and shape.dims is not None:\n shape = shape.as_list()\n product = 1\n product_valid = True\n for d in shape[:-1]:\n if d is None:\n product_valid = False\n break\n else:\n product *= d\n if product_valid:\n output_shape = [product, shape[-1]]\n output.set_shape(output_shape)\n\n return output", "def flat_shuffle(tensor):\n shape_ = tensor.size()\n flat_tensor = tensor.view(-1)\n shuffled_flat_tensor = shuffle(flat_tensor)\n return shuffled_flat_tensor.view(shape_)", "def gather_from_all(tensor: torch.Tensor) -> torch.Tensor:\n if tensor.ndim == 0:\n # 0 dim tensors cannot be gathered. 
so unsqueeze\n tensor = tensor.unsqueeze(0)\n\n if is_distributed_training_run():\n tensor, orig_device = convert_to_distributed_tensor(tensor)\n gathered_tensors = GatherLayer.apply(tensor)\n gathered_tensors = [\n convert_to_normal_tensor(_tensor, orig_device)\n for _tensor in gathered_tensors\n ]\n else:\n gathered_tensors = [tensor]\n gathered_tensor = torch.cat(gathered_tensors, 0)\n return gathered_tensor", "def flatten(self, input_layer):\n # Note: This ensures the output order matches that of NHWC networks\n input_layer = self._to_nhwc(input_layer)\n input_shape = input_layer.get_shape().as_list()\n num_inputs = input_shape[1]*input_shape[2]*input_shape[3]\n return tf.reshape(input_layer, [-1, num_inputs], name='flatten')", "def unflatten(self, x):\n dims = [c.flat_dim for c in self.spaces]\n flat_x = np.split(x, np.cumsum(dims)[:-1])\n return tuple(c.unflatten(xi) for c, xi in zip(self.spaces, flat_x))", "def mean_flat(tensor):\n return tensor.mean(axis=list(range(1, len(tensor.shape))))", "def _reshape_like(mat: Tensor, shape: Tuple[int]) -> Tensor:\n return mat.reshape(-1, *shape)", "def tensoras(tensor):\r\n\r\n if pytorch.is_dense(tensor):\r\n m = tensor.detach().cpu().numpy()\r\n if m.ndim == 0:\r\n m = m.item()\r\n elif pytorch.is_sparse(tensor):\r\n m = pytorch.sparse_tensor_to_sparse_adj(tensor)\r\n elif gg.TF_ENABLED and tensorflow.is_dense(tensor):\r\n m = tensor.numpy()\r\n elif gg.TF_ENABLED and tensorflow.is_sparse(tensor):\r\n m = tensorflow.sparse_tensor_to_sparse_adj(tensor)\r\n elif isinstance(tensor, np.ndarray) or sp.isspmatrix(tensor):\r\n m = tensor.copy()\r\n else:\r\n m = np.asarray(tensor)\r\n return m", "def _expand(x, ndim, axis=0):\n while F.rank(x) < ndim:\n x = F.expand_dims(x, axis)\n return x", "def sparse_dim_multiply(\n A: Tensor,\n x: Tensor,\n dim: int\n) -> Tensor:\n idx = A._indices()[dim]\n vals = A._values()\n vals *= x[idx]\n return A", "def squeeze_batch_dim(nest: types.NestedTensor) -> types.NestedTensor:\n return tree.map_structure(lambda x: tf.squeeze(x, axis=0), nest)", "def dense_to_sparse(self, tensor: tf.Tensor) -> tf.Tensor:\n tensor_shape = tensor.shape\n expand_dims = len(tensor_shape) == 3\n\n tensor = tf.gather_nd(tf.reshape(tensor, (-1, 1)), self.observations_index)\n if expand_dims:\n tensor = tf.expand_dims(tensor, axis=-1)\n return tensor", "def make_result_matrix(T):\n result_matrix = []\n # Uniform sampled distribution\n distribution = np.random.choice([1, 0], T, p=[.1, .9])\n place_holder = np.random.randn(T)\n place_holder[distribution] = np.nan # Masking\n\n # This block is to un-flatten the 25 element matrix into a 5*5 matrix\n for j in range(T):\n temp = []\n for i in range(T):\n temp.append(place_holder[i])\n result_matrix.append(temp)\n\n result_matrix = np.array(result_matrix)\n\n return result_matrix", "def irtranspose(x: torch.Tensor, dims):\n _dims = list(dims)\n _ir_dims = [_dims.index(i) for i in range(len(_dims))]\n return transpose(x, _ir_dims)", "def permute2st(v, ndim_en=1):\n nd = v.ndim\n return v.transpose([*range(-ndim_en, 0)] + [*range(nd - ndim_en)])", "def _make_flatten_uflatten(g_td, y_train):\n output_dimension = y_train.shape[-1]\n\n def fl(fx):\n \"\"\"Flatten outputs.\"\"\"\n return np.reshape(fx, (-1,))\n\n def ufl(fx):\n \"\"\"Unflatten outputs.\"\"\"\n return np.reshape(fx, (-1, output_dimension))\n\n if y_train.size > g_td.shape[-1]:\n out_dim, ragged = divmod(y_train.size, g_td.shape[-1])\n if ragged or out_dim != output_dimension:\n raise ValueError('The batch size of `y_train` must be 
the same as the'\n ' last dimension of `g_td`')\n fl = lambda x: x\n ufl = lambda x: x\n return fl, ufl", "def flatten(resp):\n with tf.name_scope(\"rsa-flatten\"):\n resp = tf.convert_to_tensor(resp)\n rshape = tf.shape(resp)\n return tf.reshape(resp, [rshape[0], tf.reduce_prod(rshape[1:])])", "def matT(mat):\n shape=matShape(mat)\n return [[matGet(mat,y,x) for y in range(shape[0])] \\\n for x in range(shape[1])]", "def expand(tensor_var, size):\r\n # Corner case that I might use in an optimization\r\n if size == 0:\r\n return tensor_var\r\n shapes = [tensor_var.shape[x] for x in xrange(tensor_var.ndim)]\r\n zeros_shape = [size + shapes[0]] + shapes[1:]\r\n empty = tensor.zeros(zeros_shape,\r\n dtype=tensor_var.dtype)\r\n return tensor.set_subtensor(empty[:shapes[0]], tensor_var)", "def inverse_flatten_concat(flat_vector, original_structure):\n location, split_tensors = 0, []\n for orig_t in tf.nest.flatten(original_structure):\n length = tf.size(orig_t)\n split_vector = tf.slice(flat_vector, [location], [length])\n split_tensors.append(tf.reshape(split_vector, orig_t.shape))\n location += length\n return tf.nest.pack_sequence_as(original_structure, split_tensors)", "def inflate_tensor(X, times):\n sizes = X.size()\n\n if X.dim() == 1:\n X = X.unsqueeze(1)\n\n repeat_times = [1] * X.dim()\n repeat_times[1] = times\n X = X.repeat(*repeat_times).view(-1, *sizes[1:])\n return X", "def trans(array,dim):\n return array[filter(lambda x: x != dim,range(len(array)) ) ]", "def _hadamard_step(x, dim):\n x_shape = x.shape.as_list()\n x = tf.reshape(x, [-1, 2]) # Reshape so that we have a matrix.\n x = tf.matmul(x, h_core) # Multiply.\n x = tf.reshape(x, [-1, dim // 2, 2]) # Reshape to rank-3.\n x = tf.transpose(x, perm=permutation) # Swap last two dimensions.\n x.set_shape(x_shape) # Failed shape inference in tf.while_loop.\n return x", "def unstack(field: Field, dim: str) -> tuple:\n size = field.shape.get_size(dim)\n if isinstance(size, Tensor):\n size = math.min(size) # unstack StaggeredGrid along x or y\n return tuple([field[{dim: i}] for i in range(size)])", "def unflatten_beams(x):\n unflat_shape = [batch_size, self.num_beams] + x.shape.as_list()[1:]\n return tf.reshape(x, shape=unflat_shape)", "def mesh_unflatten(x, vertice_size):\r\n verticeXdims, N = x.shape\r\n assert 3*vertice_size*N == verticeXdims*N, \"wrong shape\"\r\n x = x.reshape(3, vertice_size, N)\r\n x = np.transpose(x, axes=[2,1,0])\r\n\r\n return x", "def flatten_image(x):\n *batch_shape, h, w, c = x.shape\n return x.reshape((*batch_shape, h * w * c))", "def _flatten(self, matrix_tups):\n out_vecs = [v.flatten() for t in matrix_tups for v in t]\n return jnp.concatenate(out_vecs)", "def flatten_concat(structure):\n flattened_as_list = []\n for x in tf.nest.flatten(structure):\n with tf.control_dependencies([tf.debugging.assert_rank_at_least(x, 1)]):\n flattened_as_list.append(tf.reshape(x, [-1]))\n return tf.concat(flattened_as_list, axis=0)", "def _eager_reshape(tensor, shape, ctx):\n attr_t = tensor._datatype_enum() # pylint: disable=protected-access\n attr_tshape, (shape,) = execute.args_to_matching_eager(\n [shape], ctx, [dtypes.int32, dtypes.int64], dtypes.int32)\n inputs_flat = [tensor, shape]\n attrs = (\"T\", attr_t, \"Tshape\", attr_tshape)\n [result] = execute.execute(\n b\"Reshape\", 1, inputs=inputs_flat, attrs=attrs, ctx=ctx)\n return result" ]
[ "0.7043688", "0.6872559", "0.6866148", "0.6625046", "0.6595738", "0.6544334", "0.6462534", "0.6318924", "0.63155955", "0.6307684", "0.6298893", "0.62986493", "0.62389565", "0.6198749", "0.6193831", "0.6192542", "0.6138287", "0.60814816", "0.6069293", "0.6062363", "0.60362655", "0.5983049", "0.59763235", "0.5965072", "0.5923629", "0.5920312", "0.591454", "0.591454", "0.5905981", "0.5905981", "0.59048545", "0.5809867", "0.5808721", "0.5805008", "0.57745576", "0.5753311", "0.573564", "0.56880873", "0.5651077", "0.5622706", "0.5579041", "0.55606526", "0.5553546", "0.554068", "0.55315346", "0.5523394", "0.54724056", "0.5453219", "0.54432756", "0.54182863", "0.5405508", "0.5402124", "0.54014724", "0.53915995", "0.5377296", "0.537096", "0.53632957", "0.53540665", "0.53525645", "0.53505564", "0.5345933", "0.53446555", "0.5342334", "0.5330817", "0.5314894", "0.5302909", "0.52989244", "0.5292298", "0.52906424", "0.52903545", "0.5269742", "0.5267637", "0.526757", "0.5259295", "0.52497053", "0.5241738", "0.5217114", "0.5216479", "0.5211218", "0.52026856", "0.52000046", "0.51687515", "0.5148151", "0.5139127", "0.5135571", "0.5135063", "0.5130806", "0.5130546", "0.51298743", "0.5110285", "0.50939876", "0.50914603", "0.50727874", "0.50601137", "0.5050212", "0.5049268", "0.50427777", "0.503684", "0.5012268", "0.50072104" ]
0.81962144
0
n-mode product of a tensor T and a matrix M; the summation is made along the nth dim. Definition in the paper "A Multilinear Singular Value Decomposition" by Lieven De Lathauwer, Bart De Moor, and Joos Vandewalle. For example, n with value 0, 1, or 2 specifies the 1st, 2nd, or 3rd dim of the tensor T. For the matrix M, this function always contracts over its second dimension, as if multiplying T by M on the left.
def nModeProduct(T, M, n):
    P = tensordot(T, M, axes=([n], [1]))
    return np.rollaxis(P, len(T.shape)-1, n)
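A minimal sanity check of the contraction above (a sketch: the small shapes, the index names, and the imports are illustrative assumptions, not from the source; it assumes nModeProduct is defined where numpy's tensordot is in scope). Contracting mode 1 of a 3x4x5 tensor with a 2x4 matrix yields a 3x2x5 tensor whose entries match the defining sum P[i, a, k] = sum_j M[a, j] * T[i, j, k].

import numpy as np
from numpy import tensordot

T = np.random.rand(3, 4, 5)
M = np.random.rand(2, 4)  # M's second dimension must match T.shape[1]
P = nModeProduct(T, M, 1)
assert P.shape == (3, 2, 5)  # mode 1 of T is replaced by M's first dimension
# Spot-check one entry against the definition of the n-mode product
i, a, k = 0, 1, 2
assert np.isclose(P[i, a, k], sum(M[a, j] * T[i, j, k] for j in range(4)))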
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ttm(t, m, k):\n\n dim_list = [] # initialize a list to save dimension index to transpose the tensor reshapped from 2D matrix\n shape_list = [] # initialize a list to save the dimensions to reshape 2D matrix back to tensor\n total_dim = len(t.shape)\n for i in range(total_dim):\n dim_list.append((k - i) % total_dim)\n shape_list.append(t.shape[(k - i) % total_dim])\n dim_order = tuple(dim_list)\n shape_list[0] = m.shape[0]\n\n t_unfold = unfold_axis(t, k)\n t_mul = np.matmul(m, t_unfold)\n t_mul = np.reshape(t_mul,tuple(shape_list))\n t_mul = np.transpose(t_mul, dim_order)\n\n return t_mul", "def xn_xn_prod(self,x_n):\n \n x_n_tiled =T.tile(x_n,(self.num_vars,1))\n \n return T.transpose(x_n_tiled)*x_n_tiled", "def matrixMultiplication(self, n, id, context):\n\n print(\"id: {}\".format(id))\n # Create one matrix\n f = 1\n m1 = []\n for x in range(n):\n row = []\n for y in range(n):\n row.append(f)\n f = f + 1\n m1.append(row)\n # The second matrix is equal to the first matrix\n m2 = m1\n print(\"m2: {}\".format(m2))\n\n # Multiply matrices\n m3 = []\n for i in range(n):\n row = []\n for j in range(n):\n sum = 0\n for k in range(n):\n sum = sum + m1[i][k] * m2[k][j]\n row.append(sum)\n m3.append(row)\n\n sum = 0\n # add the entries\n for i in range(n):\n for j in range(n):\n sum = sum + m3[i][j]\n\n print(\"Result of multiplication is {}\".format(sum))\n return sum", "def multiply(m, n):\n if n == 1:\n return m\n else:\n return m + multiply(m, n - 1)", "def product_on_basis(self, t1, t2):\n return tensor( (module.monomial(x1)*module.monomial(x2) for (module, x1, x2) in zip(self._sets, t1, t2)) ) #.", "def prod(tensor, axis=None):\n raise NotImplementedError", "def matrix_power(M, n):\n if n < 0:\n M = pinv(M)\n n = abs(n)\n\n # Shortcuts when 0 < n <= 3\n if n == 0:\n return at.eye(M.shape[-2])\n\n elif n == 1:\n return M\n\n elif n == 2:\n return tm.dot(M, M)\n\n elif n == 3:\n return tm.dot(tm.dot(M, M), M)\n\n result = z = None\n\n while n > 0:\n z = M if z is None else tm.dot(z, z)\n n, bit = divmod(n, 2)\n if bit:\n result = z if result is None else tm.dot(result, z)\n\n return result", "def op(M, N):\n\n return M ^ N", "def sum(n):\n times = lambda x: jnp.sum(x, keepdims=True, axis=0)\n trans = lambda x: jnp.repeat(x, n, axis=0)\n return Operator(times=times, trans=trans, shape=(1,n))", "def tucker_mode_dot(tucker_tensor, matrix_or_vector, mode, keep_dim=False, copy=False):\n shape, rank = _validate_tucker_tensor(tucker_tensor)\n core, factors = tucker_tensor\n contract = False\n \n if tl.ndim(matrix_or_vector) == 2: # Tensor times matrix\n # Test for the validity of the operation\n if matrix_or_vector.shape[1] != shape[mode]:\n raise ValueError(\n 'shapes {0} and {1} not aligned in mode-{2} multiplication: {3} (mode {2}) != {4} (dim 1 of matrix)'.format(\n shape, matrix_or_vector.shape, mode, shape[mode], matrix_or_vector.shape[1]\n ))\n\n elif tl.ndim(matrix_or_vector) == 1: # Tensor times vector\n if matrix_or_vector.shape[0] != shape[mode]:\n raise ValueError(\n 'shapes {0} and {1} not aligned for mode-{2} multiplication: {3} (mode {2}) != {4} (vector size)'.format(\n shape, matrix_or_vector.shape, mode, shape[mode], matrix_or_vector.shape[0]\n ))\n if not keep_dim:\n contract = True # Contract over that mode\n else:\n raise ValueError('Can only take n_mode_product with a vector or a matrix.')\n \n if copy:\n factors = [tl.copy(f) for f in factors]\n core = tl.copy(core)\n\n if contract:\n print('contracting mode')\n f = factors.pop(mode)\n core = mode_dot(core, 
tl.dot(matrix_or_vector, f), mode=mode)\n else:\n factors[mode] = tl.dot(matrix_or_vector, factors[mode]) \n\n return core, factors\n #return TuckerTensor(core, factors)", "def mdot(*args):\n r = args[0]\n for a in args[1:]:\n r = N.dot(r,a)\n return r", "def scalar_mult(n, m):\n\ttemp = []\n\tfor i in range(len(m)):\n\t\te = []\n\t\tfor j in range(len(m[0])):\n\t\t\te.append(m[i][j]*n)\n\t\ttemp.append(e)\n\treturn temp", "def f(t, x, n, v):\n total = 0\n for i in range(n+1):\n for j in range(n+1):\n for k in range(v):\n total = t[i][j] * x[i][j][k]", "def term(M, n):\n M *= (1. / n * (b**n - a**n)) # Prevent unnecessary copying\n return M", "def MathonPseudocyclicMergingGraph(M, t):\n from sage.graphs.graph import Graph\n from sage.matrix.constructor import identity_matrix\n assert len(M) == 4\n assert M[0] == identity_matrix(M[0].nrows())\n A = sum(x.tensor_product(x) for x in M[1:])\n if t > 0:\n A += sum(x.tensor_product(M[0]) for x in M[1:])\n if t > 1:\n A += sum(M[0].tensor_product(x) for x in M[1:])\n return Graph(A)", "def mulsumk(A, B):\n\tA = tf.expand_dims(A, axis=-1)\n\tB = tf.expand_dims(B, axis=-3)\n\treturn tf.reduce_sum(A*B, axis=-2)", "def prod(n):\n product = S.One\n for i in n:\n product = product * i\n return product", "def __mul__(self, tensor):\n return self.mul(tensor)", "def prod_of_nth(n):\n factorial = 1\n for i in range(1,n+1):\n factorial *= i\n return factorial", "def TMM(x,N,n,trun_basis):\n Mat = np.zeros([len(trun_basis),len(trun_basis)])\n print('making TMM')\n perms = [int((x**n * iii)%N) for iii in trun_basis] # Modular multiplication\n for iii in range(len(trun_basis)):\n if trun_basis.__contains__(perms[iii]):\n Mat[iii,trun_basis.index(perms[iii])] = 1\n return Mat", "def ncm(self,x,p):\n n = self.n\n n_p = self.n_p\n x = np.reshape(np.hstack((x,p)),(1,n+n_p))\n cholM = self.model.predict(x)\n cholM = np.reshape(cholM,int(n*(n+1)/2))\n M = self.cholM2M(cholM)\n return M", "def mult(n,m):\n result = 0\n\n if m == 0 or n == 0:\n result = 0\n\n elif n > 0:\n for x in range(n):\n result = result + m\n else:\n for x in range(-n):\n result = result - m\n return result", "def number_operator(n_modes=None, mode=None, coefficient=1.):\n if (mode is None and n_modes is None) or (mode and n_modes):\n raise ValueError(\n \"Please provide the correct value of n_modes and mode.\")\n\n operator = FermionOperator()\n if mode is None:\n for m in range(n_modes):\n operator += FermionOperator(((m, 1), (m, 0)), coefficient)\n else:\n operator = FermionOperator(((mode, 1), (mode, 0)), coefficient)\n return operator", "def n(self):\n return self._nx * self._ny", "def nptn_layer(input, kernel_size, in_channels, out_channels, num_transforms):\n assert len(input.shape) == 4\n\n filter_shape = (kernel_size, kernel_size, in_channels,\n out_channels * num_transforms)\n filter = tf.get_variable('weights', filter_shape, dtype=input.dtype)\n bias = tf.get_variable('bias', (1,), dtype=input.dtype)\n\n # Step 1: Convolution\n # The output of the convolution should have all outputs for a particular\n # input channel grouped together\n depthwise_out = tf.nn.depthwise_conv2d(input, filter, [1, 1, 1, 1], 'SAME')\n\n # Step 2: \"Volumetric max pooling\" across transformations\n splits = tf.split(depthwise_out, in_channels * out_channels, axis=3)\n max_splits = [tf.reduce_max(s, axis=3, keep_dims=True) for s in splits]\n\n # Steps 3 and 4: Reordering and \"volumetric mean pooling\"\n outputs = []\n for i in range(out_channels):\n gathered = tf.concat(max_splits[i::out_channels], 3)\n 
outputs.append(tf.reduce_mean(gathered, axis=3, keep_dims=True))\n output = tf.concat(outputs, 3)\n\n return output + bias", "def matrix_chain_dynamic(dimensions, n):\n\n m = [[-1 for _ in range(n)] for _ in range(n)]\n s = [[0 for _ in range(n)] for _ in range(n)]\n\n # multiplying matrix by itself\n for i in range(1, n):\n m[i][i] = 0\n\n for length in range(2, n):\n for i in range(1, n - length + 1):\n j = i + length - 1\n for k in range(i, j):\n cost = m[i][k] + m[k + 1][j] + dimensions[i - 1] * dimensions[k] * dimensions[j]\n if cost > m[i][j]:\n m[i][j] = cost\n # index if splitting\n s[i][j] = k\n return m, s", "def ket(i, dims):\n if not isinstance(i, list):\n i=[i]\n #Single qubit\n if len(i)==1:\n val = np.zeros((dims,1))\n val[i] = 1\n return val.reshape(dims,1)\n #multiple qubits. we need to tensor them together\n val = np.ones((1,1)) #initialize variable, so we have something to tensor with, the first time\n for x in i:\n val = np.tensordot(val,ket([x],dims), axes=0).transpose(0,2,1,3)\n val = val.reshape(val.shape[0]*val.shape[1],val.shape[2]*val.shape[3])\n return val.reshape(val.shape[0],1)", "def vect_contract(m, c, n):\n a = np.tensordot(m, c, (0, 0))\n mn = np.tensordot(a, n, (2, 0))\n return mn", "def polynomi_3N(N,X):\n\tY = N[0]*X*X*X + N[1]*X*X + N[2]*X + N[3]\n\treturn Y", "def call(self, reshaped_input):\n \"\"\"\n In Keras, there are two way to do matrix multiplication (dot product)\n 1) K.dot : AxB -> when A has batchsize and B doesn't, use K.dot\n 2) tf.matmul: AxB -> when A and B both have batchsize, use tf.matmul\n \n Error example: Use tf.matmul when A has batchsize (3 dim) and B doesn't (2 dim)\n ValueError: Shape must be rank 2 but is rank 3 for 'net_vlad_1/MatMul' (op: 'MatMul') with input shapes: [?,21,64], [64,3]\n \n tf.matmul might still work when the dim of A is (?,64), but this is too confusing.\n Just follow the above rules.\n \"\"\"\n \n ''' Computation of N_v in Equation 3 of the paper '''\n activation = K.dot(reshaped_input, self.cluster_weights)\n \n activation += self.cluster_biases\n \n activation = tf.nn.softmax(activation)\n\n activation = tf.reshape(activation,\n [-1, self.max_samples, self.cluster_size])\n\n activation = tf.transpose(activation,perm=[0,2,1])\n \n reshaped_input = tf.reshape(reshaped_input,[-1,\n self.max_samples, self.feature_size])\n\n vlad = tf.matmul(activation,reshaped_input)\n vlad = tf.transpose(vlad,perm=[0,2,1])\n vlad = tf.nn.l2_normalize(vlad,1)\n vlad = tf.reshape(vlad,[-1, self.cluster_size*self.feature_size])\n Nv = tf.nn.l2_normalize(vlad,1)\n \n # Equation 3 in the paper\n # \\hat{y} = W_N N_v\n vlad = K.dot(Nv, self.Wn)\n\n return vlad", "def permute_to_N_HWA_K(tensor, K):\n assert tensor.dim() == 4, tensor.shape\n N, _, H, W = tensor.shape\n tensor = tensor.view(N, -1, K, H, W)\n tensor = tensor.permute(0, 3, 4, 1, 2)\n tensor = tensor.reshape(N, -1, K) # Size=(N,HWA,K)\n return tensor", "def prod(k1,k2,tensor=False):\r\n part = parts.prod.Prod(k1, k2, tensor)\r\n return kern(part.input_dim, [part])", "def MM(x,N,n,l,t=0):\n Mat = np.zeros([2**l,2**l])\n for iii in range(N):\n Mat[iii,(x**n * iii)%N] = 1\n return Mat", "def einsum(ops, *args):\n\n if len(args) != 2:\n raise ValueError(\"Currently only two operands are supported\")\n\n inops, outops = ops.split('->')\n inops = inops.split(',')\n\n # All indices that are in input AND in output are multiplies\n multiplies = sorted(list(set(inops[0]) & set(inops[1]) & set(outops)))\n # All indices that are in input BUT NOT in output are sum contractions\n sums 
= sorted(list((set(inops[0]) & set(inops[1])) - set(outops)))\n\n # Map sums and indices to axis integers\n multiplies = [[inop.find(x) for x in multiplies] for inop in inops]\n sums = [[inop.find(x) for x in sums] for inop in inops]\n\n # Find output axes in input axes for final transpose\n # Values very likely lie outside of output tensor shape, so\n # just map them values to their rank (index in ordered list)\n transpose = [''.join(inops).find(x) for x in outops]\n transpose = scipy.stats.rankdata(transpose).astype(int) - 1\n\n return tensordot2(*args, sum=sums, multiply=multiplies).transpose(transpose)", "def ncon(A, B):\n return numpy.transpose(numpy.tensordot(A,\n B,\n axes=((2, 6, 3, 7), (0, 1, 2,\n 3))),\n axes=(0, 2, 1, 3, 4, 5))", "def multinomial(random_state, size=None, n=1, pvals=[0.5, 0.5],\r\n ndim=None, dtype='int64'):\r\n n = tensor.as_tensor_variable(n)\r\n pvals = tensor.as_tensor_variable(pvals)\r\n # until ellipsis is implemented (argh)\r\n tmp = pvals.T[0].T\r\n ndim, size, bcast = _infer_ndim_bcast(ndim, size, n, tmp)\r\n bcast = bcast + (pvals.type.broadcastable[-1],)\r\n op = RandomFunction(multinomial_helper,\r\n tensor.TensorType(dtype=dtype, broadcastable=bcast),\r\n ndim_added=1)\r\n return op(random_state, size, n, pvals)", "def it_matrixpower(p,t,n,root_field=RR):\n assert n>=2, \"Carleman matrix must at least be of size 2 to retrieve the coefficients. But given was \" + repr(n)\n CM = p.carleman_matrix(n)\n ev = CM.charpoly().roots(root_field)\n assert len(ev) == n, \"Carleman matrix must have exactly \" + repr(n) + \"eigenvalues, but has \" + repr(len(ev))\n\n Char = [0]*n\n for k in range(n):\n #here is possibility for improvement of precision\n #to separate the fractional from the root parts\n #expanding the product\n Char[k] = CM - ev[k][0]*identity_matrix(n)\n\n #we want to have the first row of the product of the matrices\n #thatswhy we mulitply in front with:\n prod = vector(p.K,[0,1]+[0]*(n-2))\n prodwo = [0]*n\n for k in range(n):\n prodwo[k]=prod #these are the first terms until k-1\n\n #no need to continue\n if k == n-1:\n break\n\n #and we add the terms starting with k+1\n for i in range(k+1,n):\n prodwo[k] = prodwo[k] * Char[i]\n\n prod = prod * Char[k]\n\n sprodwo = [0]*n\n for k in range(n):\n if k==0:\n sprodwo[k] = ev[k][0] - ev[1][0]\n start = 2\n else:\n sprodwo[k] = ev[k][0] - ev[0][0]\n start = 1\n\n for i in range(start,n):\n if i != k:\n sprodwo[k] = sprodwo[k] * (ev[k][0] - ev[i][0])\n\n res = ev[0][0]**t/sprodwo[0] * prodwo[0]\n for k in range(1,n):\n res += ev[k][0]**t/sprodwo[k]*prodwo[k]\n\n return res.list()", "def matmul_3d_by_2d(in_tensor, in_matrix, name):\n with tf.name_scope(name, default_name=\"Matmul_3D_by_2D\", values=[in_tensor, in_matrix]):\n tensor_shape = tf.shape(in_tensor)\n matrix_shape = tf.shape(in_matrix)\n return tf.reshape(tf.matmul(\n tf.reshape(in_tensor, [tensor_shape[0]*tensor_shape[1], tensor_shape[2]]), in_matrix),\n [tensor_shape[0], tensor_shape[1], matrix_shape[1]])", "def basis_fns(n=0):\n return lambda x: np.sum(x ** (n+1), axis=1)", "def tensordot2(A, B, sum=None, multiply=None):\n if sum is None:\n sum = [[], []]\n else:\n sum = list(sum)\n\n if multiply is None:\n multiply = [[], []]\n else:\n multiply = list(multiply)\n\n # For each multiply[0] we are adding one axis, thus we need to increment\n # all following items by one: (0, 1, 2) -> (0, 2, 4)\n # We need to account that the array may be unsorted\n idx = np.argsort(multiply[0])\n post_multiply = multiply[0]\n for i, v in enumerate(idx):\n 
post_multiply[v] += i\n\n for i in post_multiply:\n A = A.diag_axis(i)\n\n sum[0] += post_multiply\n sum[1] += multiply[1]\n\n return tensordot(A, B, axes=sum)", "def permute(n, r):\n\n product = 1\n for i in range(n - r + 1, n + 1):\n product *= i\n return product", "def __mul__(self, N):\n assert isinstance(N, int)\n if self.N > 1:\n raise Exception('Replication is not supported for 2D')\n return Solution([self.tolist()] * N, self.fitness.tolist() * N, \n self.n_eval.tolist() * N, var_name=self.var_name, \n n_obj=self.n_obj, verbose=self.verbose)", "def __mul__(self, N):\n assert isinstance(N, int)\n if self.N > 1:\n raise Exception('Replication is not supported for 2D')\n return Solution([self.tolist()] * N, self.fitness.tolist() * N, \n self.n_eval.tolist() * N, var_name=self.var_name, verbose=self.verbose)", "def nn(x, w):\n return np.dot(x, w)", "def tensor_outer_product(tensor1, tensor2):\n shape_1 = tensor1.shape\n shape_2 = tensor2.shape\n s1 = len(shape_1)\n s2 = len(shape_2)\n \n shape_1 = shape_1 + (1, )*s2\n shape_2 = (1, )*s1 + shape_2\n return np.reshape(tensor1, shape_1) * np.reshape(tensor2, shape_2)", "def dot_product(tensor_nd, tensor_2d):\n # Get shape and replace unknown shapes (None) with -1\n shape_nd = tensor_nd.get_shape().as_list()\n shape_nd = [s if isinstance(s, int) else -1 for s in shape_nd]\n shape_2d = tensor_2d.get_shape().as_list()\n if len(shape_2d) > 2:\n raise ValueError(\"tensor_2d must be a 1D or 2D tensor\")\n \n if len(shape_2d) == 1:\n tensor_2d = tf.expand_dims(tensor_2d, 0)\n if len(shape_nd) == 1:\n shape_nd = tf.expand_dims(shape_nd, 0)\n \n if len(shape_nd) > 2:\n # collapse axes except for ones to multiply and perform matmul\n dot_prod = tf.matmul(tf.reshape(tensor_nd, [-1, shape_nd[-1]]), tensor_2d)\n # reshape to correct dimensions\n dot_prod = tf.reshape(dot_prod, shape_nd[:-1] + shape_2d[-1:])\n elif len(shape_nd) == 2:\n dot_prod = tf.matmul(tensor_nd, tensor_2d)\n else:\n dot_prod = tf.matmul(tf.expand_dims(tensor_nd, 0), tensor_2d)\n \n return dot_prod", "def n_parameters_per_matrix(weight, n_bits):\n return 2**weight * (2**weight - 1)", "def Reduce(N):\r\n M = N.copy()\r\n lead = 0\r\n rowCount = M.shape[0]\r\n columnCount = M.shape[1]\r\n B1=eye(rowCount)\r\n for r in range(rowCount): \r\n if (columnCount <= lead):\r\n return B1,M\r\n i = r\r\n while (M[i, lead] == 0):\r\n i = i + 1\r\n if (rowCount == i):\r\n i = r\r\n lead = lead + 1\r\n if (columnCount == lead):\r\n return B1,M\r\n B1.row_swap(i, r)\r\n M.row_swap(i, r)\r\n a=M[r,lead]\r\n for k in range(columnCount):\r\n M[r,k]=S(M[r,k])/a\r\n for k in range(rowCount):\r\n B1[r,k]=S(B1[r,k])/a\r\n for i in range(0,rowCount):\r\n if (i != r):\r\n a=M[i,lead]\r\n for k in range(0,columnCount):\r\n M[i,k]=M[i,k]-M[r,k]*a\r\n for k in range(rowCount):\r\n B1[i,k]=B1[i,k]-B1[r,k]*a\r\n lead = lead + 1\r\n return B1,M", "def obj_u_opt_N_opt(u, T, alpha, B, N, num_tumor_voxels, Td = 2):\n x = T.dot(u)\n alpha_tilde = alpha #np.repeat(N, num_tumor_voxels)*alpha\n B_tilde = B #np.repeat(N, num_tumor_voxels)*B\n #Note that all modalities must have the same number of tumor voxels:\n return alpha_tilde.T.dot(x) - x.T.dot(B_tilde*x) + num_tumor_voxels*(np.sum(N)-1)*(np.log(2)/Td)", "def __matmul__(self, tensor):\n return self.matmul(tensor)", "def _generate_mult_process(X, mat, inits):\n M = np.empty_like(X, dtype=float)\n M[..., 0] = inits[X[..., 0]]\n M[..., 1:] = mat[X[..., :-1], X[..., 1:]]\n np.cumprod(M, axis=-1, out=M)\n return M", "def contract(tensor):\n temp = np.einsum('ikma, 
jlan', tensor, tensor)\n M = np.zeros((tensor.shape[0]**2, tensor.shape[1]**2, tensor.shape[2], tensor.shape[3]))\n for i,j,k,l,m,n in it.product(*[range(x) for x in temp.shape]):\n M[i + tensor.shape[0]*j, k + tensor.shape[1]*l, m, n] = temp[i,j,k,l,m,n]\n return M", "def calculate_matmul_n_times(n_components, mat_a, mat_b):\n res = np.zeros(mat_a.shape)\n mat_a = tf.cast(mat_a, tf.double)\n mat_b = tf.cast(mat_b, tf.double)\n for i in range(n_components):\n mat_a_i = tf.squeeze(mat_a[:, i, :, :], -2)\n mat_b_i = tf.squeeze(mat_b[0, i, :, :])\n res[:, i, :, :] = tf.expand_dims(tf.matmul(mat_a_i, mat_b_i), 1)\n\n return tf.convert_to_tensor(res)", "def TensorProduct(**kw_kernels):\n return Composite('*', **kw_kernels)", "def _get_modes(self, M=0, N=0):\n dim_pol = 2 * M + 1\n dim_tor = 2 * N + 1\n m = np.arange(dim_pol) - M\n n = np.arange(dim_tor) - N\n mm, nn = np.meshgrid(m, n)\n mm = mm.reshape((-1, 1), order=\"F\")\n nn = nn.reshape((-1, 1), order=\"F\")\n z = np.zeros_like(mm)\n y = np.hstack([z, mm, nn])\n return y", "def add_mode(self, n=1):\n self.circuit.add_mode(n)", "def tensormul(t1, t2):\n dim1 = t1.get_shape().as_list()[-1]\n dim2 = t2.get_shape().as_list()[-1]\n result_shape_tensors = tf.unstack(tf.shape(t1))\n result_shape_tensors[-1] = dim2\n result_shape_tensor = tf.stack(result_shape_tensors)\n t1 = tf.reshape(t1, [-1, dim1])\n result = tf.matmul(t1, t2)\n result = tf.reshape(result, result_shape_tensors)\n return result", "def ec_matrix_vector(p0, T, n): \n if(n<=0):\n EC=np.zeros(T.shape)\n return EC\n else:\n \"\"\"Probability vector after (k=0) propagations\"\"\" \n p_k=1.0*p0\n \"\"\"Sum of vectors after (k=0) propagations\"\"\"\n p_sum=1.0*p_k \n for k in xrange(n-1):\n \"\"\"Propagate one step p_{k} -> p_{k+1}\"\"\"\n p_k=np.dot(p_k,T) \n \"\"\"Update sum\"\"\"\n p_sum+=p_k \n \"\"\"Expected counts\"\"\"\n EC=p_sum[:,np.newaxis]*T \n return EC", "def tensordot(module, x, y, axes):\n _import_modules()\n if module in [np, ma]:\n return np.tensordot(x, y, axes)\n elif module == torch:\n return torch.tensordot(x, y, axes)\n elif module == jnp:\n return jnp.tensordot(x, y, axes)\n elif module == tf:\n return tf.tensordot(x, y, axes)\n raise UnknownModuleException(f\"Module {module.__name__} not supported.\")", "def n_dimensional_weightmul(L, W, L_shape, Lout_shape, first_dim_of_l_is_batch=True):\n if not first_dim_of_l_is_batch:\n raise NotImplementedError\n if len(L_shape) == 1 and len(Lout_shape) == 1:\n return tf.matmul(L, W)\n # L : ?xN1xN2xN3x...\n # Lout : ?xM1xM2xM3x...\n # W : N1xN2x...xM1xM2x...\n # Einstein notation: letter b (denotes batch dimension)\n # Lout_blmn... = L_bijk... 
* Wijk...lmn...\n letters = list('ijklmnopqrst')\n l_subscripts = ''.join([letters.pop(0) for _ in range(len(L_shape))])\n lout_subscripts = ''.join([letters.pop(0) for _ in range(len(Lout_shape))])\n einsum_string = 'b'+l_subscripts+','+l_subscripts+lout_subscripts+'->'+'b'+lout_subscripts\n return tf.einsum(einsum_string,L,W)", "def attn_sum_dot(keys, query):\n return tf.reduce_sum(keys * tf.expand_dims(query, 1), [2])", "def get_matrixS(n):\n\n mat_nxn = np.zeros([n, n], dtype=int)\n for row_num in range(1, n + 1):\n i = row_num - 1\n if row_num == 1:\n mat_nxn[i][i + 1] = 1\n mat_nxn[i][i + 2] = 1\n elif row_num == 2:\n mat_nxn[i][i - 1] = 1\n mat_nxn[i][i + 2] = 1\n elif row_num == n - 1:\n mat_nxn[i][i + 1] = 1\n mat_nxn[i][i - 2] = 1\n elif row_num == n:\n mat_nxn[i][i - 1] = 1\n mat_nxn[i][i - 2] = 1\n elif row_num % 2 == 1:\n mat_nxn[i][i + 1] = 1\n mat_nxn[i][i + 2] = 1\n mat_nxn[i][i - 2] = 1\n elif row_num % 2 == 0:\n mat_nxn[i][i - 1] = 1\n mat_nxn[i][i + 2] = 1\n mat_nxn[i][i - 2] = 1\n mat_nxn = mat_nxn + np.eye(n, dtype=int)\n mat_2nx2n = np.repeat(np.repeat(mat_nxn, 2, 0), 2, 1)\n return torch.as_tensor(mat_2nx2n)", "def tensor_network_tt_einsum(inputs, states, output_size, rank_vals, bias, bias_start=0.0):\n\n # print(\"Using Einsum Tensor-Train decomposition.\")\n num_orders = len(rank_vals)+1#alpha_1 to alpha_{K-1}\n num_lags = len(states)\n batch_size = tf.shape(inputs)[0] \n state_size = states[0].get_shape()[1].value #hidden layer size\n input_size= inputs.get_shape()[1].value\n\n\n total_state_size = (state_size * num_lags + 1 )\n\n # These bookkeeping variables hold the dimension information that we'll\n # use to store and access the transition tensor W efficiently.\n mat_dims = np.ones((num_orders,)) * total_state_size\n\n # The latent dimensions used in our tensor-train decomposition.\n # Each factor A^i is a 3-tensor, with dimensions [a_i, hidden_size, a_{i+1}]\n # with dimensions [mat_rank[i], hidden_size, mat_rank[i+1] ]\n # The last\n # entry is the output dimension, output_size: that dimension will be the\n # output.\n mat_ranks = np.concatenate(([1], rank_vals, [output_size]))\n\n # This stores the boundary indices for the factors A. Starting from 0,\n # each index i is computed by adding the number of weights in the i'th\n # factor A^i.\n mat_ps = np.cumsum(np.concatenate(([0], mat_ranks[:-1] * mat_dims * mat_ranks[1:])),dtype=np.int32)\n mat_size = mat_ps[-1]\n\n # Compute U * x\n weights_x = vs.get_variable(\"weights_x\", [input_size, output_size] )\n out_x = tf.matmul(inputs, weights_x)\n\n # Get a variable that holds all the weights of the factors A^i of the\n # transition tensor W. All weights are stored serially, so we need to do\n # some bookkeeping to keep track of where each factor is stored.\n mat = vs.get_variable(\"weights_h\", mat_size) # h_z x h_z... 
x output_size\n\n #mat = tf.Variable(mat, name=\"weights\")\n states_vector = tf.concat(states, 1)\n states_vector = tf.concat( [states_vector, tf.ones([batch_size, 1])], 1)\n \"\"\"form high order state tensor\"\"\"\n states_tensor = states_vector\n for order in range(num_orders-1):\n states_tensor = _outer_product(batch_size, states_tensor, states_vector)\n\n # print(\"tensor product\", states_tensor.name, states_tensor.get_shape().as_list())\n cores = []\n for i in range(num_orders):\n # Fetch the weights of factor A^i from our big serialized variable weights_h.\n mat_core = tf.slice(mat, [mat_ps[i]], [mat_ps[i + 1] - mat_ps[i]])\n mat_core = tf.reshape(mat_core, [mat_ranks[i], total_state_size, mat_ranks[i + 1]]) \n cores.append(mat_core)\n \n out_h = tensor_train_contraction(states_tensor, cores)\n # Compute h_t = U*x_t + W*H_{t-1}\n res = tf.add(out_x, out_h)\n\n # print \"END OF CELL CONSTRUCTION\"\n # print \"========================\"\n # print \"\"\n\n if not bias:\n return res\n biases = vs.get_variable(\"biases\", [output_size])\n\n return nn_ops.bias_add(res,biases)", "def multiply(self, n): \n f_num = self.num*n.num\n f_den = self.den*n.den\n f = Fraction(f_num, f_den)\n return f.reduce()", "def conv_layer_as_matrix_op(self, W, b, x, n, k):\n\n i = b.shape[0]\n\n output_im_size = n - k + 1\n\n Wx = (W @ x).reshape(i, output_im_size, output_im_size)\n return np.maximum(Wx + b.reshape(i, 1, 1), 0)", "def pow(op, n):\n return compose(* ([op] * n))", "def nN(self):\n return int(self.vnN.prod())", "def get_Kn(self, T):\n\n mfp = self.get_mfp(T)\n\n self.Kn = mfp / self.Kn_length\n\n return self.Kn", "def mpool1( x, p):\n if p > 1:\n x = tf.expand_dims(x, 3) # N x M x F x 1\n x = tf.nn.max_pool(x, ksize=[1, p, 1, 1], strides=[1, p, 1, 1], padding='SAME')\n # tf.maximum\n return tf.squeeze(x, [3]) # N x M/p x F\n else:\n return x", "def nd_kernel(n):\n n = int(n)\n total_size = 3**n\n mid_point = int((3**n - 1)/2)\n kern = np.zeros(total_size, dtype=bool)\n for i in range(n):\n kern[mid_point-3**i] = True\n kern[mid_point+3**i] = True\n new_shape = 3*np.ones(n, dtype=int) \n unnormed_kern = kern.reshape(new_shape)\n return unnormed_kern/unnormed_kern.sum()", "def transform(i, j, k):\n return i * N * N + j * N + k + 1", "def make_mult_op(k: int) -> Callable[[V], M]:\n def op(v: V) -> M:\n \"\"\"Multiplication operator function.\n\n :v: Vector of shape (2 * 4 + 1,).\n :returns: Toeplitz matrix m of shape (2 * k + 1, 2 * k + 1).\n\n \"\"\"\n c = v[2 * k:]\n r = np.flip(v[:2 * k + 1])\n m: M = toeplitz(c, r)\n return m\n return op", "def _prod_vectorized(M1, M2):\n sh1 = M1.shape\n sh2 = M2.shape\n assert len(sh1) >= 2\n assert len(sh2) >= 2\n assert sh1[-1] == sh2[-2]\n\n ndim1 = len(sh1)\n t1_index = list(xrange(ndim1-2)) + [ndim1-1, ndim1-2]\n return np.sum(np.transpose(M1, t1_index)[..., np.newaxis] *\n M2[..., np.newaxis, :], -3)", "def tri(N, M=None, k=0, dtype=None):\r\n if dtype is None:\r\n dtype = config.floatX\r\n if M is None:\r\n M = N\r\n op = Tri(dtype)\r\n return op(N, M, k)", "def modexp(x,y,n):\n\tif y == 0: return 1\n\n\tpartial = modexp(x, y/2, n)\n\n\tif y%2 == 0: return (partial**2) % n\n\telse: return (x*partial**2) % n", "def lanczos_decomp(vector_prod_fn, scalar, n, k):\n Q = tf.zeros([n, 1])\n v = tf.random_uniform([n, 1])\n v = v / tf.norm(v)\n Q = tf.concat([Q, v], axis=1)\n\n # diagonals of the tridiagonal matrix\n beta = tf.constant(0.0, dtype=tf.float32, shape=[1])\n alpha = tf.constant(0.0, dtype=tf.float32, shape=[1])\n\n for i in range(k):\n v = 
vector_prod_fn(tf.reshape(Q[:, i+1], [n, 1])) - tf.scalar_mul(scalar, tf.reshape(Q[:, i+1], [n, 1]))\n v = tf.reshape(v, [n,])\n curr_alpha = tf.reshape(tf.reduce_sum(v * Q[:, i+1]), [1,])\n alpha = tf.concat([alpha, curr_alpha], axis=0)\n v = v-beta[-1]*Q[:, i]-alpha[-1]*Q[:, i+1]\n curr_beta = tf.reshape(tf.norm(v), [1,])\n beta = tf.concat([beta, curr_beta], axis=0)\n curr_norm = tf.reshape(v/(beta[-1]+1e-8), [n, 1])\n Q = tf.concat([Q, curr_norm], axis=1)\n\n alpha = tf.slice(alpha, begin=[1], size=[-1])\n beta = tf.slice(beta, begin=[1], size=[k-1])\n Q = tf.slice(Q, begin=[0, 1], size=[-1, k])\n return alpha, beta, Q", "def nN(self):\n return int((self._n+1).prod())", "def spmv (n, A, x):\n y = dense_vector (n)\n for (i, A_i) in A.items ():\n s = 0\n for (j, a_ij) in A_i.items ():\n s += a_ij * x[j]\n y[i] = s\n return y", "def nin(x, num_units, **kwargs):\n s = int_shape(x)\n x = tf.reshape(x, [np.prod(s[:-1]),s[-1]])\n x = dense_layer(x, num_units, **kwargs)\n return tf.reshape(x, s[:-1]+[num_units])", "def multinomial_coefficient(partition, n=None):\n tot = 0\n deg = 1\n for p in partition:\n tot += p\n deg *= factorial(p)\n if n is None:\n n = tot\n return factorial(n)//deg//factorial(n-tot)", "def nth_permutation(elems, n):\n pos, summ = 0, 0\n permutation = \"\"\n for i in reversed(range(1,len(elems))):\n fact_i = fact(i)\n while summ+fact_i < n:\n summ += fact_i\n pos += 1\n permutation += str(elems[pos])\n del(elems[pos])\n pos = 0\n return permutation + str(elems[0])", "def tensordot(z, n_times_atom):\n n_atoms, n_trials, n_times_valid = z.shape\n\n ztz = np.zeros(shape=(n_atoms, n_atoms, 2 * n_times_atom - 1))\n t0 = n_times_atom - 1\n\n axes = ([1, 2], [1, 2])\n\n for t in range(n_times_atom):\n if t == 0:\n ztz[:, :, t0] += np.tensordot(z, z, axes=axes)\n else:\n tmp = np.tensordot(z[:, :, :-t], z[:, :, t:], axes=axes)\n ztz[:, :, t0 + t] += tmp\n tmp = np.tensordot(z[:, :, t:], z[:, :, :-t], axes=axes)\n ztz[:, :, t0 - t] += tmp\n\n return ztz", "def __imul__(self, tensor):\n return self.mul_(tensor)", "def compute_transductive_complexity_term(d_function, m, N):\n one_over_m = 1./m\n m_over_N = float(m)/N\n \n def inside_sum(K):\n if K == 0: return 1.\n K_over_N = K/float(N)\n return sum([ exp(hypergeom.logpmf(k, N, K, m) + m*d_function(k*one_over_m, K_over_N, m_over_N) ) for k in np.arange( max(0,K+m-N), min(m,K)+1 ) ])\n \n return max( [ inside_sum(K) for K in np.arange( 0, N+1 ) ] )", "def dot_product(x, kernel):\n if K.backend() == 'tensorflow':\n return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)\n else:\n return K.dot(x, kernel)", "def dot_product(x, kernel):\n if K.backend() == 'tensorflow':\n return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)\n else:\n return K.dot(x, kernel)", "def dot_product(x, kernel):\n if K.backend() == 'tensorflow':\n return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)\n else:\n return K.dot(x, kernel)", "def dot_product(x, kernel):\n if K.backend() == 'tensorflow':\n return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)\n else:\n return K.dot(x, kernel)", "def dot_product(x, kernel):\r\n if K.backend() == 'tensorflow':\r\n return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)\r\n else:\r\n return K.dot(x, kernel)", "def __mul__(self, n):\n vectors = [n * Vector(*(p - self.center)) for p in self.points]\n return Form([vectors[i](self.points[i]) for i in range(len(self.points))])", "def symmetrize(n):\n times = lambda x: jnp.concatenate((jnp.flipud(x), x))\n trans = lambda x: x[n:] + x[n-1::-1]\n return 
Operator(times=times, trans=trans, shape=(2*n,n))", "def build_M(K, R, T):\n    external = np.append(R, T, axis=1)\n    M = K.dot(external)\n    return M", "def interior_tensor_product(mx, dim_a, dim_b, e=None):\n    assert _np.shape(mx) == (dim_a * dim_b, dim_a * dim_b), \"Dimensions do not agree with matrix size\"\n    assert _np.shape(e)[0] == _np.shape(e)[1], \"e should be a square matrix\"\n    basis_a = matrix_units(dim_a)\n    basis_b = matrix_units(dim_b)\n    return sum((_np.trace(_np.dot(mx, _np.kron(unit_a, unit_b).T)) * multikron([unit_a, e, unit_b])\n                for unit_a in basis_a for unit_b in basis_b))", "def n_parameters(weight, n_bits):\n    n_w = n_parameters_per_matrix(weight, n_bits)\n\n    # Number of ways to pick weight bits out of n_bits\n    n_a = n_matrices_per_weight(weight, n_bits)\n\n    return n_w * n_a", "def multiply(t):\n    return mul(*t)", "def dot_prod(t1: torch.Tensor, t2: torch.Tensor, verbose: bool = False):\n    assert t1.size() == t2.size(), \"Sizes for dot-product must match\"\n    return mo.dot_prod(t1, t2, verbose)", "def _apply_two_mode_gate(G, T, i, j):\n    (T[i], T[j]) = (G[0, 0] * T[i] + G[0, 1] * T[j], G[1, 0] * T[i] + G[1, 1] * T[j])\n    return T", "def dot_product(x, kernel):\n    if K.backend() == 'tensorflow':\n        # K.expand_dims defaults to axis=-1\n        return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)\n    else:\n        return K.dot(x, kernel)", "def matrix_mult(m1, m2):\n    pass", "def _weight_jac_t_mat_prod(self, module, g_inp, g_out, mat, sum_batch=True):\n        raise NotImplementedError" ]
[ "0.60840887", "0.5962957", "0.5834595", "0.57553023", "0.5729404", "0.56230754", "0.55998474", "0.558549", "0.55777717", "0.5541569", "0.54854864", "0.54752266", "0.53972495", "0.53863096", "0.53441954", "0.5337099", "0.5322091", "0.5305267", "0.5296987", "0.52967924", "0.5278204", "0.5273558", "0.52567583", "0.5210994", "0.5205668", "0.5165869", "0.51604676", "0.5139274", "0.51357865", "0.51123035", "0.510939", "0.50964", "0.5079341", "0.5070047", "0.50657034", "0.50498444", "0.5049244", "0.5044337", "0.5042385", "0.50393957", "0.50295067", "0.50269115", "0.50164115", "0.5002216", "0.49919862", "0.4977479", "0.4964173", "0.4961968", "0.4961231", "0.49546877", "0.4953707", "0.4944151", "0.4941853", "0.4939858", "0.4925716", "0.4922577", "0.4919869", "0.49174494", "0.4916185", "0.49123922", "0.49046075", "0.48820448", "0.48784447", "0.48774657", "0.48751605", "0.48728713", "0.48717198", "0.48638856", "0.4861008", "0.48555714", "0.485071", "0.4843485", "0.4836328", "0.48345765", "0.48157415", "0.4812028", "0.48106152", "0.48064455", "0.4804836", "0.48022577", "0.47968513", "0.47964448", "0.4789052", "0.47880283", "0.4786366", "0.4786366", "0.4786366", "0.4786366", "0.478183", "0.47768536", "0.47731492", "0.47681808", "0.47622848", "0.4761867", "0.47565824", "0.47557753", "0.4753397", "0.47486997", "0.47443563", "0.47439533" ]
0.84308904
0
Higher-order SVD (HOSVD) of a d-dim tensor A, so that A = S (1) u1 (2) u2 (3) u3 ... (d) ud, where "(n)" means the n-mode product. S is the core tensor; u1, u2, u3, ... are orthogonal factor matrices. Definition in the paper "A Multilinear Singular Value Decomposition" by Lieven De Lathauwer, Bart De Moor, and Joos Vandewalle.
def HOSVD(A, k=None, tol=None):
    d = len(A.shape)
    if d == 2:
        u, s, vt = svd(A, full_matrices=False)
        U = [u, vt.T]
        S = np.diag(s)
    else:
        U = [None]*d
        for j in range(0, d):
            U[j], s, vt = svd(unfold(A, j), full_matrices=False)
        S = A.copy()
        for i in range(0, d):
            S = nModeProduct(S, U[i].T, i)
    if k is not None:
        if isinstance(k, int):  # if only one integer is assigned to k
            k = k*np.ones((len(A.shape),), dtype=int)
        S = subTensor(S, k=k)
        for j in range(0, d):
            U[j] = U[j][:, :k[j]]
    return S, U
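A round-trip sketch (assumptions: numpy's svd plus the unfold and subTensor helpers the function relies on are in scope; the small shape is illustrative; note that the tol argument is accepted but never used above). Without truncation each U[j] has orthonormal columns, so A is recovered exactly from the core S by repeated n-mode products.

import numpy as np
from numpy.linalg import svd

A = np.random.rand(3, 4, 5)
S, U = HOSVD(A)  # k is None, so nothing is truncated
A_rec = S
for i in range(A.ndim):
    A_rec = nModeProduct(A_rec, U[i], i)  # A = S (1) U[0] (2) U[1] (3) U[2]
assert np.allclose(A, A_rec)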
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def svd0(A):\n M,N = A.shape\n if M>N: return sla.svd(A, full_matrices=True)\n else: return sla.svd(A, full_matrices=False)", "def svd(self, X): # [5pts]\n N,D = X.shape[0],X.shape[1]\n if X.ndim == 3:\n U = np.zeros((N,N,3))\n S = np.zeros((min(N,D),3))\n V = np.zeros((D,D,3))\n for i in range(3):\n U_temp,S_temp,V_temp = np.linalg.svd(X[:,:,i],compute_uv=True, full_matrices=True,hermitian=False)\n U[:,:,i] = U_temp\n S[:,i] = S_temp\n V[:,:,i] = V_temp\n else:\n U,S,V = np.linalg.svd(X,compute_uv=True,full_matrices=True, hermitian=False)\n return U,S,V", "def rSVD(A, rank):\n n, m = A.shape\n P = np.random.randn(m, rank)\n Z = A @ P\n q, r = np.linalg.qr(Z, mode=\"reduced\")\n Y = q.T @ A\n s, uy, v = svd(Y, min(min(Y.shape), rank))\n u = q @ uy\n return s, u, v", "def svd(T):\n try:\n U, S, V = splinalg.svd(T, full_matrices=False)\n except splinalg.LinAlgError:\n U, S, V = splinalg.svd(T, full_matrices=False, lapack_driver='gesvd')\n maxU, minU = U.max(0), U.min(0)\n maxV, minV = V.max(1), V.min(1)\n ind = (np.abs(minU) > maxU) & (np.abs(minV) > maxV)\n U[:, ind] *= -1\n V[ind] *= -1\n return U, S, V", "def svd(a, full_matrices=1, compute_uv=1):\n return SVD(full_matrices, compute_uv)(a)", "def svd(self):\n U, s, Vh = la.svd(self)\n S = np.zeros(self.shape)\n np.fill_diagonal(S, s)\n return (Matrix(U), Matrix(S), Matrix(Vh))", "def svd_S(T):\n try:\n S = splinalg.svd(T, full_matrices=False, compute_uv=False)\n except splinalg.LinAlgError:\n S = splinalg.svd(T, full_matrices=False, lapack_driver='gesvd', compute_uv=False)\n return S", "def svd(matrix):\n u = None\n s = None\n v = None\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n\n return u, s, v", "def svd_stability(N=20, display=False):\n B = np.random.randn(N, N) # i.i.d normal matrix\n C = np.random.randn(N, N) # i.i.d normal matrix\n S = np.sort(np.abs(np.random.randn(N)))[::-1] # i.i.d normal vector\n\n # Generating U, V, sigma using QR (Cheap way to get orthogonal matrices)\n U, _ = np.linalg.qr(B) # U\n V, _ = np.linalg.qr(C) # V\n\n A = (U * S) @ V.T # Forming A\n\n U2, S2, VT2 = np.linalg.svd(A)\n\n norm1 = np.linalg.norm(U2 - U)\n norm2 = np.linalg.norm(S2 - S)\n norm3 = np.linalg.norm(VT2 - V.T)\n norm4 = np.linalg.norm(((U2 * S2) @ VT2) - A)\n\n if display:\n print(\"||U2 - U|| = %s\" %norm1)\n print(\"||sigma2 - sigma|| = %s\" %norm2)\n print(\"||V2 - V|| = %s\" %norm3)\n print(\"||(U2 @ S2 @ V2) - A|| = %s\" %norm4)\n return (norm1, norm2, norm3, norm4)", "def svd(matrix):\n u = None\n s = None\n v = None\n ### YOUR CODE HERE\n (u,s,v)=np.linalg.svd(matrix)\n ### END YOUR CODE\n\n return u, s, v", "def svd_approx(A, s):\n \n U, S, V = la.svd(A)\n V = V.conj().T\n if s > len(S):\n raise ValueError( str(len(S)) + \" = Rank(A) > s\" )\n \n U2 = U[:,:s]\n S2 = S[:s]\n V2 = V[:,:s]\n V2 = V2.conj().T\n \n S2 = np.diag(S2)\n \n Ag = U2@S2@V2\n ent = U2.size + len(S2) + V2.size\n return Ag, ent\n \n raise NotImplementedError(\"Problem 3 Incomplete\")", "def decompress_svd(size:tuple, svd_u, svd_s, svd_vh):\r\n m, n = size[0:2]\r\n u = np.zeros((3, m, m), dtype=np.float64)\r\n s = np.zeros((3, min(m, n)), dtype=np.float64)\r\n vh = np.zeros((3, n, n), dtype=np.float64)\r\n\r\n _,p = svd_s.shape\r\n u[:, 0:m, 0:p] = svd_u[:, :, :]\r\n s[:, 0:p] = svd_s[:, :]\r\n vh[:, 0:p, 0:n] = svd_vh[:, :, :]\r\n\r\n # SVD equation: A = U * D * VH\r\n img_svd = np.zeros(size, dtype=np.uint8)\r\n for k in range(3):\r\n d = np.zeros((m, n), dtype=np.float64)\r\n d[:min(m, n), :min(m, n)] = np.diag(s[k, :])[:, :]\r\n img_svd[:,:,k] = 
np.dot(np.dot(u[k,:,:], d), vh[k,:,:])\r\n return img_svd", "def svd_spd_decomposition(P):\n\t# Assert Matrix P is symetric\n\tassert check_symmetric(P)\n\n\t# singular value decomposition\n\tU, D, V = np.linalg.svd(P, full_matrices=False)\n\n\t# Create matrix W = Vtsqrt(diagnol(D)) #why Vt?\n\tM = np.dot(np.transpose(V), np.sqrt(np.diag(D)))\n\n\t#print(np.transpose(V))\n\n\treturn M", "def visualize_svd():", "def implement_svd(data):\n u, s, v = torch.svd(data) # implement svd\n # note: the u returned by this function only includes the top values.\n # u * s will be equivalent due to the zero terms, but will run more efficiently with this implementation.\n s = torch.diag(s) # turn s into a diagonal matrix\n transformed_matrix = torch.mm(u, s) # u * s\n return l21_reg(s), transformed_matrix # return the L2,1 regularization term and matrix", "def svd_images(imagear):\n n = np.shape(imagear)[1]\n u, s, v = np.linalg.svd(imagear/np.sqrt(n-1),full_matrices=False)\n \n return(u, s, v)", "def svd(a, full_matrices=False, compute_uv=True):\n #-> gesvd\n a, cv, isMatrix = get_computation_matrix(a)\n t_dtype = TypeUtil.to_numpy_dtype(a.get_dtype())\n (u, s, vt, _) = gesvd(a, compute_uv=compute_uv, \\\n full_matrices=full_matrices, lwork=0,\\\n overwrite_a=1, dtype=t_dtype)\n if not compute_uv:\n if cv:\n return s.to_numpy_array() # ndarray\n else:\n return s # FrovedisVector\n else:\n if cv and isMatrix:\n return (u.to_numpy_matrix(), s.to_numpy_array(),\\\n vt.to_numpy_matrix())\n elif cv and not isMatrix:\n return (u.to_numpy_array(), s.to_numpy_array(),\\\n vt.to_numpy_array())\n else:\n return (u, s, vt)", "def svd_split(m: np.ndarray):\n Dl, d, Dr = m.shape\n u, s, v = np.linalg.svd(m.reshape(Dl * 2, Dr * d // 2))\n D_middle = min(u.shape[-1], v.shape[0])\n s = np.diag(s[:D_middle])\n\n u = u[:, :D_middle].reshape((Dl, 2, D_middle))\n sv = s.dot(v[:D_middle]).reshape((D_middle, d // 2, Dr))\n return u, sv", "def visualize_svd():\n A=np.array([[3,1],[1,3]])\n U,s,Vh=truncated_svd(A)\n \n twopi=np.linspace(0,2.*np.pi,360)\n one=np.reshape(np.linspace(0,1,100),(1,100))\n zeros=np.zeros((1,100))\n S=np.vstack((np.reshape(np.cos(twopi),(1,360)),np.reshape(np.sin(twopi),(1,360))))\n e1=np.vstack((zeros,one))\n e2=e1[::-1] \t\n \n s1S=Vh.dot(S)\n s1e1=Vh.dot(e1)\n s1e2=Vh.dot(e2)\n\n s2S=np.diag(s).dot(s1S)\n s2e1=np.diag(s).dot(s1e1)\n s2e2=np.diag(s).dot(s1e2)\n \n s3S=U.dot(s2S)\n s3e1=U.dot(s2e1)\n s3e2=U.dot(s2e2)\n \n \n \n \n\n \n \n plt.subplot(221)\n plt.plot(S[0],s3S[1],\"b-.\",lw=2)\n plt.plot(e1[0],s3e1[1],\"g-.\",lw=2)\n plt.plot(e2[0],s3e2[1],\"r-.\",lw=2)\n \n \n \n plt.subplot(222)\n plt.plot(s1S[0],s3S[1],\"b-.\",lw=2)\n plt.plot(s1e1[0],s3e1[1],\"g-.\",lw=2)\n plt.plot(s1e2[0],s3e2[1],\"r-.\",lw=2)\n \n \n plt.subplot(223)\n plt.plot(s2S[0],s3S[1],\"b-.\",lw=2)\n plt.plot(s2e1[0],s3e1[1],\"g-.\",lw=2)\n plt.plot(s2e2[0],s3e2[1],\"r-.\",lw=2)\n \n plt.subplot(224) \n \n plt.plot(s3S[0],s3S[1],\"b-.\",lw=2)\n plt.plot(s3e1[0],s3e1[1],\"g-.\",lw=2)\n plt.plot(s3e2[0],s3e2[1],\"r-.\",lw=2)\n \n plt.show()", "def svd(matrix, rank=None):\n if matrix.ndim != 2:\n raise ValueError('Input should be a two-dimensional array. 
matrix.ndim is {} != 2'.format(matrix.ndim))\n dim_1, dim_2 = matrix.shape\n if dim_1 <= dim_2:\n min_dim = dim_1\n else:\n min_dim = dim_2\n\n if rank is None or rank >= min_dim:\n # Default on standard SVD\n U, S, V = scipy.linalg.svd(matrix)\n U, S, V = U[:, :rank], S[:rank], V[:rank, :]\n return U, S, V\n\n else:\n # We can perform a partial SVD\n # First choose whether to use X * X.T or X.T *X\n if dim_1 < dim_2:\n S, U = scipy.sparse.linalg.eigsh(np.dot(matrix, matrix.T), k=rank, which='LM')\n S = np.sqrt(S)\n V = np.dot(matrix.T, U * 1 / S[None, :])\n else:\n S, V = scipy.sparse.linalg.eigsh(np.dot(matrix.T, matrix), k=rank, which='LM')\n S = np.sqrt(S)\n U = np.dot(matrix, V) * 1 / S[None, :]\n\n # WARNING: here, V is still the transpose of what it should be\n U, S, V = U[:, ::-1], S[::-1], V[:, ::-1]\n return U, S, V.T", "def svd_approx(A, s):\r\n U, Si, Vh = la.svd(A)\r\n zeros = list(Si).count(0)\r\n #raise error if there are not enough nonzero singular values\r\n if len(Si) - zeros < s:\r\n raise ValueError(\"s > rank(A)\")\r\n #Only save first s singular values for Si\r\n Si_hat = np.diag(Si[:s])\r\n #Save first s columns of U\r\n U_hat = U[:,:s]\r\n #Save first s rows of Vh\r\n Vh_hat = Vh[:s,:]\r\n\r\n # return new A and num of entries needed\r\n return U_hat@Si_hat@Vh_hat, U_hat.size+s+Vh_hat.size", "def visualize_svd(A):\r\n theta = np.linspace(0,2*np.pi,200)\r\n #Set S as unit circle\r\n S = np.array([np.cos(theta), np.sin(theta)])\r\n #Set E as orthogonal basis\r\n E = np.array([[1,0,0],[0,0,1]])\r\n U,Si,Vh = la.svd(A)\r\n Si = np.diag(Si)\r\n\r\n #plot original S and E\r\n first = plt.subplot(221)\r\n first.plot(S[0], S[1])\r\n first.plot(E[0], E[1])\r\n first.axis(\"equal\")\r\n\r\n #rotate S,E and plot S,E\r\n second = plt.subplot(222)\r\n vhs = Vh@S\r\n vhe = Vh@E\r\n second.plot(vhs[0], vhs[1])\r\n second.plot(vhe[0], vhe[1])\r\n second.axis(\"equal\")\r\n\r\n #scale S,E and plot S,E\r\n third = plt.subplot(223)\r\n sivhs = Si@vhs\r\n sivhe = Si@vhe\r\n third.plot(sivhs[0],sivhs[1])\r\n third.plot(sivhe[0],sivhe[1])\r\n third.axis([-4,4,-4,4])\r\n\r\n #rotate S,E and plot S,E\r\n fourth = plt.subplot(224)\r\n usivhs = U@sivhs\r\n usivhe = U@sivhe\r\n fourth.plot(usivhs[0],usivhs[1])\r\n fourth.plot(usivhe[0],usivhe[1])\r\n fourth.axis([-4,4,-4,4])\r\n\r\n plt.show()", "def truncated_svd(A,k=None):", "def svd(matrix, approach):\n\n # Getting the eigenvalues and vectors of transpose(A) * A for V and Sigma\n a = mat_multiply(transpose(matrix), matrix)\n if approach == \"qr\":\n V, sigma, iterations = qr_eig(a)\n else:\n V, sigma, iterations = eig(a)\n\n # Sorting singular values and the colums of V accordingly\n V = transpose(V)\n\n singular_values = list()\n sorted_V = list()\n\n r = 0\n for i in range(rows(sigma)):\n singular_values.append([(sigma[i][i]), i])\n if sigma[i][i] > math.exp(-8):\n r += 1\n\n singular_values.sort(key=first_item, reverse=True)\n\n sigma_r = eye(r)\n sigma_r_inv = eye(r)\n\n # Constructing the sorted U and sigma matrices\n i, j = 0, 0\n for value in singular_values:\n if value[0] > math.exp(-8):\n sorted_V.append(V[value[1]])\n sigma_r[j][j] = value[0] ** (1 / 2)\n sigma_r_inv[j][j] = 1 / (value[0] ** (1 / 2))\n j += 1\n i += 1\n\n # Constructing U by multiplying V and sigma inverse\n sorted_U = mat_multiply(mat_multiply(matrix, transpose(sorted_V)), sigma_r_inv)\n\n return (sorted_U, sigma_r, sorted_V, r, iterations)", "def approximate_svd(A, U, S, V, k=10, params=None):\n\n \n A = lib.adapt(A)\n U = lib.adapt(U)\n S = lib.adapt(S)\n V = 
lib.adapt(V)\n\n Aobj = A.ptr()\n Uobj = U.ptr()\n Sobj = S.ptr()\n Vobj = V.ptr()\n\n if (Aobj == -1 or Uobj == -1 or Sobj == -1 or Vobj == -1):\n raise errors.InvalidObjectError(\"Invalid/unsupported object passed as A, U, S or V \")\n\n # use default params in case none are provided\n if params == None:\n params = SVDParams()\n params_json = params.str() + '\\0'\n\n lib.callsl(\"sl_approximate_svd\", \\\n A.ctype(), Aobj, \\\n U.ctype(), Uobj, \\\n S.ctype(), Sobj, \\\n V.ctype(), Vobj, \\\n k, params_json, lib.ctxt_obj)\n\n A.ptrcleaner()\n U.ptrcleaner()\n S.ptrcleaner()\n V.ptrcleaner()\n\n return (U.getobj(), S.getobj(), V.getobj())", "def svd_shrink(X, tau):\n U,s,V = np.linalg.svd(X, full_matrices=False)\n return np.dot(U, np.dot(np.diag(shrink(s, tau)), V))", "def update_S(self):\n S = svd_S(self.C)\n if self.S[self.pC].size != S.size:\n self.S[self.pC] = self._one_S(S.size)\n dS = (self.S[self.pC] - S)\n dS = np.sqrt(np.sum(dS**2))\n self.S[self.pC] = S\n return dS", "def tensor_svd_denoise(data, rank):\n # Case when SVD ranks are fed in the input, call svd_HO function to denoise\n\n if len(data.shape) == 3: # hyperspectral data case, directly feed data to svd_HO function\n [X, _, _] = svd_HO(data, rank)\n if len(data.shape) == 4: # Original 4D STEM data case, unfold reciprocal space dimensions into one dimension then feed to svd_HO function\n data = np.reshape(data, [data.shape[0], data.shape[1], data.shape[2]*data.shape[3]])\n [X, _, _] = svd_HO(data, rank)\n \n return X", "def svd_approx(A, k):", "def visualize_svd(A, n = 200):\n M = unit(n)\n E = np.array([[1,0],[0,0],[0,1]])\n E = E.T\n U, S, Vh = la.svd(A)\n S = np.diag(S)\n \n #No changes\n myPlt = plt.subplot(221)\n myPlt.plot(M[0], M[1], color = \"green\")\n myPlt.plot(E[0], E[1], color = \"red\")\n myPlt.axis(\"equal\")\n #1 multiplication\n B = Vh@M\n E = Vh@E\n myPlt = plt.subplot(222)\n myPlt.plot(B[0], B[1], color = \"green\")\n myPlt.plot(E[0], E[1], color = \"red\")\n myPlt.axis(\"equal\")\n #2 multiplications\n C = S@B\n E = S@E\n myPlt = plt.subplot(223)\n myPlt.plot(C[0], C[1], \"green\")\n myPlt.plot(E[0], E[1], color = \"red\")\n myPlt.axis(\"equal\")\n #3 multiplication\n D = U@C\n E = U@E\n myPlt = plt.subplot(224)\n myPlt.plot(D[0],D[1], color = \"green\")\n myPlt.plot(E[0], E[1], color = \"red\")\n myPlt.axis(\"equal\")\n return\n raise NotImplementedError(\"Problem 2 Incomplete\")", "def svdSoft(A, lmbda, k):\n if not scipy.sparse.issparse(A): \n raise ValueError(\"A must be a sparse matrix\")\n \n #U, s, V = scipy.sparse.linalg.svds(A, k)\n U, s, V = sparsesvd(A, k) \n U = U.T\n inds = numpy.flipud(numpy.argsort(s))\n U, s, V = Util.indSvd(U, s, V, inds) \n \n #Soft threshold \n s = s - lmbda\n s = numpy.clip(s, 0, numpy.max(s))\n\n return U, s, V", "def svd(self, leg=None, compute_uv=True, maxD=0):\n # TODO: Can I do this shit cleaner?\n from networkx.algorithms.components import is_connected, \\\n number_connected_components, connected_components\n\n SUid = self.getSymmIds('SU(2)')\n if leg:\n if leg not in self.internallegs:\n raise ValueError('Leg is not an internal one')\n\n U = Tensor(self.symmetries)\n V = Tensor(self.symmetries)\n S = {'symmetries': self.symmetries, 'leg': leg}\n\n lcid = self.coupling_id(leg)\n Skeys = set(key[lcid[0]][lcid[1]] for key in self)\n\n netw = self.get_couplingnetwork()\n assert is_connected(netw.to_undirected())\n for u, v, ll in netw.edges(data='leg'):\n if leg == ll:\n netw.remove_edge(u, v)\n break\n assert number_connected_components(netw.to_undirected()) == 
2\n coupls = [tuple(c for c in G)\n for G in connected_components(netw.to_undirected())]\n Uid = [True in [(leg, False) in c for c in coupl]\n for coupl in coupls].index(True)\n Vid = [True in [(leg, True) in c for c in coupl]\n for coupl in coupls].index(True)\n assert Uid != Vid\n\n U.coupling = coupls[Uid]\n V.coupling = coupls[Vid]\n\n assert leg in U.indexes and leg in V.indexes\n U._indexes.remove(leg)\n U._indexes.append(leg)\n V._indexes.remove(leg)\n V._indexes.insert(0, leg)\n\n permindexes = U.indexes[:-1] + V.indexes[1:]\n assert set(permindexes) == set(self.indexes)\n transp = np.array([self.indexes.index(ll) for ll in permindexes])\n Uids = transp[:len(U.indexes) - 1]\n Vids = transp[len(U.indexes) - 1:]\n Umap = [self.coupling.index(c) for c in U.coupling]\n Vmap = [self.coupling.index(c) for c in V.coupling]\n\n iU = [self.coupling_id(c) for c in U.internallegs]\n iV = [self.coupling_id(c) for c in V.internallegs]\n\n def pref(key, mp):\n return np.prod(\n [np.sqrt(key[x][y][ii] + 1) for ii in SUid for x, y in mp])\n\n blockdict = {k: {} for k in Skeys}\n for k, b in self.items():\n blockdict[k[lcid[0]][lcid[1]]][k] = \\\n (b, pref(k, iU), pref(k, iV))\n\n for Skey in Skeys:\n dict_part = blockdict[Skey]\n Sprf = np.prod([np.sqrt(Skey[ii] + 1) for ii in SUid])\n\n Uslice, Ucur = {}, 0\n Vslice, Vcur = {}, 0\n for k, (b, Up, Vp) in dict_part.items():\n Ukey = tuple([k[i] for i in Umap])\n Vkey = tuple([k[i] for i in Vmap])\n\n if Ukey not in Uslice:\n Udims = [b.shape[ii] for ii in Uids]\n Ud = np.prod(Udims)\n Uslice[Ukey] = slice(Ucur, Ucur + Ud), Udims, Up\n Ucur += Ud\n if Vkey not in Vslice:\n Vdims = [b.shape[ii] for ii in Vids]\n Vd = np.prod(Vdims)\n Vslice[Vkey] = slice(Vcur, Vcur + Vd), Vdims, Vp\n Vcur += Vd\n\n memory = np.zeros((Ucur, Vcur))\n\n for k, (b, Up, Vp) in dict_part.items():\n Ukey = tuple([k[i] for i in Umap])\n Vkey = tuple([k[i] for i in Vmap])\n uslice, _, _ = Uslice[Ukey]\n vslice, _, _ = Vslice[Vkey]\n ud = uslice.stop - uslice.start\n vd = vslice.stop - vslice.start\n memory[uslice, vslice] = \\\n np.transpose(b, transp).reshape(ud, vd) / (Up * Vp)\n\n # Finally do SVD\n if compute_uv:\n u, s, v = np.linalg.svd(memory, full_matrices=False)\n else:\n s = np.linalg.svd(memory, compute_uv=False)\n S[Skey] = s / Sprf / Sprf\n continue\n\n S[Skey] = s / Sprf / Sprf\n for key, (sl, dims, Up) in Uslice.items():\n U[key] = u[sl, :].reshape(*dims, -1) * Up * Sprf\n for key, (sl, dims, Vp) in Vslice.items():\n V[key] = v[:, sl].reshape(-1, *dims) * Vp * Sprf\n\n if compute_uv:\n return self.truncate_svd(U, S, V, maxD)\n else:\n return S\n else:\n # Plain svd of a R matrix. 
Only calculates the singular values\n if len(self.coupling) != 1:\n raise ValueError(\n 'For SVD with no leg specified, the tensor should be a '\n 'simple one with only 1 coupling to the vacuum.')\n try:\n Sid = [l.vacuum for l in self.indexes].index(True)\n _, Scid = self.coupling_id(self.indexes[Sid])\n except ValueError:\n raise ValueError(\n 'For SVD with no leg specified, the tensor should be a '\n 'simple one with only 1 coupling to the vacuum.')\n\n if compute_uv:\n raise ValueError('For SVD with no leg only allowed for '\n 'calculating the singular values themselves.')\n\n S = {'symmetries': self.symmetries}\n Ucid = 0 if Scid != 0 else 1\n Vcid = 2 if Scid != 2 else 1\n\n def prefact(key):\n return np.prod([np.sqrt(k[ii] + 1) for ii in SUid])\n\n for key, block in self.items():\n k = key[0][Ucid]\n assert k == key[0][Vcid]\n S[k] = np.linalg.svd(np.squeeze(block, axis=Sid) / prefact(k),\n compute_uv=False)\n return S", "def __init__(self, A, rank=0):\r\n _u, _s, _v = np.linalg.svd(A, full_matrices=0)\r\n \r\n self.rank = rank\r\n\r\n self.U = _u[:,:self.rank].copy()\r\n self.S = _s[:self.rank].copy()\r\n self.SI = np.matrix(np.diag(self.S)).getI()\r\n self.VT = _v[:self.rank,:].copy()\r\n \r\n self._var = [ e/(_s**2).sum() for e in (_s**2).cumsum() ][self.rank-1]", "def _SVD_to_A(U, S, VT):\n A = np.dot(U, _dot_D_A(S, VT))\n return A", "def reduce_svd(embeddings, seed=0):\n svd = TruncatedSVD(n_components=2, n_iter=10, random_state=seed)\n return svd.fit_transform(embeddings)", "def svt(X, tau):\n U, S, Vt = la.svd(X,full_matrices=False)\n Xs = np.dot(U * st(S,tau), Vt)\n return Xs", "def svd_approx(A, k):\n U,s,Vh=la.svd(A,full_matrices=False)\n return U[:,:k].dot(np.diag(s[:k])).dot(Vh[:k,:])", "def matrix_svd(\n self,\n chis=None,\n eps=0,\n print_errors=\"deprecated\",\n break_degenerate=False,\n degeneracy_eps=1e-6,\n sparse=False,\n trunc_err_func=None,\n ):\n if print_errors != \"deprecated\":\n msg = (\n \"The `print_errors` keyword argument has been deprecated, \"\n \"and has no effect. Rely instead on getting the error as a \"\n \"return value, and print it yourself.\"\n )\n warnings.warn(msg)\n chis = self._matrix_decomp_format_chis(chis, eps)\n maxchi = max(chis)\n assert self.defval == 0\n assert self.invar\n\n # SVD each sector at a time.\n # While doing so, also keep track of a list of all singular values, as\n # well as a heap that gives the negative of the largest singular value\n # in each sector. 
These will be needed later when deciding how to\n # truncate the decomposition.\n svds = {}\n dims = {}\n minus_next_sings = []\n all_sings = []\n for k, v in self.sects.items():\n if 0 in v.shape:\n shp = v.shape\n m = min(shp)\n u = np.empty((shp[0], m), dtype=self.dtype)\n s = np.empty((m,), dtype=np.float_)\n v = np.empty((m, shp[1]), dtype=self.dtype)\n else:\n if sparse and maxchi < min(v.shape) - 1:\n u, s, v = spsla.svds(\n v, k=maxchi, return_singular_vectors=True\n )\n order = np.argsort(-s)\n u = u[:, order]\n s = s[order]\n v = v[order, :]\n else:\n u, s, v = np.linalg.svd(v, full_matrices=False)\n svd = (s, u, v)\n svds[k] = svd\n dims[k] = 0\n sings = svd[0]\n all_sings.append(sings)\n if 0 not in sings.shape:\n heapq.heappush(minus_next_sings, (-sings[0], k))\n try:\n all_sings = np.concatenate(all_sings)\n except ValueError:\n all_sings = np.array((0,))\n\n if sparse:\n norm_sq = self.norm_sq()\n else:\n norm_sq = None\n\n # Figure out what bond dimension to truncate to, how this bond\n # dimension is distributed over the different sectors, and what the\n # truncation error is.\n chi, dims, rel_err = type(self)._find_trunc_dim(\n all_sings,\n svds,\n minus_next_sings,\n dims,\n chis=chis,\n eps=eps,\n break_degenerate=break_degenerate,\n degeneracy_eps=degeneracy_eps,\n trunc_err_func=trunc_err_func,\n norm_sq=norm_sq,\n )\n\n # Truncate each block and create the dim for the new index.\n new_dim = []\n new_qim = []\n svds = {k: v for k, v in svds.items() if dims[k] > 0}\n for k, v in svds.items():\n d = dims[k]\n if d > 0:\n new_dim.append(d)\n new_qim.append(k[0])\n svds[k] = (v[0][:d], v[1][:, :d], v[2][:d, :])\n else:\n del svds[k]\n\n # Initialize U, S, V.\n d = self.dirs[0]\n U = type(self)(\n [self.shape[0], new_dim],\n qhape=[self.qhape[0], new_qim],\n dirs=[d, -d],\n qodulus=self.qodulus,\n dtype=self.dtype,\n charge=0,\n )\n S = type(self)(\n [new_dim],\n qhape=[new_qim],\n dirs=[d],\n qodulus=self.qodulus,\n dtype=np.float_,\n invar=False,\n charge=0,\n )\n V = type(self)(\n [new_dim, self.shape[1]],\n qhape=[new_qim, self.qhape[1]],\n dirs=[d, self.dirs[1]],\n qodulus=self.qodulus,\n dtype=self.dtype,\n charge=self.charge,\n )\n\n # Set the blocks of U, S and V.\n for k, v in svds.items():\n k_U = (k[0], k[0])\n S[(k[0],)] = v[0]\n U[k_U] = v[1]\n V[k] = v[2]\n\n return U, S, V, rel_err", "def approximate_symmetric_svd(A, S, V, k=10, params=None):\n\n \n A = lib.adapt(A)\n S = lib.adapt(S)\n V = lib.adapt(V)\n\n Aobj = A.ptr()\n Sobj = S.ptr()\n Vobj = V.ptr()\n\n if (Aobj == -1 or Sobj == -1 or Vobj == -1):\n raise errors.InvalidObjectError(\"Invalid/unsupported object passed as A, S or V \")\n\n # use default params in case none are provided\n if params == None:\n params = SVDParams()\n params_json = params.str() + '\\0'\n\n lib.callsl(\"sl_approximate_symmetric_svd\", \\\n A.ctype(), Aobj, \\\n S.ctype(), Sobj, \\\n V.ctype(), Vobj, \\\n k, params_json, lib.ctxt_obj)\n\n A.ptrcleaner()\n S.ptrcleaner()\n V.ptrcleaner()\n\n return (S.getobj(), V.getobj())", "def tsvd(A, threshold=0.99999, avoid_pathological=True):\n M,N = A.shape\n full_matrices = False\n\n if is_int(threshold):\n # Assume specific number is requested\n r = threshold\n assert 1 <= r <= max(M,N)\n if r > min(M,N):\n full_matrices = True\n r = min(M,N)\n\n U,s,VT = sla.svd(A, full_matrices)\n\n if isinstance(threshold,float):\n # Assume proportion is requested\n r = truncate_rank(s,threshold,avoid_pathological)\n\n # Truncate\n U = U [:,:r]\n VT = VT[ :r]\n s = s [ :r]\n return U,s,VT", "def 
run(self, dim):\n conv_mat = np.dot(np.transpose(self._data), self._data)\n u, d, v = np.linalg.svd(conv_mat)\n return u[:, :dim], d[:dim], v[:dim]", "def invert_L1_svd():", "def compute_svd(self,data,k):\n m, n =data.shape\n n = self.comm1.allreduce(n)\n print(m,n)\n if k==-1:\n k = min(m,n)\n args = parse()\n args.m,args.n,args.k,args.comm = m,n,k,self.comms\n args.eps = np.finfo(data.dtype).eps\n if args.m<args.n: args.p_r,args.p_c = 1,self.size\n dsvd = DistSVD(args, data)\n singularValues, U, V = dsvd.svd()\n rel_error = dsvd.rel_error(U, np.diag(singularValues), V)\n if self.global_rank==0: print('relative error is:', rel_error )\n return singularValues,U,V,rel_error", "def ssa_decompose(y, dim):\n n = len(y)\n t = n - (dim - 1)\n\n yy = linalg.hankel(y, np.zeros(dim))\n yy = yy[:-dim + 1, :] / np.sqrt(t)\n\n # here we use gesvd driver (as in Matlab)\n _, s, v = linalg.svd(yy, full_matrices=False, lapack_driver='gesvd')\n\n # find principal components\n vt = np.matrix(v).T\n pc = np.matrix(yy) * vt\n\n return np.asarray(pc), s, np.asarray(vt)", "def v_o(A,vd):\n return A*vd", "def evd_spd_decomposition(P):\n\t\n\t# Assert Matrix P is symetric\n\tassert check_symmetric(P)\t\n\n\t# singular value decomposition\n\tL, Q = np.linalg.eig(P)\n\n\t#if L and Q returned in incorrect order\n\t#L = np.sort(L)\n\t#Q = Q[:, L.argsort()]\n\n\t# Create matrix W = Vtsqrt(diagnol(D))\n\tM = np.dot(Q, np.sqrt(np.diag(L)))\n\n\treturn M", "def spca(a, s, k, d):\n\n p = a.shape[0]\n X = np.zeros((p, k))\n\n for l in range(k):\n # 1\n [w, V] = linalg.eigh(a)\n idx = w.argsort()\n w = w[idx]\n V = V[:, idx]\n\n # 2\n xprime, value = spannogram(V[:, -d:], w[-d:], s=s)\n X[:, l] = xprime[:, 0]\n\n # 3\n idx = np.abs(xprime).argsort(axis=0)\n for i in idx[-s:]:\n a[i, :] = 0\n a[:, i] = 0\n\n return X", "def test_svd(self):\n eigenvectors, eigenvalues = self.svd.run(self.test_matrix)\n\n self.assertEqual(eigenvectors.shape, (100, 100))\n self.assertEqual(eigenvalues.shape, (100,))", "def singular_decomp(A):\n # Initialization\n n, m = A.shape\n U = np.zeros((n, m), dtype='float64')\n\n # Diagonalization of A^T * A\n rot, e, V = eigen.diag(np.dot(np.transpose(A), A))\n\n # Calculate U\n U = np.dot(A, V)\n for i in range(m):\n e[i] = np.sqrt(e[i])\n U[:, i] /= e[i]\n\n return U, e, V", "def rebuild_svd(self, U, S, V, k): # [5pts]\n \n N,D = U.shape[0],V.shape[0]\n \n \n if U.ndim == 3:\n Xrebuild = np.zeros((N,D,3))\n for i in range(3):\n U_temp = U[:,0:k,i]\n S_temp = S[:,i]\n S_temp = np.diag(S_temp[0:k])\n V_temp = V[0:k,:,i]\n Xrebuild_temp = U_temp@S_temp@V_temp\n Xrebuild[:,:,i] = Xrebuild_temp\n else:\n U_new = U[:,0:k]\n S_new = np.diag(S[0:k])\n V_new = V[0:k,:]\n Xrebuild = U_new@S_new@V_new\n\n return Xrebuild", "def SingularValueDecomp(x, n_comp):\r\n # 7.1 Create an object to perform SVD\r\n svd = TruncatedSVD(n_components = n_comp)\r\n # 7.2 Fit and transform\r\n g = svd.fit_transform(x)\r\n # 7.3 How much variance is explained per-component\r\n ev1 = svd.explained_variance_ratio_\r\n # Return a tuple of three values\r\n return (g, svd, ev1)", "def GenerateS(s): \n for i in range(len(x_train)):\n for j in range(len(SVs)):\n index = int(SVs[j])\n s[i] = s[i] + alpha[index]*y_train[index] * Kernel(i, index) # (DotProduct(i, index)+1)**5\n return s", "def calc_image_svd(img:list):\r\n img_r = img.swapaxes(1, 2).swapaxes(0, 1)\r\n svd_u, svd_s, svd_vh = np.linalg.svd(img_r, full_matrices=True)\r\n return [svd_u, svd_s, svd_vh]", "def truncated_svd(A,k=None):\n \n \n \n AHA=np.conj(A).T.dot(A)\n 
evals,evecs=la.eig(AHA)\n order=np.argsort(evals)\n\n evals=evals[order][::-1].copy()\n evecs=evecs.T[order][::-1].copy()\n m,n=AHA.shape\n \n tol=1e-12\n Vh=[]\n for i in xrange(0,m):\n\t\t if np.abs(evals[i])>=tol:\n\t \t\tVh+=[evecs[i]]\n \n Vh=np.array(Vh)\n s=np.sqrt(evals[:Vh.shape[0]])\n U=[]\n for i in xrange(0,len(s)):\n U+=[(1./s[i])*A.dot(Vh[i])]\n U=np.array(U).T\n \n return U,s,Vh", "def posdef_eig_svd(mat):\n evals, evecs, _ = tf.svd(mat)\n\n return evals, evecs", "def svd_inverse(matrix):\n U, S, V = np.linalg.svd(matrix)\n\n dim = S.shape[0]\n S = la.diagsvd(S, dim, dim)\n V = np.matrix(V)\n U = np.matrix(U)\n\n # Compute the inverse SVD\n V_dag_S = np.dot(V.getH(), np.linalg.inv(S))\n V_dag_S_U_dag = np.dot(V_dag_S, U.getH())\n\n return V_dag_S_U_dag", "def reduce_dimentions(users):\n\tsvd = TruncatedSVD(n_components=300, n_iter=10, random_state=42)\n\tsvd.fit(users)\n\tusers_svd = svd.transform(users)\n\tusers_svd = pd.DataFrame(users_svd, index=users.index)\n\treturn users_svd", "def svd_HO(data, rank, max_iter=10):\n svd_iter = 10\n data_shape = np.shape(data) # p0\n\n # Check that number of dimensions match the number of rank numbers\n if len(data_shape) != len(rank):\n print(\"The rank should be the same size as the data shape\")\n return data, [], []\n\n # Check that for each rank, the product of all the rest ranks are larger than this rank\n for k in range(len(rank)):\n prod = 1\n for i in range(len(rank)):\n if i != k:\n prod = prod * rank[i]\n if rank[k] > prod:\n print(\"The rank does not satisfy requirment of HOOI.\")\n return data, [], []\n\n dimensions = len(data_shape) # d\n ordered_indexes = np.argsort(data_shape) # getting the indicies from min len to max, initialization starts from smallest size\n\n ## Initialize U and Y with SVD\n U = [None] * dimensions # Generate an empty array to save all the U matrices with fixed length\n X = data\n for k in ordered_indexes: # calculating initial SVD\n unfolded = unfold_axis(X, k) # unfolding from the axis with minimum size\n [U[k], _ , _] = fast_svd(unfolded,rank[k],n_iter=svd_iter)\n X = ttm(X, np.transpose(U[k]), k) # This needs to be fixed!\n\n ## Update U with HOOI\n iter_count = 0\n while iter_count < max_iter:\n iter_count += 1\n for k in range(0, dimensions):\n Y = data\n minus_k = list(range(0,dimensions))\n minus_k.remove(k) # every value except for k, seems do it in one step will remove all the elements in the list.\n for j in minus_k:\n Y = ttm(Y, np.transpose(U[j]), j)\n MY = unfold_axis(Y, k)\n [U[k], _, _] = fast_svd(MY, rank[k],n_iter=svd_iter)\n\n ## Use the determined U matrices to calculate core tensor and denoised tensor\n X = data\n for k in ordered_indexes:\n X = ttm(X,np.transpose(U[k]), k) # Check this part.\n S = X # core tensor\n for k in range(0,dimensions):\n X = ttm(X,U[k], k)\n\n return X, U, S", "def svd_transform(vector, indexes):\n\n svd_vector = pandas.DataFrame(TruncatedSVD(n_components=args.svd).fit_transform(X=vector), index=indexes.keys()) #.to_csv(path_or_buf=args.o)\n return svd_vector", "def spcaold(a, s, k, d):\n\n p = a.shape[0]\n X = np.zeros((p, k))\n\n for l in range(k):\n # 1\n [w, V] = linalg.eigh(a)\n idx = w.argsort()\n w = w[idx]\n V = V[:, idx]\n\n # 2\n xprime, value = spannogram(V[:, -d:], w[-d:])\n\n # 4\n idx = np.abs(xprime).argsort(axis=0)\n for i in idx[:-s]:\n xprime[i] = 0\n\n X[:, l] = xprime[:, 0]\n\n # 5\n for i in idx[-s:]:\n a[i, :] = 0\n a[:, i] = 0\n\n return X", "def update_model(X, U, S, k, n, mu,\n svdmethod='full',\n missingmethod='zero'):\n\n if 
len(X) == 0:\n printt(\"Error: No data in X.\")\n return None, None, None, -1, None\n #print('%d items in X' % X.shape[1])\n #print('init U:', U)\n\n # If there is no previous U, and we just got a single item in X,\n # set U to all 0's (degenerate SVD),\n # and return it with mu.\n # (PR #22 sets first value to 1; see decals implementation)\n if len(U) == 0 and X.shape[1] == 1:\n mu = X\n # Do this no matter what. Let mu get NaNs in it as needed.\n U = np.zeros_like(mu)\n U[0] = 1\n S = np.array([0])\n n = 1\n pcts = [1.0]\n return U, S, mu, n, pcts\n\n ###########################################################################\n # Do full SVD of X if this is requested, regardless of what is in U \n # Also, if n = 0 or U is empty, start from scratch\n output_k = False\n if svdmethod == 'full' or len(U) == 0 or n == 0:\n if n == 0:\n if len(U) == 0:\n printt(\"----- initial SVD -----\")\n output_k = True\n else:\n # Reshape so we don't have an empty dimension (yay python)\n U = U.reshape(-1, 1)\n elif len(U) == 0:\n printt(\"WARNING: N (number of items modeled by U) is %d, not zero, but U is empty!\" % n)\n\n # Bootstrap\n if missingmethod == 'ignore':\n printt(\"ERROR: ignore with full is not possible under ordinary circumstances.\")\n printt(\"Use --increm-brand to impute for NaNs.\")\n printt(\"For now, we are filling NaNs with 0.\")\n X = copy.deepcopy(X)\n z = np.where(np.isnan(X))\n X[z] = 0\n\n mu = np.mean(X, axis=1).reshape(-1,1)\n X = X - mu\n U, S, V = linalg.svd(X, full_matrices=False)\n printt('Just did full SVD on %d items.' % X.shape[1])\n #print('X:', X)\n #print('U:', U)\n # Reset U to all 0's if we only have one item in X (degenerate SVD)\n if X.shape[1] == 1:\n U = np.zeros_like(U)\n \n # Keep only the first k components\n S_full = S\n S = S[0:k]\n U = U[:,0:k]\n\n # Update n to number of new items in X\n n = X.shape[1]\n \n ###########################################################################\n # Incremental SVD from Ross\n elif svdmethod == 'increm-ross':\n # Incremental SVD from Ross et al. 2008\n # \"Incremental Learning for Robust Visual Tracking\"\n # based on Lim and Ross's sklm.m implementation in MATLAB.\n\n # This method DOES NOT handle missing values.\n if missingmethod == 'ignore':\n print('ERROR: increm-ross cannot handle missing values.')\n print('If they are present, try svdmethod=increm-brand')\n print(' or use missingmethod=zero to zero-fill.')\n print('If there are no missing values, specify missingmethod=none.')\n sys.exit(1)\n\n n_new = X.shape[1]\n \n # Compute mean\n # Weirdly, the later 'X-mu_new' is MUCH faster if you reshape as shown.\n # This is because of differences in the way numpy treats a 1d array versus a 2d column.\n mu_new = np.mean(X, axis=1).reshape(-1,1)\n\n # Subtract the mean, append it as a column vector, and update mu\n # X - mu_new will be zero if X has only 1 item\n mu_old = mu\n # New mu is a weighted sum of old and new mus\n mu = (n * mu_old + n_new * mu_new) / (n + n_new)\n B = np.hstack((X - mu,\n math.sqrt(n_new * n/float(n_new+n)) * \\\n (mu_old - mu_new)))\n printt(\"Now tracking mean for %d -> %d items; mu.min %f, mu.max %f \" % \\\n (n, n+n_new, np.nanmin(mu), np.nanmax(mu)))\n n = n + n_new\n\n if S.all() == 0:\n npcs = U.shape[1]\n diagS = np.zeros((npcs, npcs))\n else:\n diagS = np.diag(S)\n\n # I don't think this is right. 
At this point B is the augmented\n # matrix rather than the single observation.\n proj = np.dot(U.T, B)\n reproj_err = B - np.dot(U, proj)\n\n # to get orthogonal form of reproj_err\n # This should return q with dimensions [d(X) by n_new+1], square\n q, dummy = linalg.qr(reproj_err, mode='full')\n # print('q.shape should be 7x2: ', q.shape)\n Q = np.hstack((U, q))\n\n # From Ross and Lim, 2008\n # R = [ [ Sigma, U.T * X ] [ 0, orthog. component of reproj error ] ]\n k_now = diagS.shape[0]\n new_dim = k_now + n_new + 1\n R = np.zeros((new_dim, new_dim))\n R[0:k_now,0:k_now] = diagS\n R[0:k_now,k_now:] = proj\n orthog_reproj_err = np.dot(q.T, reproj_err)\n R[k_now:, k_now:] = orthog_reproj_err\n \n # Perform SVD of R. Then finally update U.\n U, S, V = linalg.svd(R, full_matrices=False)\n printt('Just did increm-ross SVD on %d items.' % n)\n\n U = np.dot(Q, U)\n \n # Keep only the first k components\n U = U[:,0:min([n,k])]\n S_full = S\n S = S[0:min([n,k])]\n\n ###########################################################################\n # Incremental SVD from Brand\n elif svdmethod == 'increm-brand':\n # Pulled out James's attempt to handle NaNs into\n # increm-brand-james.py. Starting over from scratch here.\n n_new = X.shape[1]\n\n if n_new != 1:\n print(\"WARNING: increm-brand will probably only work by adding one item at a time.\")\n input('\\nPress enter to continue or ^C/EOF to exit. ')\n\n if missingmethod == 'ignore':\n # 1. Update mu\n mu_old = mu\n mu_new = X\n\n # Be careful! For any pre-existing NaNs in mu,\n # let mu_new fill them in. Can't get any worse!\n naninds = np.where(np.isnan(mu_old))[0]\n if naninds.size > 0:\n mu_old[naninds,0] = mu_new[naninds,0]\n # And likewise for mu_new -- fill with good values from mu_old.\n naninds = np.where(np.isnan(mu_new))[0]\n if naninds.size > 0:\n mu_new[naninds,0] = mu_old[naninds,0]\n # At this point, the only NaNs that should appear are\n # values that were NaN for both mu and X to start with.\n # They will stay NaN and that's okay.\n \n # New mu is a weighted sum of old and new mus\n mu = (n * mu_old + n_new * mu_new) / (n + n_new)\n printt(\"Now tracking mean for %d -> %d items; mu.min %f, mu.max %f \" % \\\n (n, n+n_new, np.nanmin(mu), np.nanmax(mu)))\n n = n + n_new\n\n # 2. Subtract off the mean\n X = X - mu\n\n # 3. Compute L, the projection of X onto U\n # Note: this will only work for a single item in X\n goodinds = np.where(~np.isnan(X))[0]\n #print('X: %d of %d are good.' % (len(goodinds), X.shape[0]))\n\n diagS = np.diag(S)\n # This is Brand's method, which involves S:\n L = np.dot(diagS,\n np.dot(np.linalg.pinv(np.dot(U[goodinds,:],\n diagS)),\n X[goodinds,:]))\n # Simplified version that does not use S (but is probably wrong):\n #L = np.dot(U[goodinds,:].T,\n # X[goodinds,:])\n # Top row of the Q matrix (eqn 12, Brand 2002)\n Q1 = np.hstack([diagS, L])\n\n # 4. Compute J, the orthogonal basis of H, which is\n # the component of X orthog to U (i.e., unrepresentable direction)\n # 5. Compute K, the projection of X onto J (i.e., unrep. content)\n K = linalg.norm(X[goodinds,:] - np.dot(U[goodinds,:],\n np.dot(U[goodinds,:].T,\n X[goodinds,:])))\n # H = X - UL\n J = np.zeros((U.shape[0], 1))\n J[goodinds] = np.dot(K,\n np.linalg.pinv(X[goodinds,:] -\n np.dot(U[goodinds,:],\n L))).T\n \n # Bottom row of Q matrix (eqn 12, Brand 2002)\n Q2 = np.hstack([np.zeros([1, len(S)]), np.array(K).reshape(1,1)])\n Q = np.vstack([Q1, Q2])\n\n # 6. Take the SVD of Q\n Uq, Sq, Vq = linalg.svd(Q, full_matrices=False)\n\n # 7. 
Update U and S (eqn 4, Brand 2002)\n # Note: Since J is zero-filled for badinds, now U is too.\n # Alternatively, we give J NaNs and let them get into U as well.\n # I think that is a worse idea though.\n U = np.dot(np.hstack([U, J]), Uq)\n S = Sq\n # Updating V requires knowing old V,\n # but we don't need the new one either so it's okay to skip.\n\n printt('Just did increm-brand SVD on %d items.' % n)\n \n ############# end ###########\n \n else: # No missing values (or not 'ignore')\n # 1. Update mu\n mu_old = mu\n mu_new = X\n # New mu is a weighted sum of old and new mus\n mu = (n * mu_old + n_new * mu_new) / (n + n_new)\n n = n + n_new\n\n # 2. Subtract off the mean\n X = X - mu\n\n # 3. Compute L, the projection of X onto U\n L = np.dot(U.T, X)\n Q1 = np.hstack([np.diag(S), L])\n\n # 4. Compute J, the orthogonal basis of H, which is\n # the component of X orthog to U (i.e., unrepresentable direction)\n # 5. Compute K, the projection of X onto J (i.e., unrep. content)\n JK = X - np.dot(U, L)\n (J, K) = linalg.qr(JK)\n\n Q2 = np.hstack([np.zeros([1, len(S)]), np.array(K).reshape(1,1)])\n Q = np.vstack([Q1, Q2])\n\n # 6. Take the SVD of Q\n Uq, Sq, Vq = linalg.svd(Q, full_matrices=False)\n\n # 7. Update U and S (eqn 4, Brand 2002)\n U = np.dot(np.hstack([U, J]), Uq)\n S = Sq\n # V requires knowing old V,\n # but we don't need the new one either so it's okay.\n \n printt('Just did regular increm SVD on %d items.' % n)\n\n # Keep only the first k components\n U = U[:,0:min([n,k])]\n S = S[0:min([n,k])]\n\n Usum = U.sum(1)\n\n\n ###########################################################################\n # We have a bad svdmethod, but somehow didn't catch it earlier.\n else:\n printt(\"504: Bad Gateway in protocol <Skynet_authentication.exe>\")\n return None, None, None, None, None\n\n indivpcts = None\n\n # This only works if a full SVD was done\n if (svdmethod == 'full' and output_k and opts['k_var'] == -773038.0):\n # Calculate percent variance captured by each \n cumsum = np.cumsum(S_full)\n #print(cumsum.shape)\n if cumsum[-1] != 0:\n indivpcts = S / cumsum[-1]\n indivpcts = indivpcts[0:k] # truncate to first k\n cumpercents = cumsum / cumsum[-1]\n else:\n indivpcts = []\n\n # Calculate percent variance captured\n if k >= cumsum.shape[0]:\n printt('Cannot estimate data variance; specified k (%d) exceeds the number of SVs (%d).' 
% (k, cumsum.shape[0]))\n else:\n printt(\"Selected value of k=%d captures %5.2f%% of the data variance\" % \\\n (k, cumpercents[k-1] * 100))\n if opts['pause']: input(\"Press enter to continue\\n\")\n\n #print('U:', U)\n #print('mu:', mu)\n return U, S, mu, n, indivpcts", "def fit_svd(self):\n\n # U has the eigenvectors of G.Gt as columns ()\n # S has square roots of the eigenvalues of G.Gt and Gt.G in its diagonal\n # The square roos of the eigenvalues are called singular values\n # V has the eigenvectors of Gt.G as columns ()\n # full_matrices set to false will set the Vt matrix to a shape m x n\n\n U, S, Vt = linalg.svd(self.norm_matrix, full_matrices=False)\n\n # Compute the eigenvalues\n eig_val = (S ** 2)\n\n # Explained_variance tell us how much of the variance in the data each eigen value explains\n explained_variance = eig_val / (self.n_samples - 1)\n # total_var is the total variance in the data\n total_var = explained_variance.sum()\n explained_variance_ratio = explained_variance / total_var\n # The cumulative sum of all ratios\n ratio_cumsum = np.cumsum(explained_variance_ratio)\n\n # We search in the cumsum for the index of the value which, when added, corresponds to the quality_percent\n # The index of the cumsum gives us the components we need to add to explain X quality percent of our data\n n_components = np.searchsorted(ratio_cumsum, self.quality_percent, side='right') + 1\n\n self.components = Vt[:n_components]\n print(\"The principal components have been calculated using svd\", self.components.shape)\n\n return self.components", "def dV(X):\n return -4 * a * np.power(X, 3) + 2 * b * X", "def S(t, d):\n out = 0.\n for i in range(m): #: Iterate over 0-indexed point indices\n out += P[i][d]*b_n(t, i, V)\n return out", "def incremental_svd(A, qr_flg=False):\n\n m = 256\n n = 7291\n\n n0 = 256\n\n if A.shape[0] != m or A.shape[1] != n: raise ValueError('Error: incorrect matrix size')\n\n start = time.clock()\n\n A0 = A[:, :n0]\n U, s, V = ln.svd(A0, full_matrices=False)\n\n # NOTE: s is a vector; np.diag(s) will produce a diagonal matrix\n for i in range(n0, n):\n\n # new matrix is just a single vector (i-th column of A)\n A1 = np.matrix(A[:, i]).T\n\n if qr_flg:\n J, K = ln.qr(A1 - np.dot(np.dot(U, U.T), A1))\n U_, s_, V_ = ln.svd(\n np.vstack((\n np.hstack((np.diag(s), np.dot(U.T, A1))),\n np.hstack((np.zeros((K.shape[0], s.shape[0])), K))\n )),\n full_matrices=False)\n\n # update the result of SVD\n U = np.dot(np.hstack((U, J)), U_)\n\n else:\n U_, s_, V_ = ln.svd(np.hstack((np.diag(s), np.dot(U.T, A1))), full_matrices=False)\n U = np.dot(U, U_)\n\n s = s_\n\n # NOTE: V from svd on NumPy is already transposed\n V = np.dot(V_,\n np.vstack((\n np.hstack((V, np.zeros((V.shape[0], i+1-V.shape[1])))),\n np.hstack((np.zeros((V_.shape[1]-V.shape[0], V.shape[1])), np.eye(V_.shape[1]-V.shape[0], i+1-V.shape[1])))\n ))\n )\n\n # for next computation, update A0\n A0 = np.hstack((A0, A1))\n\n elapsed_time = time.clock() - start\n print 'time:', elapsed_time\n\n return U, s, V", "def computeTruncatedSVD(docTermMatrix, dim=500):\r\n T, S, D = np.linalg.svd(np.transpose(docTermMatrix), full_matrices=False)\r\n\r\n diagS = np.diag(S)\r\n shape = np.shape(diagS)\r\n\r\n if dim <= shape[0] and dim <= shape[1]:\r\n subT = T[:,:dim]\r\n subS = diagS[:dim,:dim]\r\n subD = np.transpose(D)[:,:dim]\r\n else:\r\n subT = T\r\n subS = diagS\r\n subD = np.transpose(D)\r\n\r\n return subT, subS, subD", "def apply_svd(self, n):\n \n ## should really handle svd sensibly if we have multiple traces\n ## 
fitting multiple traces simultaneously requires they all have the\n ## same basis. Could pick the first trace to define the basis\n #svd_trace, s, self.rs_vectors = np.linalg.svd(self.traces[0], full_matrices=True)\n #transformed_traces = [svd_trace[:,:n]]\n #if len(self.traces > 1):\n # # haven't tested this at all it's probably a bug filled mess\n # # idea is to represent all the traces with the principle components\n # # defined by the first set of traces\n # transformed_traces += [self.rs_vectors.dot(t)[:,:n] for t in self.traces[1:]] \n\n # or look for svd like transformation to apply the the entire block of traces?\n\n # either way current approach is totally dodgey if fitting against \n # multiple svd transformed traces\n\n transformed_traces = []\n # wavelengths now correspond to principle components\n \n for trace in self.traces:\n U,s,V = np.linalg.svd(trace, full_matrices=True)\n transformed_traces.append(U[:,:n])\n \n self.traces = transformed_traces\n self.wavelengths = np.arange(n)", "def sdot(s):\n\n mu = 398600.4405\n r = np.linalg.norm(s[0:3])\n a = -mu/(r**3)*s[0:3]\n\n p_j2 = j2_pert(s)\n p_drag = drag(s)\n\n a = a+p_j2+p_drag\n return np.array([*s[3:6],*a])", "def compact_svd(A, tol=1e-6):\r\n eigs, vecs = la.eig(A.conj().T@A)\r\n svs = np.sqrt(eigs)\r\n #sort eigenvalues and eigenvectors accordingly\r\n sorter = list(zip(svs,vecs.T))\r\n sorter.sort(reverse=True, key=lambda tup: tup[0])\r\n svs = [x[0] for x in sorter]\r\n vecs = [x[1] for x in sorter]\r\n #find number of nonzero eigenvalues\r\n r_not = svs.count(0)\r\n r = len(svs) - r_not\r\n svs_1 = np.array(svs[:r])\r\n vecs_1 = np.array(vecs[:r])\r\n u_1 = (A@vecs_1)/svs_1\r\n\r\n return u_1, svs_1, vecs_1.conj().T", "def get_singular_values(matrix, n):\n singular_values = None\n u, s, v = svd(matrix)\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n return singular_values", "def fs2ps2D(px, s):\n\t\tsfun = psarclength(px)\t\n\t\treturn sfun-s", "def f_SIRT_D(u,parametres,A):\r\n S = u[:,0]\r\n I = u[:,2]\r\n R = u[:,3]\r\n T = u[:,4]\r\n N = parametres[0]\r\n demo = parametres[1]\r\n beta = parametres[2]\r\n gamma = parametres[3]\r\n delta = parametres[5]\r\n eta = parametres[6]\r\n tau = parametres[7]\r\n D = parametres[8]\r\n dS_dt = demo*N - beta*(I+delta*T)*S/N - demo*S\r\n dI_dt = beta*(I+delta*T)*S/N - (tau+gamma)*I - demo*I\r\n dR_dt = gamma*I + eta*T - demo*R\r\n dT_dt = tau*I - eta*T - demo*T\r\n # La diffusion se calcule uniquement avec la matrice A, avec condition de gradient nul au bord\r\n return np.array([[dS_dt[i],0,dI_dt[i],dR_dt[i],dT_dt[i]] for i in range(len(I))]) + D*np.dot(A,u)", "def duv(self, u, v):\n return np.column_stack(\n [_.ev(u, v, dx=1, dy=1) for _ in (self.splz, self.sply, self.splx)])", "def svec(A):\n \n n = A.shape[0]\n B = A.copy()\n B[np.triu_indices(n, 1)] = B[np.triu_indices(n, 1)] * np.sqrt(2)\n return B[np.triu_indices(n)]", "def dvv(self, u, v):\n return np.column_stack(\n [_.ev(u, v, dy=2) for _ in (self.splz, self.sply, self.splx)])", "def computesvd(train, K):\n mean = global_mean(train)\n train = standardize(train, mean)\n std = compute_std(train)\n train = div_std(train)\n\n U, s, Vt = sp.linalg.svds(train, k=K)\n\n dim = (len(s), len(s))\n S = np.zeros(dim)\n for i in range(len(s)):\n S[i, i] = mt.sqrt(s[i])\n\n U = sp.lil_matrix(U, dtype=np.float32) # dim(M,k)\n S = sp.lil_matrix(S, dtype=np.float32) # dim(k,k)\n Vt = sp.lil_matrix(Vt, dtype=np.float32) # dim(k,N)\n\n user_features = S.dot(Vt)\n item_features = np.transpose(U.dot(S))\n\n return user_features, 
item_features, mean, std", "def dolp(s,axis=0):\n s0 = np.take(s,0,axis)\n s12 = np.take(s,(1,2),axis)\n return np.linalg.norm(s12,axis=axis)/s0", "def shrinkage_rank(D, alambda):\r\n U, s, V = LA.svd(D, full_matrices=False)\r\n s1 = np.maximum(0, s - alambda)\r\n return np.dot(U, np.dot(np.diag(s1), V))", "def create_sdm(fv_mat, num_fv_per_shingle):\n [num_rows, num_columns] = fv_mat.shape\n if num_fv_per_shingle == 1:\n mat_as = fv_mat\n else:\n mat_as = np.zeros(((num_rows * num_fv_per_shingle),\n (num_columns - num_fv_per_shingle + 1)))\n for i in range(1, num_fv_per_shingle+1):\n # Use feature vectors to create an audio shingle\n # for each time step and represent these shingles\n # as vectors by stacking the relevant feature\n # vectors on top of each other\n mat_as[((i-1)*num_rows+1)-1:(i*num_rows), : ] = fv_mat[:, i-1:(num_columns- num_fv_per_shingle + i)]\n\n sdm_row = spd.pdist(mat_as.T, 'cosine')\n self_dissim_mat = spd.squareform(sdm_row)\n return self_dissim_mat", "def S1(A,B):\n C = np.subtract(A,B)\n s = np.linalg.svd(C)[1]\n return (np.sum(s))", "def svd_genre_actor(self, genre):\n genre_actor_frame = self.get_genre_actor_data_frame()\n rank_weight_dict = self.assign_rank_weight(genre_actor_frame[['movieid', 'actor_movie_rank']])\n genre_actor_frame = self.combine_computed_weights(genre_actor_frame, rank_weight_dict, \"TFIDF\", genre)\n temp_df = genre_actor_frame[[\"movieid\", \"actorid_string\", \"total\"]].drop_duplicates()\n genre_actor_tfidf_df = temp_df.pivot(index='movieid', columns='actorid_string', values='total')\n genre_actor_tfidf_df = genre_actor_tfidf_df.fillna(0)\n\n genre_actor_tfidf_df.to_csv('genre_actor_matrix.csv', index=True, encoding='utf-8')\n\n df = pd.DataFrame(pd.read_csv('genre_actor_matrix.csv'))\n df1 = genre_actor_tfidf_df.values[:, :]\n row_headers = list(df[\"movieid\"])\n column_headers = list(df)\n del column_headers[0]\n\n column_headers_names = []\n\n for col_head in column_headers:\n col_head_name = util.get_actor_name_for_id(int(col_head))\n column_headers_names = column_headers_names + [col_head_name]\n\n (U, s, Vh) = util.SVD(df1)\n\n # To print latent semantics\n latents = util.get_latent_semantics(4, Vh)\n util.print_latent_semantics(latents, column_headers_names)\n\n u_frame = pd.DataFrame(U[:, :4], index=row_headers)\n v_frame = pd.DataFrame(Vh[:4, :], columns=column_headers)\n u_frame.to_csv('u_1b_svd.csv', index=True, encoding='utf-8')\n v_frame.to_csv('vh_1b_svd.csv', index=True, encoding='utf-8')\n return (u_frame, v_frame, s)", "def dSIRD(s, i, r, d, beta, gamma, mu, dt):\n \n N = s + i + r + d\n return np.array([-beta / N * i * s,\n beta / N * i * s - (gamma+mu) * i,\n gamma * i,\n mu * i]) * dt", "def _prep_data_svs(d):\n data = np.expand_dims(d.data, axis=0)\n data = np.expand_dims(data, axis=0)\n data = np.expand_dims(data, axis=0)\n return data", "def get_singular_values(matrix, n):\n singular_values = None\n u, s, v = svd(matrix)\n ### YOUR CODE HERE\n singular_values=s[0:n]\n ### END YOUR CODE\n return singular_values", "def test_svd_smoothing(self):\n\t\t\n\t\t# 819 =~ 4096*0.2\n\t\tself.watcher.SVDSmoothing(layers=[self.fc2_layer])\n\t\tesd = self.watcher.get_ESD(layer=self.fc2_layer) \n\t\tnum_comps = len(esd[esd > 10**-10])\n\t\tself.assertEqual(num_comps, 819)", "def update_stats(self): # todo: split into separate stats/svd updates\n s = self\n ops = []\n\n # update covariances\n # s.grad.update() # TODO: not needed\n # s.grad2.update()\n \n for var in s:\n ops.append(s[var].A.cov_update_op)\n 
ops.append(s[var].B2.cov_update_op)\n\n with u.timeit(\"covariances\"):\n u.run(ops)\n\n # update SVDs\n corrected_vars = list(s)\n with u.timeit(\"svd\"):\n with s.write_lock():\n for var in s:\n if not dont_update_first_layer or s[var].A.svd.update_counter==0:\n s[var].A.svd.update()\n s[var].B2.svd.update()", "def svd_compress_gs(mat, k):\n U, singular_vals, V = np.linalg.svd(mat)\n rank = len(singular_vals)\n print(\"Image rank %r\" % rank)\n if k > rank:\n print(\"k is larger than rank of image %r\" % rank)\n return mat\n # take columns less than k from U\n U_p = U[:, :k]\n # take rows less than k from V\n V_p = V[:k, :]\n # build the new S matrix with top k diagnal elements\n S_p = np.zeros((k, k), mat.dtype)\n for i in range(k):\n S_p[i][i] = singular_vals[i]\n print(\"U_p shape {0}, S_p shape {1}, V_p shape {2}\".format(\n U_p.shape, S_p.shape, V_p.shape))\n compressed = np.dot(np.dot(U_p, S_p), V_p)\n ss = ssim(mat, compressed,\n dynamic_range=compressed.max() - compressed.min())\n print(\"Strucural similarity: %r\" % ss)\n return U_p, S_p, V_p, ss", "def sd(self, v):\n return np.sqrt(np.dot(self.mat_var, v) + self.var_ext)", "def euler_sde(self, x, rv_n):\n n = self.mp.params[0]; k = self.mp.params[1];\n gamma = self.mp.params[2]; dt = self.mp.params[3];\n\n if x.get_shape()[1] > 1:\n evolve_fun = self.evolve_system\n else:\n evolve_fun = self.evolve\n\n dx = dt * self.evolve(x, n, k, gamma)\n x = x + dx + tf.sqrt(dt)*x*rv_n\n return tf.cast(x, tf.float32)", "def svd_compress_gs(mat, k):\n U, singular_vals, V = np.linalg.svd(mat)\n rank = len(singular_vals)\n print(\"Image rank %r\" % rank)\n if k > rank:\n print(\"k is larger than rank of image %r\" % rank)\n return mat\n # take columns less than k from U\n U_p = U[:, :k]\n # take rows less than k from V\n V_p = V[:k, :]\n # build the new S matrix with top k diagnal elements\n S_p = np.zeros((k, k), mat.dtype)\n for i in range(k):\n S_p[i][i] = singular_vals[i]\n print(\"U_p shape {0}, S_p shape {1}, V_p shape {2}\".format(\n U_p.shape, S_p.shape, V_p.shape))\n compressed = np.dot(np.dot(U_p, S_p), V_p)\n ss = ssim(mat, compressed,\n dynamic_range=compressed.max() - compressed.min())\n print(\"Strucural similarity: %r\" % ss)\n return U_p, S_p, V_p", "def _apply_rank(U, S, VT, r, verbose=False):\n if r is None:\n r = len(S)\n S_r = S[:r]\n U_r = U[:, :r]\n VT_r = VT[:r]\n if verbose:\n print(\"Rank:\", r, \"SVD shape:\", U_r.shape, S_r.shape, VT_r.shape)\n return U_r, S_r, VT_r", "def orth(A):\n u,s,vh = svd(A)\n M,N = A.shape\n tol = max(M,N)*numpy.amax(s)*eps\n num = numpy.sum(s > tol,dtype=int)\n Q = u[:,:num]\n return Q", "def test_svd_smoothing_no_model(self):\n\t\t\n\t\t# 819 =~ 4096*0.2\n\t\t\n\t\tself.watcher.SVDSmoothing(model=self.model, layers=[21])\n\t\tesd = self.watcher.get_ESD(layer=21) \n\t\tnum_comps = len(esd[esd>10**-10])\n\t\tself.assertEqual(num_comps, 819)", "def svd_flip(u, v, u_based_decision=True):\n if u_based_decision:\n # columns of u, rows of v\n max_abs_cols = np.argmax(np.abs(u), axis=0)\n signs = np.sign(u[max_abs_cols, range(u.shape[1])])\n u *= signs\n v *= signs[:, np.newaxis]\n else:\n # rows of v, columns of u\n max_abs_rows = np.argmax(np.abs(v), axis=1)\n signs = np.sign(v[range(v.shape[0]), max_abs_rows])\n u *= signs\n v *= signs[:, np.newaxis]\n return u, v", "def svd_factorization_projections(A, m, n, orth_tol, max_refin, tol):\n # SVD Factorization\n U, s, Vt = scipy.linalg.svd(A, full_matrices=False)\n\n # Remove dimensions related with very small singular values\n U = U[:, s > tol]\n 
Vt = Vt[s > tol, :]\n s = s[s > tol]\n\n # z = x - A.T inv(A A.T) A x\n def null_space(x):\n # v = U 1/s V.T x = inv(A A.T) A x\n aux1 = Vt.dot(x)\n aux2 = 1/s*aux1\n v = U.dot(aux2)\n z = x - A.T.dot(v)\n\n # Iterative refinement to improve roundoff\n # errors described in [2]_, algorithm 5.1.\n k = 0\n while orthogonality(A, z) > orth_tol:\n if k >= max_refin:\n break\n # v = U 1/s V.T x = inv(A A.T) A x\n aux1 = Vt.dot(z)\n aux2 = 1/s*aux1\n v = U.dot(aux2)\n # z_next = z - A.T v\n z = z - A.T.dot(v)\n k += 1\n\n return z\n\n # z = inv(A A.T) A x\n def least_squares(x):\n # z = U 1/s V.T x = inv(A A.T) A x\n aux1 = Vt.dot(x)\n aux2 = 1/s*aux1\n z = U.dot(aux2)\n return z\n\n # z = A.T inv(A A.T) x\n def row_space(x):\n # z = V 1/s U.T x\n aux1 = U.T.dot(x)\n aux2 = 1/s*aux1\n z = Vt.T.dot(aux2)\n return z\n\n return null_space, least_squares, row_space", "def pinv_damped(a, l, rcond=1e-15 ):\n # a, wrap = np.linalg._makearray(a)\n # np.linalg._assertNoEmpty2d(a)\n a = a.conjugate()\n u, s, vt = np.linalg.svd(a, 0)\n m = u.shape[0]\n n = vt.shape[1]\n cutoff = rcond*np.maximum.reduce(s)\n # estimate of smallest singular value\n # s_m = 0.00065\n s_m = np.amin(s)\n # size of the singular region\n eps = .10\n if s_m > eps:\n lambda_2 =0\n else:\n lambda_2 = (1-(s_m/eps)**2)*l*l\n # print np.amin(s), lambda_2\n for i in range(min(n, m)):\n if s[i] > cutoff:\n s[i] = s[i]/(s[i]*s[i]+lambda_2)\n # s[i] = s[i]/(s[i]*s[i]+l*l)\n else:\n s[i] = 0.;\n print('singularity: ', s)\n # if s[i] > cutoff:\n # s[i] = s[i]/(s[i]*s[i]+lambda_2)\n # # s[i] = s[i]/(s[i]*s[i]+l*l)\n # else:\n # s[i] = 0.;\n # print('singularity: ', s)\n # print np.maximum.reduce(s)\n res = np.dot(np.transpose(vt), np.multiply(s[:, np.newaxis], np.transpose(u)))\n # return wrap(res)\n return res", "def sedov(t, E0, rho0, gamma, num_points=1000, nu=3):\n from scipy.special import gamma as Gamma\n\n # the similarity variable\n v_min = 2.0 / ((nu + 2) * gamma)\n v_max = 4.0 / ((nu + 2) * (gamma + 1))\n\n v = v_min + np.arange(num_points) * (v_max - v_min) / (num_points - 1.0)\n\n a = _sedov_calc_a(gamma, nu)\n beta = _sedov_calc_beta(v, gamma, nu)\n lbeta = np.log(beta)\n\n r = np.exp(-a[0] * lbeta[0] - a[2] * lbeta[1] - a[1] * lbeta[2])\n rho = ((gamma + 1.0) /\n (gamma - 1.0)) * np.exp(a[3] * lbeta[1] + a[5] * lbeta[3] +\n a[4] * lbeta[2])\n p = np.exp(nu * a[0] * lbeta[0] + (a[5] + 1) * lbeta[3] +\n (a[4] - 2 * a[1]) * lbeta[2])\n u = beta[0] * r * 4.0 / ((gamma + 1.0) * (nu + 2.0))\n p *= 8.0 / ((gamma + 1.0) * (nu + 2.0) * (nu + 2.0))\n\n # we have to take extra care at v=v_min, since this can be a special point.\n # It is not a singularity, however, the gradients of our variables (wrt v)\n # are:\n # r -> 0, u -> 0, rho -> 0, p-> constant\n\n u[0] = 0.0\n rho[0] = 0.0\n r[0] = 0.0\n p[0] = p[1]\n\n # volume of an n-sphere\n vol = (np.pi**(nu / 2.0) / Gamma(nu / 2.0 + 1.0)) * np.power(r, nu)\n\n # note we choose to evaluate the integral in this way because the\n # volumes of the first few elements (i.e near v=vmin) are shrinking\n # very slowly, so we dramatically improve the error convergence by\n # finding the volumes exactly. 
This is most important for the\n # pressure integral, as this is on the order of the volume.\n\n # (dimensionless) energy of the model solution\n de = rho * u * u * 0.5 + p / (gamma - 1.0)\n # integrate (trapezium rule)\n q = np.inner(de[1:] + de[:-1], np.diff(vol)) * 0.5\n\n # the factor to convert to this particular problem\n fac = (q * (t**nu) * rho0 / E0)**(-1.0 / (nu + 2.0))\n\n # shock speed\n shock_speed = fac * (2.0 / (nu + 2.0))\n rho_s = ((gamma + 1.0) / (gamma - 1.0)) * rho0\n r_s = shock_speed * t * (nu + 2.0) / 2.0\n p_s = (2.0 * rho0 * shock_speed * shock_speed) / (gamma + 1.0)\n u_s = (2.0 * shock_speed) / (gamma + 1.0)\n\n r *= fac * t\n u *= fac\n p *= fac * fac * rho0\n rho *= rho0\n\n return r, p, rho, u, r_s, p_s, rho_s, u_s, shock_speed", "def split_mps_tensor(A: np.ndarray, qd0: Sequence[int], qd1: Sequence[int], qD: Sequence[Sequence[int]], svd_distr: str, tol=0):\n assert A.ndim == 3\n d0 = len(qd0)\n d1 = len(qd1)\n assert d0 * d1 == A.shape[0], 'physical dimension of MPS tensor must be equal to d0 * d1'\n # reshape as matrix and split by SVD\n A = A.reshape((d0, d1, A.shape[1], A.shape[2])).transpose((0, 2, 1, 3))\n s = A.shape\n q0 = qnumber_flatten([ qd0, qD[0]])\n q1 = qnumber_flatten([-qd1, qD[1]])\n A0, sigma, A1, qbond = split_matrix_svd(A.reshape((s[0]*s[1], s[2]*s[3])), q0, q1, tol)\n A0.shape = (s[0], s[1], len(sigma))\n A1.shape = (len(sigma), s[2], s[3])\n # use broadcasting to distribute singular values\n if svd_distr == 'left':\n A0 = A0 * sigma\n elif svd_distr == 'right':\n A1 = A1 * sigma[:, None, None]\n elif svd_distr == 'sqrt':\n s = np.sqrt(sigma)\n A0 = A0 * s\n A1 = A1 * s[:, None, None]\n else:\n raise ValueError('svd_distr parameter must be \"left\", \"right\" or \"sqrt\".')\n # move physical dimension to the front\n A1 = A1.transpose((1, 0, 2))\n return (A0, A1, qbond)", "def snv(spectra):\n\n return (spectra - np.mean(spectra, axis=0)) / np.std(spectra, axis=0)", "def dv(self, u, v):\n return np.column_stack(\n [_.ev(u, v, dy=1) for _ in (self.splz, self.sply, self.splx)])" ]
[ "0.7485799", "0.7444824", "0.7392306", "0.73218936", "0.7318503", "0.7112886", "0.70934653", "0.7035592", "0.70275265", "0.7018896", "0.6982395", "0.6862845", "0.67582536", "0.67388934", "0.66284513", "0.6591925", "0.65776706", "0.6568088", "0.65605414", "0.65488946", "0.6543062", "0.64611787", "0.64378065", "0.6423421", "0.62454784", "0.62272644", "0.6160203", "0.6110558", "0.60907257", "0.60899884", "0.6078716", "0.60708153", "0.6060353", "0.6052859", "0.6044797", "0.60295504", "0.59922504", "0.5976666", "0.5970552", "0.59467375", "0.59284073", "0.5925002", "0.5897032", "0.589446", "0.5883488", "0.58317405", "0.5829847", "0.58258617", "0.57907706", "0.5789268", "0.577864", "0.5761188", "0.57483184", "0.5740207", "0.5736776", "0.57259387", "0.5691409", "0.5686592", "0.5681563", "0.5679991", "0.56752217", "0.562649", "0.56154525", "0.5611178", "0.55934674", "0.55859435", "0.5584493", "0.55758554", "0.5569834", "0.55687284", "0.556277", "0.554564", "0.55338365", "0.55317473", "0.5512849", "0.5512306", "0.5504091", "0.549327", "0.5490923", "0.5465475", "0.5461862", "0.5429221", "0.5418245", "0.54143524", "0.54058105", "0.5393694", "0.53460693", "0.53413707", "0.53402746", "0.5339579", "0.5328238", "0.53254324", "0.5323026", "0.5317443", "0.53165895", "0.53112173", "0.53108484", "0.53060466", "0.53053886", "0.5290398" ]
0.64440066
22
This is a recursive partitioned QR, about two times faster than QR for tall matrices
def fast_qr(A):
    N, M = A.shape
    if N < M:
        Q, R = qr(A)  # not a tall matrix, use normal qr
    elif M > 16:
        # recurse: split the columns in half; partitions of 16 or fewer
        # columns fall through to the normal qr base case below
        R = np.zeros((M, M))
        k = np.ceil(M / 2).astype(int)
        qa, R[:k, :k] = fast_qr(A[:, :k])
        R[:k, k:] = np.dot(qa.T, A[:, k:])
        qb, R[k:, k:] = fast_qr(A[:, k:] - np.dot(qa, R[:k, k:]))
        Q = np.hstack((qa, qb))
    else:
        # reached the smallest partition size, no more partitions, use normal qr
        Q, R = qr(A)
    return Q, R
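A minimal usage sketch for the record above, assuming numpy is imported as np and that the undefined qr is numpy's reduced-mode np.linalg.qr — neither is stated in the snippet, so both are assumptions:

# Sketch only: assumes qr = np.linalg.qr (reduced mode) and numpy as np.
import numpy as np
from numpy.linalg import qr

A = np.random.randn(4096, 256)  # tall: many more rows than columns
Q, R = fast_qr(A)

# Q has orthonormal columns and Q @ R reconstructs A (up to round-off).
assert np.allclose(Q.T @ Q, np.eye(A.shape[1]))
assert np.allclose(Q @ R, A)
assert np.allclose(R, np.triu(R))  # R is upper triangular

The split point k = ceil(M/2) means each recursion halves the column count, so partitions of at most 16 columns are handled directly by the base-case qr call.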
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def qr(in_A):\n # input checks\n Ndim = numpy.ndim(in_A)\n assert Ndim == 2\n N,M = numpy.shape(in_A)\n assert N==M\n D,P = in_A[0,0].data.shape\n\n # prepare R and QT\n R = in_A.copy()\n QT = numpy.array([[UTPS(numpy.zeros((D,P))) for c in range(N)] for r in range(N) ])\n for n in range(N):\n QT[n,n].data[0,:] = 1\n\n # main algorithm\n for n in range(N):\n for m in range(n+1,N):\n a = R[n,n]\n b = R[m,n]\n r = numpy.sqrt(a**2 + b**2)\n c = a/r\n s = b/r\n\n for k in range(N):\n Rnk = R[n,k]\n \n R[n,k] = c*Rnk + s*R[m,k]\n R[m,k] =-s*Rnk + c*R[m,k];\n\n QTnk = QT[n,k]\n QT[n,k] = c*QTnk + s*QT[m,k]\n QT[m,k] =-s*QTnk + c*QT[m,k];\n # #print 'QT:\\n',QT\n # #print 'R:\\n',R\n # #print '-------------'\n\n return QT.T,R", "def qr_iteration(a):\n # Implementation Note: variables names kept the same as the pseudo code's\n n = a.shape[0]\n a_ = a.copy()\n q_ = np.eye(n)\n temp_q = np.empty((n, n))\n delta_matrix = np.empty((n, n))\n\n for i in range(n):\n q, r = gram_schmidt(a_)\n np.matmul(r, q, out=a_)\n np.matmul(q_, q, out=temp_q)\n np.subtract(np.abs(q_), np.abs(temp_q), out=delta_matrix)\n if np.all(np.abs(delta_matrix, out=delta_matrix) <= EPSILON):\n # reached convergence\n return a_, q_\n q_, temp_q = temp_q, q_\n\n # reached iterations bound (n)\n return a_, q_", "def QR(self):\n m, n = self.shape\n assert m >= n, \"Requires m>=n\"\n R = self.copy()\n Q = eye(m)\n\n for j in range(n):\n reflect_me = R[j:, j].copy()\n v, beta = reflect_me._house()\n H = eye(m)\n # A[j:, j:] = (I - beta*v*v.T)*A[j:, j:]\n H[j:, j:] -= (v @ v.T()) * beta\n # Not producing correct triangular matrix.\n # Q looks good though.\n R = H @ R\n Q = H @ Q\n return Q[:n].T(), R[:n]", "def qrtest( A ):\n \n A = numpy.array(A, dtype=float)\n m,n = A.shape\n Q = numpy.zeros( (m,n) )\n R1 = numpy.zeros( (m,n) )\n R = numpy.zeros( (n,n) )\n\n Q,R = qr_mgs(A)\n\n for ii in range(0,1) : # repeat how many times\n Q,R1 = qr_mgs(Q)\n R = numpy.dot(R1,R)\n\n return Q,R", "def __split1_qr_loop(\n dcol: int, r_tiles: SquareDiagTiles, q0_tiles: SquareDiagTiles, calc_q: bool\n) -> None:\n r_torch_device = r_tiles.arr.larray.device\n q0_torch_device = q0_tiles.arr.larray.device if calc_q else None\n # ==================================== R Calculation - single tile =========================\n # loop over each column, need to do the QR for each tile in the column(should be rows)\n # need to get the diagonal process\n rank = r_tiles.arr.comm.rank\n cols_on_proc = torch.cumsum(\n torch.tensor(r_tiles.tile_columns_per_process, device=r_torch_device), dim=0\n )\n not_completed_processes = torch.nonzero(input=dcol < cols_on_proc, as_tuple=False).flatten()\n diag_process = not_completed_processes[0].item()\n tile_rows = r_tiles.tile_rows\n # get the diagonal tile and do qr on it\n # send q to the other processes\n # 1st qr: only on diagonal tile + apply to the row\n if rank == diag_process:\n # do qr on diagonal process\n try:\n q1, r1 = torch.linalg.qr(r_tiles[dcol, dcol], mode=\"complete\")\n except AttributeError:\n q1, r1 = r_tiles[dcol, dcol].qr(some=False)\n\n r_tiles.arr.comm.Bcast(q1.clone(), root=diag_process)\n r_tiles[dcol, dcol] = r1\n # apply q1 to the trailing matrix (other processes)\n\n # need to convert dcol to a local index\n loc_col = dcol - sum(r_tiles.tile_columns_per_process[:rank])\n hold = r_tiles.local_get(key=(dcol, slice(loc_col + 1, None)))\n if hold is not None: # if there is more data on that row after the diagonal tile\n r_tiles.local_set(key=(dcol, slice(loc_col + 1, None)), value=torch.matmul(q1.T, 
hold))\n elif rank > diag_process:\n # recv the Q from the diagonal process, and apply it to the trailing matrix\n st_sp = r_tiles.get_start_stop(key=(dcol, dcol))\n sz = st_sp[1] - st_sp[0], st_sp[3] - st_sp[2]\n\n q1 = torch.zeros(\n (sz[0], sz[0]), dtype=r_tiles.arr.dtype.torch_type(), device=r_torch_device\n )\n loc_col = 0\n r_tiles.arr.comm.Bcast(q1, root=diag_process)\n hold = r_tiles.local_get(key=(dcol, slice(0, None)))\n r_tiles.local_set(key=(dcol, slice(0, None)), value=torch.matmul(q1.T, hold))\n else:\n # these processes are already done calculating R, only need to calc Q, need to recv q1\n st_sp = r_tiles.get_start_stop(key=(dcol, dcol))\n sz = st_sp[1] - st_sp[0], st_sp[3] - st_sp[2]\n q1 = torch.zeros(\n (sz[0], sz[0]), dtype=r_tiles.arr.dtype.torch_type(), device=r_torch_device\n )\n r_tiles.arr.comm.Bcast(q1, root=diag_process)\n\n # ================================ Q Calculation - single tile =============================\n if calc_q:\n for row in range(q0_tiles.tile_rows_per_process[rank]):\n # q1 is applied to each tile of the column dcol of q0 then written there\n q0_tiles.local_set(\n key=(row, dcol), value=torch.matmul(q0_tiles.local_get(key=(row, dcol)), q1)\n )\n del q1\n # loop over the rest of the rows, combine the tiles, then apply the result to the rest\n # 2nd step: merged QR on the rows\n # ================================ R Calculation - merged tiles ============================\n diag_tile = r_tiles[dcol, dcol]\n # st_sp = r_tiles.get_start_stop(key=(dcol, dcol))\n diag_st_sp = r_tiles.get_start_stop(key=(dcol, dcol))\n diag_sz = diag_st_sp[1] - diag_st_sp[0], diag_st_sp[3] - diag_st_sp[2]\n # (Q) need to get the start stop of diag tial\n for row in range(dcol + 1, tile_rows):\n lp_st_sp = r_tiles.get_start_stop(key=(row, dcol))\n lp_sz = lp_st_sp[1] - lp_st_sp[0], lp_st_sp[3] - lp_st_sp[2]\n if rank == diag_process:\n # cat diag tile and loop tile\n loop_tile = r_tiles[row, dcol]\n loop_cat = torch.cat((diag_tile, loop_tile), dim=0)\n # qr\n try:\n ql, rl = torch.linalg.qr(loop_cat, mode=\"complete\")\n except AttributeError:\n ql, rl = loop_cat.qr(some=False)\n # send ql to all\n r_tiles.arr.comm.Bcast(ql.clone().contiguous(), root=diag_process)\n # set rs\n r_tiles[dcol, dcol] = rl[: diag_sz[0]]\n r_tiles[row, dcol] = rl[diag_sz[0] :]\n # apply q to rest\n if loc_col + 1 < r_tiles.tile_columns_per_process[rank]:\n upp = r_tiles.local_get(key=(dcol, slice(loc_col + 1, None)))\n low = r_tiles.local_get(key=(row, slice(loc_col + 1, None)))\n hold = torch.matmul(ql.T, torch.cat((upp, low), dim=0))\n # set upper\n r_tiles.local_set(key=(dcol, slice(loc_col + 1, None)), value=hold[: diag_sz[0]])\n # set lower\n r_tiles.local_set(key=(row, slice(loc_col + 1, None)), value=hold[diag_sz[0] :])\n elif rank > diag_process:\n ql = torch.zeros(\n [lp_sz[0] + diag_sz[0]] * 2,\n dtype=r_tiles.arr.dtype.torch_type(),\n device=r_torch_device,\n )\n r_tiles.arr.comm.Bcast(ql, root=diag_process)\n upp = r_tiles.local_get(key=(dcol, slice(0, None)))\n low = r_tiles.local_get(key=(row, slice(0, None)))\n hold = torch.matmul(ql.T, torch.cat((upp, low), dim=0))\n # set upper\n r_tiles.local_set(key=(dcol, slice(0, None)), value=hold[: diag_sz[0]])\n # set lower\n r_tiles.local_set(key=(row, slice(0, None)), value=hold[diag_sz[0] :])\n else:\n ql = torch.zeros(\n [lp_sz[0] + diag_sz[0]] * 2,\n dtype=r_tiles.arr.dtype.torch_type(),\n device=r_torch_device,\n )\n r_tiles.arr.comm.Bcast(ql, root=diag_process)\n # ================================ Q Calculation - merged tiles 
========================\n if calc_q:\n top_left = ql[: diag_sz[0], : diag_sz[0]]\n top_right = ql[: diag_sz[0], diag_sz[0] :]\n bottom_left = ql[diag_sz[0] :, : diag_sz[0]]\n bottom_right = ql[diag_sz[0] :, diag_sz[0] :]\n # two multiplications: one for the left tiles and one for the right\n # left tiles --------------------------------------------------------------------\n # create r column of the same size as the tile row of q0\n st_sp = r_tiles.get_start_stop(key=(slice(dcol, None), dcol))\n qloop_col_left_sz = st_sp[1] - st_sp[0], st_sp[3] - st_sp[2]\n qloop_col_left = torch.zeros(\n qloop_col_left_sz, dtype=q0_tiles.arr.dtype.torch_type(), device=q0_torch_device\n )\n # top left starts at 0 and goes until diag_sz[1]\n qloop_col_left[: diag_sz[0]] = top_left\n # bottom left starts at ? and goes until ? (only care about 0th dim)\n st, sp, _, _ = r_tiles.get_start_stop(key=(row, 0))\n st -= diag_st_sp[0] # adjust these by subtracting the start index of the diag tile\n sp -= diag_st_sp[0]\n qloop_col_left[st:sp] = bottom_left\n # right tiles --------------------------------------------------------------------\n # create r columns tensor of the size of the tile column of index 'row'\n st_sp = q0_tiles.get_start_stop(key=(row, slice(dcol, None)))\n sz = st_sp[1] - st_sp[0], st_sp[3] - st_sp[2]\n qloop_col_right = torch.zeros(\n sz[1], sz[0], dtype=q0_tiles.arr.dtype.torch_type(), device=q0_torch_device\n )\n # top left starts at 0 and goes until diag_sz[1]\n qloop_col_right[: diag_sz[0]] = top_right\n # bottom left starts at ? and goes until ? (only care about 0th dim)\n st, sp, _, _ = r_tiles.get_start_stop(key=(row, 0))\n st -= diag_st_sp[0] # adjust these by subtracting the start index of the diag tile\n sp -= diag_st_sp[0]\n qloop_col_right[st:sp] = bottom_right\n for qrow in range(q0_tiles.tile_rows_per_process[rank]):\n # q1 is applied to each tile of the column dcol of q0 then written there\n q0_row = q0_tiles.local_get(key=(qrow, slice(dcol, None))).clone()\n q0_tiles.local_set(key=(qrow, dcol), value=torch.matmul(q0_row, qloop_col_left))\n q0_tiles.local_set(key=(qrow, row), value=torch.matmul(q0_row, qloop_col_right))\n del ql", "def householder_qr(A):\n m, n = A.shape\n I = np.eye(m, dtype = complex)\n Ahat = np.zeros((m, n+m), dtype = complex)\n Ahat[:, :n] = A\n Ahat[:, n:] = I\n\n Rhat = householder(Ahat)\n R = Rhat[:,:n]\n Q = Rhat[:,n:].transpose().conj()\n\n return Q, R", "def qr_identities(ten,split):\n # Get splitting tensors\n shape = ten.shape\n nind = len(shape)\n t0 = time.time()\n split_left = gen_split(shape[:split],left=True)\n split_right = gen_split(shape[split:],left=False)\n tf = time.time()\n print('\\tGenerate split tensors time = {}'.format(tf-t0))\n # Combine tensor indices\n letters = 'abcdefghijklmnopqrstuvwxyz'\n t0 = time.time()\n einstr = letters[:split+1] + ',' + \\\n letters[1:nind+1] + '->' + \\\n letters[0]+letters[split+1:nind+1]\n ten = np.einsum(einstr,split_left,ten)\n einstr = letters[0]+letters[split+1:nind+1] + ',' + \\\n letters[split+1:nind+2] + '->' + \\\n letters[0] + letters[nind+1]\n ten = np.einsum(einstr,ten,split_right)\n tf = time.time()\n print('\\tTensor contraction to mat time {}'.format(tf-t0))\n # Do QR of reshaped tensor\n t0 = time.time()\n Q,R = np.linalg.qr(ten)\n tf = time.time()\n print('\\tActual QR time = {}'.format(tf-t0))\n # Split resulting tensor indices\n t0 = time.time()\n einstr = letters[:split+1] + ',' + \\\n letters[0] + letters[-1] + '->' + \\\n letters[1:split+1] + letters[-1]\n Q = 
np.einsum(einstr,split_left,Q)\n einstr = letters[-1] + letters[nind+1] + ',' + \\\n letters[split+1:nind+2] + '->' + \\\n letters[-1] + letters[split+1:nind+1]\n R = np.einsum(einstr,R,split_right)\n tf = time.time()\n print('\\tMat contraction to tensor time {}'.format(tf-t0))\n # Return Result\n return Q,R", "def qr(T):\n Q, R = splinalg.qr(T, mode='economic')\n sR = np.sign(np.real(np.diag(R)))\n sR[sR == 0] = 1\n Q, R = Q * sR, sR.reshape([-1, 1]) * R\n # maxQ, minQ = Q.max(0), Q.min(0)\n # maxR, minR = R.max(1), R.min(1)\n # ind = (np.abs(minQ) > maxQ) & (np.abs(minR) > maxR)\n # Q[:, ind] *= -1\n # R[ind] *= -1\n return Q, R", "def qr_mgs( A ):\n\n A = numpy.array(A, dtype=float)\n m,n = A.shape\n Q = numpy.zeros( (m,n) )\n R = numpy.zeros( (n,n) )\n\n for k in range( 0, n ) :\n R[k,k] = numpy.linalg.norm( A[:,k] )\n Q[:,k] = A[:,k] / R[k,k]\n\n for j in range( k+1, n ) :\n R[k,j] = numpy.dot( Q[:,k], A[:,j] )\n A[:,j] = A[:,j] - Q[:,k] * R[k,j]\n \n return Q,R", "def qr(A, useLast=False):\n\n A = np.matrix.copy(A)\n m, n = A.shape\n Q = np.eye(m)\n p = list(range(n))\n if min(m, n) <= 1 and useLast:\n return np.eye(1), A, [p[-1]] + p[:-1]\n for i, _ in zip(range(n - 1), range(m - 1)):\n H = np.eye(m)\n\n # find the most nondependent column\n if (i == 0) and useLast:\n kMax = n - 1\n else:\n vMax = 0\n kMax = i\n for k in range(i, n):\n v = np.linalg.norm(A[i:, k])\n if v > vMax:\n vMax = v\n kMax = k\n A[:, [i, kMax]] = A[:, [kMax, i]]\n p[i], p[kMax] = p[kMax], p[i]\n\n H[i:, i:] = Linalg.make_householder(A[i:, i])\n Q = np.dot(Q, H)\n A = np.dot(H, A)\n return Q, A, p", "def qr_decomposition(self):\n if self.m != self.n:\n raise NotImplementedError('QR decomposition not yet available ' +\n 'for non-square matrices')\n orig_basis = [vec.Vector.fromMatrixColumn(self, j)\n for j in range(self.m)]\n orthog_basis, norm_basis = [], []\n for j in range(self.m):\n u = orig_basis[j]\n for k in range(j):\n u -= orig_basis[j].project_onto(orthog_basis[k])\n orthog_basis.append(u)\n norm_basis.append(u.normalise())\n Q = Matrix.fromVectors(norm_basis)\n R = Q.transpose() * self\n return Q, R", "def _blr_tsqr(obj):\n nb = obj.nb[0]\n A = obj\n Q = core.BlockLowRank(numpy.full((nb, 1), None))\n B = numpy.full(nb, None)\n\n for i in range(nb):\n if isinstance(A[i, 0], core.LowRank):\n Qi, Ri = qr(A[i, 0].U)\n Q[i, 0] = Qi\n B[i] = Ri * A[i, 0].V\n else:\n B[i] = A[i, 0]\n\n B = numpy.vstack(B)\n\n if B.shape[0] < B.shape[1]:\n Z = numpy.zeros((B.shape[1] - B.shape[0], B.shape[1]))\n B = numpy.vstack([B, Z])\n\n Qb, R = qr(B)\n rstart, rend = 0, 0\n\n for i in range(nb):\n if isinstance(A[i, 0], core.LowRank):\n rstart = rend\n rend = rend + A[i, 0].rank\n U = Q[i, 0]\n V = Qb[rstart:rend, :]\n Q[i, 0] = core.LowRank((U, V), A[i, 0].method, A[i, 0].eps)\n else:\n rstart = rend\n rend = rend + A[i, 0].shape[0]\n Q[i, 0] = Qb[rstart:rend, :]\n\n return Q, R", "def leastsquares(A,b,qr=qrfact.qri_mgs_piv,alpha=0.5):\n \n\n A = numpy.array(A, dtype=float)\n m,n = A.shape\n z = numpy.zeros( n )\n a = numpy.zeros( n )\n x = numpy.zeros( n )\n b = numpy.transpose(b)[0]\n\n # do the QR factorization\n try:\n Q,R = qr(A)[:2] # Some QR routines return a third permutation P solving AP=QR.\n PA = A\n except TypeError:\n Q,R,P = qr(A,alpha)[:3] # Some QR routines return a third permutation P solving AP=QR.\n AP = numpy.dot( A, P )\n\n # Step 1'': orthogonalization of b against Q\n u = b\n for j in range( 0, n ) :\n # print \"Qj = \", Q[:,j]\n # print \"u = \", u\n # print \"dot = \", numpy.dot( Q[:,j], u )\n 
z[j] = numpy.dot( Q[:,j], u )\n u = u - z[j] * Q[:,j]\n\n # Step 2'': iterative orthogonalization of u\n ul2norm = numpy.linalg.norm( u )\n ii = 0\n while True : # iterate\n for j in range( 0, n ) :\n a[j] = numpy.dot( Q[:,j], u )\n z[j] = z[j] + a[j]\n u = u - a[j] * Q[:,j]\n\n ii = ii + 1\n ulnorm = ul2norm\n ul2norm = numpy.linalg.norm( u )\n\n #print ul2norm, ulnorm\n \n if (ul2norm > alpha * ulnorm) or ul2norm == 0 :\n # print \"used\", ii, \"orthogonalizations\"\n break\n\n #print z\n #print R\n\n # Step 3'': use back substitution to solve Rx = z\n for i in range( n-1, -1, -1 ) :\n x[i] = z[i]\n for j in range( i+1, n ) :\n x[i] = x[i] - R[i,j] * x[j]\n x[i] = x[i] / R[i,i]\n #print x\n\n #need to permute x according to permutation matrix P\n \n return numpy.dot( P, x )", "def qr_reshape(ten,split):\n shape = ten.shape\n matshape = (np.prod(shape[:split]),np.prod(shape[split:]))\n t0 = time.time()\n mat = np.reshape(ten,matshape)\n tf = time.time()\n print('\\tInitial Reshape time = {}'.format(tf-t0))\n t0 = time.time()\n Q,R = np.linalg.qr(mat)\n tf = time.time()\n print('\\tActual QR time = {}'.format(tf-t0))\n t0 = time.time()\n qshape = shape[:split]+(-1,)\n Q = np.reshape(Q,qshape)\n rshape = (-1,)+shape[split:]\n R = np.reshape(R,rshape)\n tf = time.time()\n print('\\tFinal Reshape time = {}'.format(tf-t0))\n return Q,R", "def GramSchmidt(A):\r\n n = len(A)\r\n # Finds the number of lists in the list, which is also the number of rows\r\n m = len(A[0])\r\n # Finds the number of elements in list one, which is also the number of columns\r\n V = A\r\n R = [[0]*n for i in range(n)]\r\n # creates an empty list R with dimensions of n rows and n columns\r\n Q = [[0]*m for i in range(n)]\r\n # creates an empty list Q with dimensions of n rows and m columns\r\n inputStatus = True\r\n # inputStatus is true at this point until proven otherwise\r\n for i in range(n):\r\n for j in range(m):\r\n if ((type(A[i][j]) != int) and (type(A[i][j]) != float) and (type(A[i][j]) != complex)):\r\n inputStatus = False\r\n print(\"Invalid Input\")\r\n # this checks each value in the matrix A to make sure it is some time of number, if it isnt a number then the input status will be false \r\n # if the input status is false then an error message will be displayed stating that this is an invalid input\r\n if inputStatus == True:\r\n # if the given list does not fall under the previous if statement then the input status will continue to be true and we can continue to find the QR factorization \r\n for i in range(n):\r\n # for loop which continues as long as there are still lists in A \r\n R[i][i] = norm(V[i])\r\n # Creates the border for the upper triangle matrix R, where each value in the diagonal is the 2 norm of the corresponding vector in the original matrix A \r\n Q[i] = unit(V[i])\r\n # Each vector in Q is the unit vector of the corresponding vector in A \r\n for j in range(i+1,n):\r\n # the position j will be 1 more than the position i \r\n R[j][i] = dot(Q[i],V[j])\r\n # The element in R[i+1][i] is the dot product of Q[i] and V[i+1] \r\n temp = scalarmul(R[j][i],Q[i])\r\n # This is the scalar multiplication of R[i+1][i] and Q[i] which will be labeled as temp \r\n V[j] = subtract(V[j],temp)\r\n # V[j] is the difference between the original V[j] and temp \r\n return[Q,R]", "def qr_factorize(X, offset=0):\n assert offset in [0, 1]\n assert type(X) == np.ndarray\n assert X.shape[0] == X.shape[1]\n\n R = copy.deepcopy(X)\n Q = np.eye(X.shape[0])\n\n for i in range(X.shape[0]-offset):\n Pi = 
np.eye(R.shape[0])\n _, Qi = hreflect1D(R[i+offset:, i])\n Pi[i+offset:, i+offset:] = Qi\n\n Q = Pi.dot(Q)\n R = Pi.dot(R)\n\n return Q.T, R", "def qr_step_factorization(q, r, iter, n):\n v = column_convertor(r[iter:, iter])\n Hbar, reflect = householder_transformation(v)\n H = np.identity(n)\n H[iter:, iter:] = Hbar\n r = np.matmul(H, r)\n q = np.matmul(q, H)\n return q, r,reflect", "def qrm(X, maxiter=15000, debug=False):\n n, m = X.shape\n assert n == m\n\n # First stage: transform to upper Hessenberg-matrix.\n A = copy.deepcopy(X)\n conv = False\n k = 0\n\n # Second stage: perform QR-transformations.\n while (not conv) and (k < maxiter):\n k += 1\n Q, R = helpers.qr_factorize(A)\n A = R.dot(Q)\n\n conv = np.alltrue(np.isclose(np.tril(A, k=-1), np.zeros((n, n))))\n\n if not conv:\n warnings.warn(\"Convergence was not reached. Consider raising maxiter.\")\n if debug:\n return k\n Evals = A.diagonal()\n order = np.abs(Evals).argsort()[::-1]\n return Evals[order], Q[order, :]", "def _QrGradSquareAndDeepMatrices(q: Array, r: Array, dq: Array, dr: Array) -> Array:\n\n # Modification begins\n rdiag = tf.linalg.diag_part(r)\n small_indices = tf.where(tf.math.abs(rdiag) < qr_epsilon)\n length = tf.shape(small_indices)[0]\n newvalues = tf.ones((length,), dtype=rdiag.dtype) * qr_epsilon\n rdiag = tf.tensor_scatter_nd_update(rdiag, small_indices, newvalues)\n delta_r = tf.linalg.set_diag(r, rdiag) - r\n r = r + delta_r\n # delta_dq = math_ops.matmul(q, math_ops.matmul(dr, tf.linalg.adjoint(delta_r)))\n # dq = dq + delta_dq\n # Modification ends\n\n qdq = tf.matmul(q, dq, adjoint_a=True)\n qdq_ = qdq - tf.linalg.adjoint(qdq)\n rdr = tf.matmul(r, dr, adjoint_b=True)\n rdr_ = rdr - tf.linalg.adjoint(rdr)\n tril = tf.linalg.band_part(qdq_ + rdr_, -1, 0)\n\n grad_a = tf.matmul(q, dr + _TriangularSolve(tril, r))\n grad_b = _TriangularSolve(dq - tf.matmul(q, qdq), r)\n ret = grad_a + grad_b\n\n if q.dtype.is_complex:\n m = rdr - tf.linalg.adjoint(qdq)\n eyem = tf.linalg.set_diag(tf.zeros_like(m), tf.linalg.diag_part(m))\n correction = eyem - tf.cast(tf.math.real(eyem), q.dtype)\n ret = ret + _TriangularSolve(tf.matmul(q, tf.linalg.adjoint(correction)), r)\n\n return ret", "def qrm2(X, maxiter=15000, debug=False):\n n, m = X.shape\n assert n == m\n\n # First stage: transform to upper Hessenberg-matrix.\n A = lin.hessenberg(X)\n conv = False\n k = 0\n\n # Second stage: perform QR-transformations.\n while (not conv) and (k < maxiter):\n k += 1\n Q, R = helpers.qr_factorize(A)\n A = R.dot(Q)\n\n conv = np.alltrue(np.isclose(np.tril(A, k=-1), np.zeros((n, n))))\n\n if not conv:\n warnings.warn(\"Convergence was not reached. 
Consider raising maxiter.\")\n if debug:\n return k\n Evals = A.diagonal()\n order = np.abs(Evals).argsort()[::-1]\n return Evals[order], Q[order, :]", "def qr(a):\n raise NotImplementedError", "def qrpos(mps):\n d, chiL, chiR = mps.shape\n mps_mat = fuse_left(mps)\n Q, R = np.linalg.qr(mps_mat)\n phases = np.sign(np.diag(R))\n Q = Q*phases\n R = phases.conj()[:, None] * R\n R = R / norm(R)\n mps_L = unfuse_left(Q, mps.shape)\n return (mps_L, R)", "def qrm3(X, maxiter=15000, debug=False):\n n, m = X.shape\n assert n == m\n\n # First stage: transform to upper Hessenberg-matrix.\n T = lin.hessenberg(X)\n\n conv = False\n k = 0\n\n # Second stage: perform QR-transformations.\n while (not conv) and (k < maxiter):\n k += 1\n Q, R = helpers.qr_factorize(T - T[n-1, n-1] * np.eye(n))\n T = R.dot(Q) + T[n-1, n-1] * np.eye(n)\n\n conv = np.alltrue(np.isclose(np.tril(T, k=-1), np.zeros((n, n))))\n\n if not conv:\n warnings.warn(\"Convergence was not reached. Consider raising maxiter.\")\n if debug:\n return k\n Evals = T.diagonal()\n order = np.abs(Evals).argsort()[::-1]\n return Evals[order], Q[order, :]", "def qr_factorization_projections(A, m, n, orth_tol, max_refin, tol):\n # QRFactorization\n Q, R, P = scipy.linalg.qr(A.T, pivoting=True, mode='economic')\n\n if np.linalg.norm(R[-1, :], np.inf) < tol:\n warn('Singular Jacobian matrix. Using SVD decomposition to ' +\n 'perform the factorizations.')\n return svd_factorization_projections(A, m, n,\n orth_tol,\n max_refin,\n tol)\n\n # z = x - A.T inv(A A.T) A x\n def null_space(x):\n # v = P inv(R) Q.T x\n aux1 = Q.T.dot(x)\n aux2 = scipy.linalg.solve_triangular(R, aux1, lower=False)\n v = np.zeros(m)\n v[P] = aux2\n z = x - A.T.dot(v)\n\n # Iterative refinement to improve roundoff\n # errors described in [2]_, algorithm 5.1.\n k = 0\n while orthogonality(A, z) > orth_tol:\n if k >= max_refin:\n break\n # v = P inv(R) Q.T x\n aux1 = Q.T.dot(z)\n aux2 = scipy.linalg.solve_triangular(R, aux1, lower=False)\n v[P] = aux2\n # z_next = z - A.T v\n z = z - A.T.dot(v)\n k += 1\n\n return z\n\n # z = inv(A A.T) A x\n def least_squares(x):\n # z = P inv(R) Q.T x\n aux1 = Q.T.dot(x)\n aux2 = scipy.linalg.solve_triangular(R, aux1, lower=False)\n z = np.zeros(m)\n z[P] = aux2\n return z\n\n # z = A.T inv(A A.T) x\n def row_space(x):\n # z = Q inv(R.T) P.T x\n aux1 = x[P]\n aux2 = scipy.linalg.solve_triangular(R, aux1,\n lower=False,\n trans='T')\n z = Q.dot(aux2)\n return z\n\n return null_space, least_squares, row_space", "def qr(a, mode=\"reduced\"):\n return QRFull(mode)(a)", "def qr_solve(Q, R, rhs):\n tmp = np.dot(Q.T, rhs)\n return solve_triangular(R, tmp, lower=False)", "def newton_qr(jacobian_mat, newton_coefficient, step_size):\n identity = tf.eye(ps.shape(jacobian_mat)[0], dtype=jacobian_mat.dtype)\n step_size_cast = tf.cast(step_size, jacobian_mat.dtype)\n newton_matrix = (\n identity - step_size_cast * newton_coefficient * jacobian_mat)\n factorization = tf.linalg.qr(newton_matrix)\n return factorization.q, factorization.r", "def __split0_r_calc(\n r_tiles: SquareDiagTiles,\n q_dict: Dict,\n q_dict_waits: Dict,\n col_num: int,\n diag_pr: int,\n not_completed_prs: torch.Tensor,\n) -> None:\n tile_rows_proc = r_tiles.tile_rows_per_process\n comm = r_tiles.arr.comm\n rank = comm.rank\n lcl_tile_row = 0 if rank != diag_pr else col_num - sum(tile_rows_proc[:rank])\n # only work on the processes which have not computed the final result\n q_dict[col_num] = {}\n q_dict_waits[col_num] = {}\n\n # --------------- local QR calc 
-----------------------------------------------------\n base_tile = r_tiles.local_get(key=(slice(lcl_tile_row, None), col_num))\n try:\n q1, r1 = torch.linalg.qr(base_tile, mode=\"complete\")\n except AttributeError:\n q1, r1 = base_tile.qr(some=False)\n\n q_dict[col_num][\"l0\"] = [q1, base_tile.shape]\n r_tiles.local_set(key=(slice(lcl_tile_row, None), col_num), value=r1)\n if col_num != r_tiles.tile_columns - 1:\n base_rest = r_tiles.local_get((slice(lcl_tile_row, None), slice(col_num + 1, None)))\n loc_rest = torch.matmul(q1.T, base_rest)\n r_tiles.local_set(key=(slice(lcl_tile_row, None), slice(col_num + 1, None)), value=loc_rest)\n # --------------- global QR calc (binary merge) -------------------------------------\n rem1 = None\n rem2 = None\n offset = not_completed_prs[0]\n loop_size_remaining = not_completed_prs.clone()\n completed = bool(loop_size_remaining.size()[0] <= 1)\n procs_remaining = loop_size_remaining.size()[0]\n loop = 0\n while not completed:\n if procs_remaining % 2 == 1:\n # if the number of processes active is odd need to save the remainders\n if rem1 is None:\n rem1 = loop_size_remaining[-1]\n loop_size_remaining = loop_size_remaining[:-1]\n elif rem2 is None:\n rem2 = loop_size_remaining[-1]\n loop_size_remaining = loop_size_remaining[:-1]\n if rank not in loop_size_remaining and rank not in [rem1, rem2]:\n break # if the rank is done then exit the loop\n # send the data to the corresponding processes\n half_prs_rem = torch.div(procs_remaining, 2, rounding_mode=\"floor\")\n\n zipped = zip(\n loop_size_remaining.flatten()[:half_prs_rem],\n loop_size_remaining.flatten()[half_prs_rem:],\n )\n for pr in zipped:\n pr0, pr1 = int(pr[0].item()), int(pr[1].item())\n __split0_merge_tile_rows(\n pr0=pr0,\n pr1=pr1,\n column=col_num,\n rank=rank,\n r_tiles=r_tiles,\n diag_process=diag_pr,\n key=str(loop) + \"p0\" + str(pr0) + \"p1\" + str(pr1) + \"e\",\n q_dict=q_dict,\n )\n\n __split0_send_q_to_diag_pr(\n col=col_num,\n pr0=pr0,\n pr1=pr1,\n diag_process=diag_pr,\n comm=comm,\n q_dict=q_dict,\n key=str(loop) + \"p0\" + str(pr0) + \"p1\" + str(pr1) + \"e\",\n q_dict_waits=q_dict_waits,\n q_dtype=r_tiles.arr.dtype.torch_type(),\n q_device=r_tiles.arr.larray.device,\n )\n\n loop_size_remaining = loop_size_remaining[: -1 * (half_prs_rem)]\n procs_remaining = loop_size_remaining.size()[0]\n\n if rem1 is not None and rem2 is not None:\n # combine rem1 and rem2 in the same way as the other nodes,\n # then save the results in rem1 to be used later\n __split0_merge_tile_rows(\n pr0=rem2,\n pr1=rem1,\n column=col_num,\n rank=rank,\n r_tiles=r_tiles,\n diag_process=diag_pr,\n key=str(loop) + \"p0\" + str(int(rem1)) + \"p1\" + str(int(rem2)) + \"e\",\n q_dict=q_dict if q_dict is not None else {},\n )\n\n rem1, rem2 = int(rem1), int(rem2)\n __split0_send_q_to_diag_pr(\n col=col_num,\n pr0=rem2,\n pr1=rem1,\n diag_process=diag_pr,\n key=str(loop) + \"p0\" + str(int(rem1)) + \"p1\" + str(int(rem2)) + \"e\",\n q_dict=q_dict if q_dict is not None else {},\n comm=comm,\n q_dict_waits=q_dict_waits,\n q_dtype=r_tiles.arr.dtype.torch_type(),\n q_device=r_tiles.arr.larray.device,\n )\n rem1 = rem2\n rem2 = None\n\n loop += 1\n if rem1 is not None and rem2 is None and procs_remaining == 1:\n # combine rem1 with process 0 (offset) and set completed to True\n # this should be the last thing that happens\n __split0_merge_tile_rows(\n pr0=offset,\n pr1=rem1,\n column=col_num,\n rank=rank,\n r_tiles=r_tiles,\n diag_process=diag_pr,\n key=str(loop) + \"p0\" + str(int(offset)) + \"p1\" + 
str(int(rem1)) + \"e\",\n q_dict=q_dict,\n )\n\n offset, rem1 = int(offset), int(rem1)\n __split0_send_q_to_diag_pr(\n col=col_num,\n pr0=offset,\n pr1=rem1,\n diag_process=diag_pr,\n key=str(loop) + \"p0\" + str(int(offset)) + \"p1\" + str(int(rem1)) + \"e\",\n q_dict=q_dict,\n comm=comm,\n q_dict_waits=q_dict_waits,\n q_dtype=r_tiles.arr.dtype.torch_type(),\n q_device=r_tiles.arr.larray.device,\n )\n rem1 = None\n\n completed = True if procs_remaining == 1 and rem1 is None and rem2 is None else False", "def basis(A):\n if A.is_cuda:\n # torch.orgqr is not available in CUDA\n Q, _ = torch.qr(A, some=True)\n else:\n Q = torch.orgqr(*torch.geqrf(A))\n return Q", "def local_orthonormalize_left_qr(A: np.ndarray, Anext: np.ndarray, qd: Sequence[int], qD: Sequence[Sequence[int]]):\n # perform QR decomposition and replace A by reshaped Q matrix\n s = A.shape\n assert len(s) == 3\n q0 = qnumber_flatten([qd, qD[0]])\n Q, R, qbond = qr(A.reshape((s[0]*s[1], s[2])), q0, qD[1])\n A = Q.reshape((s[0], s[1], Q.shape[1]))\n # update Anext tensor: multiply with R from left\n Anext = np.tensordot(R, Anext, (1, 1)).transpose((1, 0, 2))\n return (A, Anext, qbond)", "def obtain_Q(self):\n \n #create the initial triangular matrix as a copy of the m x n - matrix A\n \n v_list = Householder.vector(self)\n n_v = len(v_list) # number of vectors, not equal to number of columns in R\n q_m = len(v_list[0]) # longest vector, should determine the shape of Q\n \n H_list = []\n for i in list(range(n_v)):\n \n gamma = ((np.linalg.norm(v_list[i]))**2)/2\n vvtrans = v_list[i] * np.transpose(v_list[i])\n H = np.identity((q_m-i)) - (vvtrans/gamma)\n \n print(H.shape)\n\n m_H, n_H = H.shape\n if m_H < q_m:\n I = np.identity(q_m)\n x = y = i\n I [ x:x+H.shape[0], y:y+H.shape[1]] = H\n H = I\n H_list.append(H)\n \n # The transpose of Q is the result of the dot product H(n-1)...H1 \n \n len_H = len(H_list)\n\n H_temp = H_list[-1]\n \n for i in np.arange(len_H-1,0,-1):\n \n H_temp = np.matmul(H_temp, H_list[i-1])\n \n Q = np.transpose(H_temp)\n \n return(Q)", "def tfqr_grad(a: Array, q: Array, r: Array, dq: Array, dr: Array) -> Array:\n\n if (\n r.shape.ndims is None\n or r.shape.as_list()[-2] is None\n or r.shape.as_list()[-1] is None\n ):\n raise NotImplementedError(\n \"QrGrad not implemented with dynamic shapes. \"\n f\"Received r.shape: {r.shape}\"\n )\n if (\n r.shape.dims[-2].value > r.shape.dims[-1].value\n and q.shape.dims[-2].value == q.shape.dims[-1].value\n ):\n raise NotImplementedError(\n \"QrGrad not implemented when nrows > ncols \"\n \"and full_matrices is true. 
Received r.shape=\"\n f\"{r.shape} with nrows={r.shape.dims[-2]}\"\n f\"and ncols={r.shape.dims[-1]}.\"\n )\n\n def _TriangularSolve(x: Array, r: Array) -> Array:\n \"\"\"Equivalent to matmul(x, adjoint(matrix_inverse(r))) if r is upper-tri.\"\"\"\n return tf.linalg.adjoint(\n tf.linalg.triangular_solve(\n r, tf.linalg.adjoint(x), lower=False, adjoint=False\n )\n )\n\n def _QrGradSquareAndDeepMatrices(q: Array, r: Array, dq: Array, dr: Array) -> Array:\n \"\"\"\n Get the gradient for matrix orders num_rows >= num_cols and full_matrices is false.\n \"\"\"\n\n # Modification begins\n rdiag = tf.linalg.diag_part(r)\n small_indices = tf.where(tf.math.abs(rdiag) < qr_epsilon)\n length = tf.shape(small_indices)[0]\n newvalues = tf.ones((length,), dtype=rdiag.dtype) * qr_epsilon\n rdiag = tf.tensor_scatter_nd_update(rdiag, small_indices, newvalues)\n delta_r = tf.linalg.set_diag(r, rdiag) - r\n r = r + delta_r\n # delta_dq = math_ops.matmul(q, math_ops.matmul(dr, tf.linalg.adjoint(delta_r)))\n # dq = dq + delta_dq\n # Modification ends\n\n qdq = tf.matmul(q, dq, adjoint_a=True)\n qdq_ = qdq - tf.linalg.adjoint(qdq)\n rdr = tf.matmul(r, dr, adjoint_b=True)\n rdr_ = rdr - tf.linalg.adjoint(rdr)\n tril = tf.linalg.band_part(qdq_ + rdr_, -1, 0)\n\n grad_a = tf.matmul(q, dr + _TriangularSolve(tril, r))\n grad_b = _TriangularSolve(dq - tf.matmul(q, qdq), r)\n ret = grad_a + grad_b\n\n if q.dtype.is_complex:\n m = rdr - tf.linalg.adjoint(qdq)\n eyem = tf.linalg.set_diag(tf.zeros_like(m), tf.linalg.diag_part(m))\n correction = eyem - tf.cast(tf.math.real(eyem), q.dtype)\n ret = ret + _TriangularSolve(tf.matmul(q, tf.linalg.adjoint(correction)), r)\n\n return ret\n\n num_rows, num_cols = q.shape.dims[-2].value, r.shape.dims[-1]\n\n if num_rows >= num_cols:\n return _QrGradSquareAndDeepMatrices(q, r, dq, dr)\n\n y = a[..., :, num_rows:]\n u = r[..., :, :num_rows]\n dv = dr[..., :, num_rows:]\n du = dr[..., :, :num_rows]\n dy = tf.matmul(q, dv)\n dx = _QrGradSquareAndDeepMatrices(q, u, dq + tf.matmul(y, dv, adjoint_b=True), du)\n return tf.concat([dx, dy], axis=-1)", "def qr(obj):\n if isinstance(obj, (core.Dense, numpy.ndarray)):\n Q, R = numpy.linalg.qr(obj)\n return core.Dense(Q), core.Dense(R)\n if isinstance(obj, core.BlockLowRank):\n return _blr_mbgs(obj)\n return NotImplemented", "def _implicitly_restarted_arnoldi(jax: types.ModuleType) -> Callable:\n\n arnoldi_fact = _generate_arnoldi_factorization(jax)\n\n # ######################################################\n # ####### NEW SORTING FUCTIONS INSERTED HERE #########\n # ######################################################\n @functools.partial(jax.jit, static_argnums=(1,))\n def LR_sort(evals, p):\n inds = np.argsort(jax.numpy.real(evals), kind='stable')[::-1]\n shifts = evals[inds][-p:]\n return shifts, inds\n\n @functools.partial(jax.jit, static_argnums=(1,))\n def LM_sort(evals, p):\n inds = np.argsort(jax.numpy.abs(evals), kind='stable')[::-1]\n shifts = evals[inds][-p:]\n return shifts, inds\n\n # #######################################################\n # #######################################################\n # #######################################################\n @functools.partial(jax.jit, static_argnums=(4, 5, 6))\n def shifted_QR(Vm, Hm, fm, evals, k, p, which, res_thresh):\n funs = [LR_sort, LM_sort]\n shifts, _ = funs[which](evals, p)\n # compress to k = numeig\n q = jax.numpy.zeros(Hm.shape[0])\n q = jax.ops.index_update(q, jax.ops.index[-1], 1)\n m = Hm.shape[0]\n\n for shift in shifts:\n Qj, _ = jax.numpy.linalg.qr(Hm - 
shift * jax.numpy.eye(m))\n Hm = Qj.T.conj() @ Hm @ Qj\n Vm = Qj.T @ Vm\n q = q @ Qj\n\n fk = Vm[k, :] * Hm[k, k - 1] + fm * q[k - 1]\n Vk = Vm[0:k, :]\n Hk = Hm[0:k, 0:k]\n H = jax.numpy.zeros((k + p + 1, k + p), dtype=fm.dtype)\n H = jax.ops.index_update(H, jax.ops.index[0:k, 0:k], Hk)\n Z = jax.numpy.linalg.norm(fk)\n v = fk / Z\n krylov_vectors = jax.numpy.zeros((k + p + 1, Vm.shape[1]), dtype=fm.dtype)\n krylov_vectors = jax.ops.index_update(krylov_vectors, jax.ops.index[0:k, :],\n Vk)\n krylov_vectors = jax.ops.index_update(krylov_vectors, jax.ops.index[k:], v)\n Z = jax.numpy.linalg.norm(fk)\n #if fk is a zero-vector then arnoldi has exactly converged.\n #use small threshold to check this\n return krylov_vectors, H, fk, Z < res_thresh\n\n @functools.partial(jax.jit, static_argnums=(2,))\n def update_data(Vm_tmp, Hm_tmp, numits):\n Vm = Vm_tmp[0:numits, :]\n Hm = Hm_tmp[0:numits, 0:numits]\n fm = Vm_tmp[numits, :] * Hm_tmp[numits, numits - 1]\n return Vm, Hm, fm\n\n @functools.partial(jax.jit, static_argnums=(3,))\n def get_vectors(Vm, unitary, inds, numeig):\n\n def body_vector(i, vals):\n krv, unitary, states, inds = vals\n dim = unitary.shape[1]\n n, m = jax.numpy.divmod(i, dim)\n states = jax.ops.index_add(states, jax.ops.index[n, :],\n krv[m, :] * unitary[m, inds[n]])\n return [krv, unitary, states, inds]\n\n state_vectors = jax.numpy.zeros([numeig, Vm.shape[1]], dtype=Vm.dtype)\n _, _, state_vectors, _ = jax.lax.fori_loop(\n 0, numeig * Vm.shape[0], body_vector,\n [Vm, unitary, state_vectors, inds])\n state_norms = jax.numpy.linalg.norm(state_vectors, axis=1)\n state_vectors = state_vectors / state_norms[:, None]\n return state_vectors\n\n\n def implicitly_restarted_arnoldi_method(\n matvec, args, initial_state, num_krylov_vecs, numeig, which, eps, maxiter,\n res_thresh) -> Tuple[List[Tensor], List[Tensor]]:\n \"\"\"\n Implicitly restarted arnoldi factorization of `matvec`. The routine\n finds the lowest `numeig` eigenvector-eigenvalue pairs of `matvec`\n by alternating between compression and re-expansion of an initial\n `num_krylov_vecs`-step Arnoldi factorization.\n\n Note: The caller has to ensure that the dtype of the return value\n of `matvec` matches the dtype of the initial state. Otherwise jax\n will raise a TypeError.\n\n Args:\n matvec: A callable representing the linear operator.\n args: Arguments to `matvec`. `matvec` is called with\n `matvec(x, *args)` with `x` the input array on which\n `matvec` should act.\n initial_state: An starting vector for the iteration.\n num_krylov_vecs: Number of krylov vectors of the arnoldi factorization.\n numeig: The number of desired eigenvector-eigenvalue pairs.\n which: Which eigenvalues to target. Currently supported: `which = 'LR'`\n or `which = 'LM'`.\n eps: Convergence flag. 
If the norm of a krylov vector drops below `eps`\n the iteration is terminated.\n maxiter: Maximum number of (outer) iteration steps.\n Returns:\n eta, U: Two lists containing eigenvalues and eigenvectors.\n \"\"\"\n N = np.prod(initial_state.shape)\n p = num_krylov_vecs - numeig\n num_krylov_vecs = np.min([num_krylov_vecs, N])\n if (p <= 1) and (num_krylov_vecs < N):\n raise ValueError(f\"`num_krylov_vecs` must be between `numeig` + 1 <\"\n f\" `num_krylov_vecs` <= N={N},\"\n f\" `num_krylov_vecs`={num_krylov_vecs}\")\n\n dtype = initial_state.dtype\n # initialize arrays\n krylov_vectors = jax.numpy.zeros(\n (num_krylov_vecs + 1, jax.numpy.ravel(initial_state).shape[0]),\n dtype=dtype)\n H = jax.numpy.zeros((num_krylov_vecs + 1, num_krylov_vecs), dtype=dtype)\n # perform initial arnoldi factorization\n Vm_tmp, Hm_tmp, numits, converged = arnoldi_fact(matvec, args,\n initial_state,\n krylov_vectors, H, 0,\n num_krylov_vecs, eps)\n # obtain an m-step arnoldi factorization\n Vm, Hm, fm = update_data(Vm_tmp, Hm_tmp, numits)\n\n it = 0\n if which == 'LR':\n _which = 0\n elif which == 'LM':\n _which = 1\n else:\n raise ValueError(f\"which = {which} not implemented\")\n # make sure the dtypes are matching\n if maxiter > 0:\n if Vm.dtype == np.float64:\n dtype = np.complex128\n elif Vm.dtype == np.float32:\n dtype = np.complex64\n elif Vm.dtype == np.complex128:\n dtype = Vm.dtype\n elif Vm.dtype == np.complex64:\n dtype = Vm.dtype\n else:\n raise TypeError(f'dtype {Vm.dtype} not supported')\n Vm = Vm.astype(dtype)\n Hm = Hm.astype(dtype)\n fm = fm.astype(dtype)\n\n while (it < maxiter) and (not converged):\n evals, _ = jax.numpy.linalg.eig(Hm)\n krylov_vectors, H, fk, converged = shifted_QR(Vm, Hm, fm, evals, numeig,\n p, _which, res_thresh)\n if converged:\n break\n v0 = jax.numpy.reshape(fk, initial_state.shape)\n # restart\n Vm_tmp, Hm_tmp, _, converged = arnoldi_fact(matvec, args, v0,\n krylov_vectors, H, numeig,\n num_krylov_vecs, eps)\n Vm, Hm, fm = update_data(Vm_tmp, Hm_tmp, num_krylov_vecs)\n it += 1\n\n ev_, U_ = np.linalg.eig(np.array(Hm))\n eigvals = jax.numpy.array(ev_)\n U = jax.numpy.array(U_)\n _, inds = LR_sort(eigvals, _which)\n vectors = get_vectors(Vm, U, inds, numeig)\n\n return eigvals[inds[0:numeig]], [\n jax.numpy.reshape(vectors[n, :], initial_state.shape)\n for n in range(numeig)\n ]\n\n return implicitly_restarted_arnoldi_method", "def dlqr(A,B,Q,R):\n #ref Bertsekas, p.151\n \n #first, try to solve the ricatti equation\n X = np.matrix(scipy.linalg.solve_discrete_are(A, B, Q, R))\n \n #compute the LQR gain\n K = np.matrix(scipy.linalg.inv(B.T*X*B+R)*(B.T*X*A))\n \n eigVals, eigVecs = scipy.linalg.eig(A-B*K)\n \n return K, X, eigVals", "def local_orthonormalize_right_qr(A: np.ndarray, Aprev: np.ndarray, qd: Sequence[int], qD: Sequence[Sequence[int]]):\n # flip left and right virtual bond dimensions\n A = A.transpose((0, 2, 1))\n # perform QR decomposition and replace A by reshaped Q matrix\n s = A.shape\n assert len(s) == 3\n q0 = qnumber_flatten([qd, -qD[1]])\n Q, R, qbond = qr(A.reshape((s[0]*s[1], s[2])), q0, -qD[0])\n A = Q.reshape((s[0], s[1], Q.shape[1])).transpose((0, 2, 1))\n # update Aprev tensor: multiply with R from right\n Aprev = np.tensordot(Aprev, R, (2, 1))\n return (A, Aprev, -qbond)", "def QR_apply(X,y,active) :\n assert y.ndim == 2, \"In order to apply householder\"\n \n A = X.copy()\n y = y.copy()\n ix = np.where(active)[0]\n for i,j in enumerate(ix) :\n #print \"loop j:\",j, \"i: \",i\n beta, h = householder(A[i:,j])\n A[i:,j:] = 
apply_householder(A[i:,j:],beta,h)\n y[i:] = apply_householder(y[i:],beta,h)\n #print \"A: \"\n #print A\n stdout.flush()\n return A", "def magma_sorgqr(m, n, k, A, lda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_sorgqr(m, n, k, int(A), lda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def qri_mgs( A, alpha ):\n A = numpy.array(A, dtype=float)\n m,n = A.shape\n Q1 = numpy.zeros( (m,n) )\n R = numpy.zeros( (n,n) )\n\n for k in range( 0, n ) :\n Qhat = A[:,k]\n Qhat2 = Qhat\n ii = 0\n while True : # iterate\n for i in range( 0, k ) :\n s = numpy.dot( Q1[:,i], Qhat )\n Qhat2 = Qhat2 - s * Q1[:,i]\n R[i,k] = R[i,k] + s\n\n ii = ii + 1\n Qhat2len = numpy.linalg.norm( Qhat2 )\n Qhatlen = numpy.linalg.norm( Qhat )\n if (Qhat2len > alpha * Qhatlen) :\n Qhat = Qhat2\n print ii\n break\n Qhat = Qhat2\n \n R[k,k] = numpy.linalg.norm( Qhat )\n Q1[:,k] = Qhat / R[k,k]\n \n return Q1,R", "def magma_zungqr(m, n, k, A, lda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_zungqr(m, n, k, int(A), lda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def _compute_Q(self, X_far, X_near, inner_product_far, A):\n\n if inner_product_far is None:\n inner_product_far = X_far @ X_far.T\n\n inner_product_far_near = X_far @ X_near.T\n inner_product_near = X_near @ X_near.T\n\n Q_block = []\n for i in range(X_near.shape[0]):\n Q_block.append([])\n for j in range(X_near.shape[0]):\n if i > j:\n Q_block[i].append(\n Q_block[j][i].T\n )\n else:\n Q_block[i].append(\n inner_product_far\n - inner_product_far_near[:, i][np.newaxis, :]\n - inner_product_far_near[:, j][:, np.newaxis]\n + inner_product_near[i, j]\n )\n return np.block(Q_block)", "def _large_circuit():\n qr = QuantumRegister(9, name='qr')\n cr = ClassicalRegister(9, name='cr')\n circuit = QuantumCircuit(qr, cr)\n\n for i in range(3):\n zero = 3 * i\n first = 3 * i + 1\n second = 3 * i + 2\n\n circuit.x(qr[zero])\n circuit.y(qr[first])\n circuit.z(qr[second])\n\n circuit.h(qr[zero])\n circuit.s(qr[first])\n circuit.sdg(qr[second])\n\n circuit.t(qr[zero])\n circuit.tdg(qr[first])\n circuit.iden(qr[second])\n\n circuit.reset(qr[zero])\n circuit.reset(qr[first])\n circuit.reset(qr[second])\n\n circuit.rx(pi / 8, qr[zero])\n circuit.ry(pi / 8, qr[first])\n circuit.rz(pi / 8, qr[second])\n\n circuit.u1(pi / 8, qr[zero])\n circuit.u2(pi / 8, pi / 8, qr[first])\n circuit.u3(pi / 8, pi / 8, pi / 8, qr[second])\n\n circuit.swap(qr[zero], qr[first])\n\n circuit.cx(qr[zero], qr[first])\n circuit.cy(qr[first], qr[second])\n circuit.cz(qr[second], qr[zero])\n circuit.ch(qr[zero], qr[first])\n\n circuit.cu1(pi / 8, qr[zero], qr[first])\n circuit.cu3(pi / 8, pi / 8, pi / 8, qr[first], qr[second])\n\n circuit.barrier(qr)\n\n circuit.measure(qr, cr)\n\n return circuit", "def inv(in_A):\n Q,R = qr(in_A)\n QT = Q.T\n N = shape(in_A)[0]\n \n for n in range(N-1,-1,-1):\n Rnn = R[n,n]\n R[n,:] /= Rnn\n QT[n,:] /= Rnn\n for m in range(n+1,N):\n Rnm = R[n,m]\n R[n,m] = 0\n QT[n,:] -= QT[m,:]*Rnm\n\n return QT", "def qr(self, leg):\n if leg in self.internallegs:\n raise ValueError(f'{leg} is internal.')\n\n ingoing = self.flowof(leg)\n i1, i2 = self.coupling_id(leg)\n f_id = self.indexes.index(leg)\n\n R = Tensor(self.symmetries)\n Q = Tensor(self.symmetries)\n\n nleg = Leg()\n R.coupling = ((Leg(vacuum=True), True), (leg, True), (nleg, False)) if\\\n ingoing else ((Leg(vacuum=True), True), (nleg, True), (leg, False))\n\n Q.coupling = self.substitutelegs([leg], [nleg])\n Q._indexes = tuple(i 
if i != leg else nleg for i in self.indexes)\n vacuum = (0,) * len(self.symmetries)\n\n assert R.connections(Q) == set([nleg])\n\n keys = set([k[i1][i2] for k in self])\n transp = list(range(len(self.indexes)))\n transp.pop(f_id)\n transp.append(f_id)\n transp = np.array(transp)\n i_transp = np.argsort(transp)\n\n SU2_ids = self.getSymmIds('SU(2)')\n internals = [\n (ii, jj) for ii, c in enumerate(self.coupling)\n for jj, (l, f) in enumerate(c) if l in self.internallegs and f]\n\n # For scaling of the internal legs\n def ipref(key):\n if SU2_ids:\n return np.prod([np.sqrt(key[x][y][i] + 1)\n for (x, y) in internals for i in SU2_ids])\n else:\n return 1.\n\n for key in keys:\n lpref = np.prod([np.sqrt(key[i] + 1) for i in SU2_ids])\n blocks = [(k, ipref(k)) for k in self if k[i1][i2] == key]\n\n ldim = set(self[k].shape[f_id] for k, _ in blocks)\n # check if dimension is consistent everywhere\n assert len(ldim) == 1\n ldim = ldim.pop()\n\n size = sum(self[k].size for k, _ in blocks)\n assert size % ldim == 0\n\n # Moving all needed blocks into one matrix\n Aarr = [np.transpose(self[block], transp).reshape(-1, ldim) / pref\n for block, pref in blocks]\n\n Aspl = np.cumsum(np.array([r.shape[0] for r in Aarr[:-1]]))\n\n q, r = np.linalg.qr(np.vstack(Aarr))\n newlead = q.shape[-1]\n thiskey = ((vacuum, key, key),)\n R[thiskey] = np.expand_dims((r.T if ingoing else r), axis=0)\n\n # moving back all the blocks into the original tensor\n for (block, pref), x in zip(blocks, np.split(q, Aspl)):\n new_shape = [self[block].shape[x] for x in transp]\n assert new_shape[-1] == ldim\n new_shape[-1] = newlead\n Q[block] = pref * lpref * \\\n np.transpose(x.reshape(new_shape), i_transp)\n\n return Q, R", "def positive_qr(Z):\n Q, R = la.qr(Z)\n D = np.diag(np.sign(np.diag(R)))\n Q = Q.dot(D)\n R = D.dot(R)\n return Q, R", "def dlqr(A,B,Q=None,R=None):\n #ref Bertsekas, p.151\n if Q is None:\n Q = np.eye(A.shape[0])\n if R is None:\n R = np.eye(B.shape[1])\n\n P = scipy.linalg.solve_discrete_are(A, B, Q, R)\n K = -scipy.linalg.solve(B.T.dot(P).dot(B) + R, B.T.dot(P).dot(A), sym_pos=True)\n\n A_c = A + B.dot(K)\n TOL = 1e-5\n if spectral_radius(A_c) >= 1 + TOL:\n print(\"WARNING: spectral radius of closed loop is:\", spectral_radius(A_c))\n\n return P, K", "def busca_base(A, a, q):\n \n \n nrow = np.shape(A)[0]\n zeros = np.zeros((nrow, 1))\n B0, _ = householder(power(A-a*np.identity(nrow), q), zeros)\n \n # calcula el rango de la matriz B0\n r = rango_matrix(B0)\n \n # G = [G1, G2, ..., Gn-r], donde Gi: i-esimo vector generador de N((A-aI)^{q})\n # NOTA: No esta garanizado que {G1, G2, ..., Gn-r}, sean li\n G = np.zeros((nrow, nrow-r))\n for k in range(r):\n # coeff(k, i): coeficiente de xi en la combinacion lineal de los xr+1,xr+2,...,xn, para obtener -B0[k,k]xk\n G[k,:] = - np.array([coeff(k,r+i, B0) for i in range(1, nrow-r+1)])/B0[k,k]\n G[nrow:, :] = np.identity(nrow-r)\n \n return vectores_li(G, r) # retorna solo los vectore li de {G1, G2, ..., Gn-r}, en una matriz", "def solve(n):\n originalboard = makeBoard(n)\n recursiveQueen(0, originalboard, n, n, [])", "def dlqr(a, b, q, r):\n a, b, q, r = map(np.atleast_2d, (a, b, q, r))\n p = scipy.linalg.solve_discrete_are(a, b, q, r)\n\n # LQR gain\n # k = (b.T * p * b + r)^-1 * (b.T * p * a)\n bp = b.T.dot(p)\n tmp1 = bp.dot(b)\n tmp1 += r\n tmp2 = bp.dot(a)\n k = np.linalg.solve(tmp1, tmp2)\n return k, p", "def magma_zungqr_m(m, n, k, A, lda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_zungqr_m(m, n, k, int(A), lda,\n int(tau), int(dT), 
nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def magma_sormqr(side, trans, m, n, k, A, lda,\n tau, C, ldc, work, lwork):\n info = c_int_type()\n side = _side_conversion[side]\n trans = _trans_conversion[trans]\n status = _libmagma.magma_sormqr(side, trans, m, n, k,\n int(A), lda, int(tau),\n int(C), ldc, int(work), lwork,\n ctypes.byref(info))", "def magma_dorgqr(m, n, k, A, lda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_dorgqr(m, n, k, int(A), lda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def _blr_mbgs(obj):\n rnb, cnb = obj.nb\n min_nb = min(obj.nb)\n A = obj.copy()\n Q = core.BlockLowRank(numpy.full((rnb, min_nb), None))\n R = core.BlockLowRank(numpy.full((min_nb, cnb), None))\n\n for i, j in numpy.ndindex(R.nb):\n rows = A[i, i].shape[1]\n cols = A[i, j].shape[1]\n R[i, j] = core.Zero((rows, cols))\n\n for j in range(min_nb):\n Q[:, j], R[j, j] = _blr_tsqr(A[:, j])\n\n for k in range(j + 1, cnb):\n R[j, k] = (Q[:, j].T * A[:, k])[0, 0]\n A[:, k] = A[:, k] - Q[:, j] * core.BlockLowRank([[R[j, k]]])\n\n return Q, R", "def recursiveQueen(row, validList, queensLeft, boardsize, sol_stack):\n #Internal helper functions\n def cleanup(pushed):\n for i in range(0,pushed):\n sol_stack.pop()\n def print_solution():\n printingList = sol_stack[-1*boardsize:] #slice for last 5 solutions\n print \", \".join(str(printingList[i]) for i in range(0, boardsize))\n fancyprint(printingList, boardsize)\n\n #Are we even on the board? Do we even have squares left to test?\n if row > boardsize:\n return False\n elif len(validList) == 0:\n return False\n\n #Save board state \n copiedList = copy.deepcopy(validList)\n pushed = 0\n for col in range(0, boardsize):\n if placeQueen(copiedList, Position(row, col), boardsize):\n sol_stack.append(Position(row,col))\n pushed += 1\n queensLeft = queensLeft - 1\n if queensLeft == 0: \n print \"Solution:\"\n print_solution()\n cleanup(pushed)\n return True\n else: #keep going\n failed = recursiveQueen(row+1, copiedList, queensLeft, boardsize, sol_stack)\n if not failed: #reset list to last case, try next row\n copiedList = copy.deepcopy(validList)\n queensLeft += 1\n pushed -= 1\n sol_stack.pop()\n # do we still have leftovers? 
clean them up\n cleanup(pushed)\n return False", "def magma_cungqr(m, n, k, A, lda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_cungqr(m, n, k, int(A), lda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def matI(a):\n shape=matShape(a)\n if shape[0]!=shape[1]: raise ValueError\n n=shape[0]\n ret=matZeros((n,n*2))\n for i in range(n):\n for j in range(n):\n matSet(ret,i,j,matGet(a,i,j))\n for i in range(n):\n matSet(ret,i,i+n,1)\n for row in range(n):\n rm=row\n ap=abs(matGet(ret,rm,row))\n for rint in range(row+1,n):\n p=abs(matGet(ret,rint,row))\n if ap<p:\n ap=p\n rm=rint\n if 0.000000001 > ap:\n return matCopy(a) # Not invertible\n di=matGet(ret,rm,row)\n if rm!=row:\n for i in range(n*2):\n t=matGet(ret,rm,i)\n matSet(ret,rm,i,matGet(ret,row,i))\n matSet(ret,row,i,t)\n idi=1.0/di\n for rint in range(row+1,n):\n f=idi*matGet(ret,rint,row)\n if f!=0:\n for co in range(row,n*2):\n matSet(ret,rint,co,matGet(ret,rint,co)-f*matGet(ret,row,co))\n row=n-1\n while row>=0:\n ic=1.0/matGet(ret,row,row)\n for rint in range(row):\n icx=ic*matGet(ret,rint,row)\n if icx!=0:\n for co in range(row, n*2):\n matSet(ret,rint,co,matGet(ret,rint,co)-icx*matGet(ret,row,co))\n matSet(ret,row,row,ic*matGet(ret,row,row))\n for co in range(n,n*2):\n matSet(ret,row,co,ic*matGet(ret,row,co))\n row-=1\n return matPart(ret,0,n,n,n*2)", "def qri_mgs_piv( A, alpha=0.5 ):\n \n Q = numpy.array(A, dtype=float)\n m,n = Q.shape\n R = numpy.zeros( (n,n) )\n Qnorms = numpy.zeros( n )\n piv = numpy.zeros( n )\n P = numpy.eye( n )\n\n for k in range( 0, n ) :\n # step 0\n for j in range ( k, n ) :\n Qnorms[j] = numpy.linalg.norm( Q[:,j] )\n #print Qnorms\n j = numpy.where(Qnorms == max(Qnorms[k:n]))[0][0]\n Qnorms[k] = 0\n #print Q\n #print R\n #piv[k] = j\n if (j != k) :\n #print \"switching columns\", k, \"and\", j\n P[:, [j, k]] = P[:, [k, j]]\n Q[:, [j, k]] = Q[:, [k, j]]\n #if (k > 0) :\n # R[0:k, [j, k]] = R[0:k, [k, j]]\n R[:, [j, k]] = R[:, [k, j]]\n #print Q\n #print R\n\n # step 1\n vl2norm = numpy.linalg.norm( Q[:,k] )\n ii = 0\n while True : # iterate\n for i in range( 0, k ) :\n s = numpy.dot( Q[:,i], Q[:,k] )\n Q[:,k] = Q[:,k] - s * Q[:,i]\n R[i,k] = R[i,k] + s\n\n ii = ii + 1\n vlnorm = vl2norm\n vl2norm = numpy.linalg.norm( Q[:,k] )\n if (vl2norm > alpha * vlnorm) :\n #print \"on column\", k, \"used\", ii, \"orthogonalizations\"\n break\n \n # step 2\n R[k,k] = numpy.linalg.norm( Q[:,k] )\n Q[:,k] = Q[:,k] / R[k,k]\n\n # step 3\n if (k == n) :\n break\n else :\n for j in range( k+1, n ) :\n R[k,j] = numpy.dot( Q[:,k], Q[:,j] )\n Q[:,j] = Q[:,j] - R[k,j] * Q[:,k]\n\n # step 4\n #Qhat = Q[:,k]\n #Qhat2 = Qhat\n for j in range( k+1, n ) :\n ii = 0\n vl2norm = numpy.linalg.norm( Q[:,j] )\n while True : # iterate\n s = numpy.dot( Q[:,j], Q[:,k] )\n R[k,j] = R[k,j] + s\n Q[:,j] = Q[:,j] - s * Q[:,k]\n \n ii = ii + 1\n vlnorm = vl2norm\n vl2norm = numpy.linalg.norm( Q[:,j] )\n if (vl2norm > alpha * vlnorm) :\n #print \"on column\", j, \"used\", ii, \"orthogonalizations\"\n break\n \n return Q,R,P", "def magma_zungqr_gpu(m, n, k, A, ldda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_zungqr_gpu(m, n, k, int(A), ldda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def fractalTransformationCG(F,G,M=256,N=50,its=16,\n deBruijn=True,return_Q=False):\n assert isinstance(F,DynamicalSystem) and isinstance(G,DynamicalSystem)\n assert F.check_validity(True,False) and G.check_validity(True,False)\n if deBruijn:\n its = int(its)\n if 
its>32:\n print(\"fractalTransformationCG: Warning: A very long sequence \"+\n \"length has been requested! (2**\",its,\")\")\n else:\n if its<=30:\n its = int(2.0**its)\n else:\n its = int(its)\n rho = F.get_rho()\n tau_L = F.tau(rho,N+1)\n tau_R = F.tau_plus(rho,N+1)\n sigma = np.zeros(N+1,dtype=np.int8)\n X = np.linspace(0.0,1.0,M+1)\n H = X.copy()\n Q = np.zeros(M+1,dtype=np.int)\n Q[0],Q[M] = N,N # since the end points are always correct\n q,x,y = 0,1.0,1.0\n def address_distance(alpha,beta):\n k = np.argmin(alpha==beta)\n return (beta[k]-alpha[k])*0.5**k\n if deBruijn:\n db_2 = DeBruijnGenerator(2,its)\n #for _ in range(db_2.length()): # beware of overflow!\n while not db_2.is_complete(): # this is better\n sigma = np.roll(sigma,1)\n sigma[0] = db_2()\n if sigma[0]==0:\n x = F.if0(x)\n y = G.if0(y)\n else:\n x = F.if1(x)\n y = G.if1(y)\n if sigma[0]==0:\n if address_distance(sigma,tau_L)<0:\n q = 0\n else:\n if address_distance(tau_R,sigma)<0:\n q = 0\n k = int(0.5+x*M)\n # Should really check k is in the right range (i.e. 0,1,...,M)\n # but this shouldn't happen and is somewhat expensive to check\n if Q[k] < q:\n H[k] = y\n Q[k] = q\n q += 1\n # end while\n else:\n for _ in range(its):\n sigma = np.roll(sigma,1)\n sigma[0] = np.random.randint(2)\n if sigma[0]==0:\n x = F.if0(x)\n y = G.if0(y)\n else:\n x = F.if1(x)\n y = G.if1(y)\n if sigma[0]==0:\n if address_distance(sigma,tau_L)<0:\n q = 0\n else:\n if address_distance(tau_R,sigma)<0:\n q = 0\n k = int(0.5+x*M)\n # Should really check k is in the right range (i.e. 0,1,...,M)\n # but this shouldn't happen and is somewhat expensive to check\n if Q[k] < q:\n H[k] = y\n Q[k] = q\n q += 1\n # end for\n # end if/else\n if return_Q:\n return X,H,Q\n return X,H", "def qft_recursive(qubits):\n qftcirc = Circuit()\n\n # First add the QFT subroutine above\n qftcirc.add(qft_no_swap(qubits))\n\n # Then add SWAP gates to reverse the order of the qubits:\n for i in range(math.floor(len(qubits) / 2)):\n qftcirc.swap(qubits[i], qubits[-i - 1])\n\n return qftcirc", "def magma_sorgqr_m(m, n, k, A, lda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_sorgqr_m(m, n, k, int(A), lda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def qri_cgs( A, alpha ):\n \n A = numpy.array(A, dtype=float)\n m,n = A.shape\n Q1 = numpy.zeros( (m,n) )\n R = numpy.zeros( (n,n) )\n\n for k in range( 0, n ) :\n Qhat = A[:,k]\n #ii = 0\n while True : # iterate\n s = numpy.dot( numpy.transpose( Q1 ), Qhat )\n Qhat2 = Qhat - numpy.dot( Q1, s )\n R[:,k] = R[:,k] + s\n\n #ii = ii + 1\n Qhat2len = numpy.linalg.norm( Qhat2 )\n Qhatlen = numpy.linalg.norm( Qhat )\n if ( Qhat2len > alpha * Qhatlen ) :\n Qhat = Qhat2\n #print ii\n break\n Qhat = Qhat2\n\n R[k,k] = numpy.linalg.norm( Qhat )\n Q1[:,k] = Qhat / R[k,k]\n \n return Q1,R", "def incremental_svd(A, qr_flg=False):\n\n m = 256\n n = 7291\n\n n0 = 256\n\n if A.shape[0] != m or A.shape[1] != n: raise ValueError('Error: incorrect matrix size')\n\n start = time.clock()\n\n A0 = A[:, :n0]\n U, s, V = ln.svd(A0, full_matrices=False)\n\n # NOTE: s is a vector; np.diag(s) will produce a diagonal matrix\n for i in range(n0, n):\n\n # new matrix is just a single vector (i-th column of A)\n A1 = np.matrix(A[:, i]).T\n\n if qr_flg:\n J, K = ln.qr(A1 - np.dot(np.dot(U, U.T), A1))\n U_, s_, V_ = ln.svd(\n np.vstack((\n np.hstack((np.diag(s), np.dot(U.T, A1))),\n np.hstack((np.zeros((K.shape[0], s.shape[0])), K))\n )),\n full_matrices=False)\n\n # update the result of SVD\n U = 
np.dot(np.hstack((U, J)), U_)\n\n else:\n U_, s_, V_ = ln.svd(np.hstack((np.diag(s), np.dot(U.T, A1))), full_matrices=False)\n U = np.dot(U, U_)\n\n s = s_\n\n # NOTE: V from svd on NumPy is already transposed\n V = np.dot(V_,\n np.vstack((\n np.hstack((V, np.zeros((V.shape[0], i+1-V.shape[1])))),\n np.hstack((np.zeros((V_.shape[1]-V.shape[0], V.shape[1])), np.eye(V_.shape[1]-V.shape[0], i+1-V.shape[1])))\n ))\n )\n\n # for next computation, update A0\n A0 = np.hstack((A0, A1))\n\n elapsed_time = time.clock() - start\n print 'time:', elapsed_time\n\n return U, s, V", "def secuencia(R, Q, q):\r\n n = 1\r\n r = []\r\n for qq in q:\r\n for qqq in qq.eps:\r\n r.append(qqq)\r\n r = sorted(r)\r\n\r\n for l in r:\r\n print('la l', l)\r\n Qaux = []\r\n for j in range(len(Q)):\r\n notaux = []\r\n notaux.append(Q[j][0]+j*l[0])\r\n notaux.append(Q[j][1]+(j+1)*l[0])\r\n notaux.append(Q[j][2])\r\n Qaux.append(notaux)\r\n # print(Qaux)\r\n Qaux[-1][1] = R[-1][1]\r\n dibuja(R, Qaux, n)\r\n n += 1", "def slowparts(d, re, preDz, preWz, SRW, RSW, yxV, xyU, resid):\n fprime = lambda x: 1 - power(tanh(x), 2)\n\n partialDU = zeros((d+1, re, 2*d, d))\n for k in range(2*d):\n for i in range(d):\n partialDU[:,:,k,i] = fprime(preDz[k]) * fprime(preWz[i]) * (SRW[i,k] + RSW[i,k]) * yxV[:,:,i]\n\n return partialDU", "def dlqr(A, B, Q, R, gamma=1):\n\n P = scipy.linalg.solve_discrete_are(np.sqrt(gamma) * A, B, Q, R / gamma)\n\n F = gamma * np.matmul(scipy.linalg.inv(np.matmul(np.matmul(B.T, gamma * P), B) + R),\n (np.matmul(np.matmul(B.T, P), A)))\n return -F, P", "def LQR_cost(A, B, K, Q, R, sigma_w):\n\n L = A + B.dot(K)\n if spectral_radius(L) >= 1:\n return 1e6\n\n M = Q + K.T.dot(R).dot(K)\n\n P = solve_discrete_lyapunov(L, M)\n\n return (sigma_w ** 2) * np.trace(P)", "def magma_sorgqr_gpu(m, n, k, A, ldda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_sorgqr_gpu(m, n, k, int(A), ldda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def custom_constr(x, qr, inverse, depth):\n qc = QuantumCircuit(qr)\n maxi, mini = max(x), min(x)\n n = x.shape[0]\n #qc_wv = Wavelets(n).construct_circuit(register=qr)\n for _ in range(depth):\n qc.h(qr)\n for i in range(n):\n qc.u2(np.pi*(x[(i+1) % n]-mini)/(maxi-mini), 2*np.pi*(x[i]-mini)/(maxi-mini), qr[i])\n for i in range(n):\n qc.cx(qr[i], qr[(i + 1) % n])\n qc.u2(np.pi*(x[(i+1) % n]-mini)/(maxi-mini),\n ((2*np.pi)**2*(x[i]-mini)*(x[(i+1) % n]-mini)/(maxi-mini)**2) % 2*np.pi,\n qr[(i + 1) % n])\n qc.cx(qr[i], qr[(i + 1) % n])\n #qc = qc + qc_wv\n if inverse:\n return qc.inverse()\n return qc", "def Q(self):\n self.dualEigenmatrix()", "def magma_dorgqr_m(m, n, k, A, lda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_dorgqr_m(m, n, k, int(A), lda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def magma_cungqr_m(m, n, k, A, lda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_cungqr_m(m, n, k, int(A), lda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def lqr(a, b, q, r):\n a, b, q, r = map(np.atleast_2d, (a, b, q, r))\n p = scipy.linalg.solve_continuous_are(a, b, q, r)\n\n # LQR gain\n k = np.linalg.solve(r, b.T.dot(p))\n\n return k, p", "def quatreal(q):\n a = q[0,0]\n b = q[0,1]\n c = q[0,2]\n d = q[0,3]\n amat = a*np.identity(4)\n bmat = b*np.array([[0,1,0,0],[-1,0,0,0],[0,0,0,-1],[0,0,1,0]])\n cmat = c*np.array([[0,0,1,0],[0,0,0,1],[-1,0,0,0],[0,-1,0,0]])\n dmat = d*np.array([[0,0,0,1],[0,0,-1,0],[0,1,0,0],[-1,0,0,0]])\n return 
amat+bmat+cmat+dmat", "def magma_dorgqr_gpu(m, n, k, A, ldda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_dorgqr_gpu(m, n, k, int(A), ldda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def recursive_multiply(a, b):\n if len(a) == 2:\n return naive_multiply(a, b)\n\n a11 = a[0:int(len(a) / 2)]\n for index, row in enumerate(a11):\n a11[index] = row[0:int(len(row) / 2)]\n\n a12 = a[0:int(len(a) / 2)]\n for index, row in enumerate(a12):\n a12[index] = row[int(len(a) / 2):len(a)]\n\n a21 = a[int(len(a) / 2):len(a)]\n for index, row in enumerate(a21):\n a21[index] = row[0:int(len(row) / 2)]\n\n a22 = a[int(len(a) / 2):len(a)]\n for index, row in enumerate(a22):\n a22[index] = row[int(len(a) / 2):len(a)]\n\n b11 = b[0:int(len(b) / 2)]\n for index, row in enumerate(b11):\n b11[index] = row[0:int(len(row) / 2)]\n\n b12 = b[0:int(len(b) / 2)]\n for index, row in enumerate(b12):\n b12[index] = row[int(len(b) / 2):len(b)]\n\n b21 = b[int(len(b) / 2):len(b)]\n for index, row in enumerate(b21):\n b21[index] = row[0:int(len(row) / 2)]\n\n b22 = b[int(len(b) / 2):len(b)]\n for index, row in enumerate(b22):\n b22[index] = row[int(len(b) / 2):len(b)]\n\n c11 = matrix_add(recursive_multiply(a11, b11), recursive_multiply(a12, b21)) # C11 = A11*B11 + A12*B21\n c12 = matrix_add(recursive_multiply(a11, b12), recursive_multiply(a12, b22)) # C12 = A11*B12 + A12*B22\n c21 = matrix_add(recursive_multiply(a21, b11), recursive_multiply(a22, b21)) # C21 = A21*B11 + A22*B21\n c22 = matrix_add(recursive_multiply(a21, b12), recursive_multiply(a22, b22)) # C22 = A21*B12 + A22*B22\n\n # Append c12 to c11\n for row_index, row in enumerate(c11):\n for col_index, col in enumerate(c12):\n row.append(c12[row_index][col_index])\n\n # Append c22 to c21\n for row_index, row in enumerate(c21):\n for col_index, col in enumerate(c12):\n row.append(c22[row_index][col_index])\n\n # Append c21 to c11\n for i in c21:\n c11.append(i)\n\n return c11", "def lwr_recursion(r):\r\n\r\n # r is (P+1, nc, nc)\r\n nc = r.shape[1]\r\n P = r.shape[0] - 1\r\n\r\n a = np.zeros((P, nc, nc)) # ar coefs\r\n b = np.zeros_like(a) # lp coefs\r\n sigb = np.zeros_like(r[0]) # forward prediction error covariance\r\n sigf = np.zeros_like(r[0]) # backward prediction error covariance\r\n delta = np.zeros_like(r[0])\r\n\r\n # initialize\r\n idnt = np.eye(nc)\r\n sigf[:] = r[0]\r\n sigb[:] = r[0]\r\n\r\n # iteratively find sequences A_{p+1}(i) and B_{p+1}(i)\r\n for p in range(P):\r\n\r\n # calculate delta_{p+1}\r\n # delta_{p+1} = r(p+1) + sum_{i=1}^{p} a(i)r(p+1-i)\r\n delta[:] = r[p + 1]\r\n for i in range(1, p + 1):\r\n delta += np.dot(a[i - 1], r[p + 1 - i])\r\n\r\n # intermediate values XXX: should turn these into solution-problems\r\n ka = np.dot(delta, linalg.inv(sigb))\r\n kb = np.dot(delta.conj().T, linalg.inv(sigf))\r\n\r\n # store a_{p} before updating sequence to a_{p+1}\r\n ao = a.copy()\r\n # a_{p+1}(i) = a_{p}(i) - ka*b_{p}(p+1-i) for i in {1,2,...,p}\r\n # b_{p+1}(i) = b_{p}(i) - kb*a_{p}(p+1-i) for i in {1,2,...,p}\r\n for i in range(1, p + 1):\r\n a[i - 1] -= np.dot(ka, b[p - i])\r\n for i in range(1, p + 1):\r\n b[i - 1] -= np.dot(kb, ao[p - i])\r\n\r\n a[p] = -ka\r\n b[p] = -kb\r\n\r\n sigf = np.dot(idnt - np.dot(ka, kb), sigf)\r\n sigb = np.dot(idnt - np.dot(kb, ka), sigb)\r\n\r\n return a, sigf", "def magma_dormqr_m(ngpu, side, trans, m, n, k, A, lda,\n tau, C, ldc, work, lwork):\n info = c_int_type()\n side = _side_conversion[side]\n trans = _trans_conversion[trans]\n status = 
_libmagma.magma_dormqr_m(ngpu, side, trans, m, n, k,\n int(A), lda, int(tau),\n int(C), ldc, int(work), lwork,\n ctypes.byref(info))", "def _reparam(self):\n\n k_fe, k_re, k_re2 = self.k_fe, self.k_re, self.k_re2\n k_tot = k_fe + k_re2\n ix = np.tril_indices(self.k_re)\n\n lin = []\n for k in range(k_fe):\n e = np.zeros(k_tot)\n e[k] = 1\n lin.append(e)\n for k in range(k_re2):\n lin.append(np.zeros(k_tot))\n\n quad = []\n for k in range(k_tot):\n quad.append(np.zeros((k_tot, k_tot)))\n ii = np.tril_indices(k_re)\n ix = [(a,b) for a,b in zip(ii[0], ii[1])]\n for i1 in range(k_re2):\n for i2 in range(k_re2):\n ix1 = ix[i1]\n ix2 = ix[i2]\n if (ix1[1] == ix2[1]) and (ix1[0] <= ix2[0]):\n ii = (ix2[0], ix1[0])\n k = ix.index(ii)\n quad[k_fe+k][k_fe+i2, k_fe+i1] += 1\n for k in range(k_tot):\n quad[k] = 0.5*(quad[k] + quad[k].T)\n\n return lin, quad", "def rothesstri(A, b):\n n = shape(A)[0]\n A = hstack([A, b])\n for k in range(n-1):\n r = linalg.norm([ A[k , k] , A[k + 1, k] ])\n if r>0:\n c=A[k, k]/r; s=A[k + 1, k]/r\n A[[k, k + 1],(k + 1):(n + 1)]=[[c, s],[-s, c]]*A[[k, k + 1],(k + 1):(n + 1)]\n A[k, k] = r; A[k+1,k] = 0\n z = A[:, n].copy()\n rbacksolve(A[:, :n], z, n)\n return z", "def alternative_iterative_method(x0, n, gamma, b):\n # Parameters:\n MAX_ITER = 1000\n n2 = n**2\n\n # Creating NxN versions of vector for easier indexing during iteration\n b = b.copy().reshape(n, n)\n b_transposed = b.copy().T\n x0 = x0.copy().reshape(n, n)\n x0_transposed = x0.copy().T\n x1 = x0.copy()\n x1_transposed = x0_transposed.copy()\n\n # No need for M, N, only a smaller tridiagonal system:\n H = scipy.sparse.diags((-1, 2, -1), (-1, 0, 1), shape=(n, n), format=\"csr\")\n gammaI = scipy.sparse.diags((gamma,), (0,), shape=(n, n), format=\"csr\")\n M1 = gammaI + H # Corresponds to both (gI + M) & (gI + N) in equations\n M2 = gammaI - H # Corresponds to both (gI - M) & (gI - N) in equations\n\n # Preallocating RHS of equations\n RHS7 = np.zeros((n, n), dtype=np.float64)\n RHS8 = np.zeros((n, n), dtype=np.float64)\n\n k = 0\n while k < MAX_ITER:\n for i in range(n): # Loading RHS values for Equation (7):\n RHS7[:, i] = scipy.sparse.csr_matrix.dot(M2, x0_transposed[i]) + b_transposed[i]\n for i in range(n): # Solving N independent tridig mat systems related to Eq(7):\n x1[i] = scipy.sparse.linalg.spsolve(M1, RHS7[i])\n RHS8[i] = scipy.sparse.csr_matrix.dot(M2, x1[i]) + b[i] # Loading RHS values for Equation (8):\n for i in range(n): # Solving N independent tridig mat systems related to Eq(8):\n x1_transposed[i] = scipy.sparse.linalg.spsolve(M1, RHS8[:, i])\n\n k += 1\n if np.allclose(x1_transposed, x0_transposed, rtol=1e-8):\n break\n x0_transposed = x1_transposed.copy()\n\n res = x1_transposed.T.reshape(n2)\n return res, k", "def multipleQueensOnRightDiagonals(board):\n (rows, columns) = len(board), len(board[0])\n\n direction = [(+1, -1), (-1, +1)]\n start = [(0, 0), (rows - 1, 1)]\n\n # For each diagonal half of the rectangle\n # i.e. 
(top-left half, bottom-right half)\n for i, half in enumerate([-1, +1]):\n for column in range(columns):\n\n count = 0\n\n if half == -1:\n # As we traverse the upper left triangle, the diagonal\n # lengths increase as we move towards the right.\n diagonalLength = column + 1\n else:\n # On the other hand, the diagonal lengths decrease as we\n # traverse the lower right half, moving towards the right.\n diagonalLength = columns - column\n\n # Traverse the diagonals.\n for d in range(diagonalLength):\n\n row = start[i][0] + (d * direction[i][0])\n col = column + (d * direction[i][1])\n\n if board[row][col] == 1:\n count += 1\n\n if count > 1:\n return True\n\n return False", "def matrices_QP(l, omega, S, cn, csn, rhos, rho):\n MN = (np.linalg.inv(matrix_M2(l, omega, S, cn, csn, rhos, rho))\n * matrix_N2(l, omega, S, cn)\n )\n KL = (np.linalg.inv(matrix_K2(l, omega, S, cn, csn, rhos, rho))\n * matrix_L2(l, omega, S, cn)\n )\n Q = np.zeros((3,3))\n P = np.zeros((3,3))\n Q[:2,:2] = MN[:2]\n Q[ 3, 3] = KL[0]\n P[:2,:2] = MN[2:]\n P[ 3, 3] = KL[1]\n return Q, P", "def magma_cungqr_gpu(m, n, k, A, ldda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_cungqr_gpu(m, n, k, int(A), ldda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def reciprocal_mat(l_g_go):\n InMat = np.copy(l_g_go)\n InMat = InMat.astype(float)\n\n L3 = np.cross(InMat[:, 0], InMat[:, 1]) / np.linalg.det(InMat)\n L1 = np.cross(InMat[:, 1], InMat[:, 2]) / np.linalg.det(InMat)\n L2 = np.cross(InMat[:, 2], InMat[:, 0]) / np.linalg.det(InMat)\n rl_g_go = np.vstack((L1, L2, L3)).T\n # rl_g_go = Matrix(rl_g_go)\n\n\n return rl_g_go", "def doubleo(A, C, Q, R, tol=1e-15):\n a0 = A.T\n b0 = np.dot(C.T, solve(R, C))\n g0 = Q\n dd = 1.\n ss = max(A.shape)\n v = np.eye(ss)\n\n # NOTE: This is a little hack to make sure we update k1 and k0 properly\n # depending on the dimensions of C\n c_vec = C.shape[0] > 1\n\n while dd > tol:\n a1 = np.dot(a0, solve(v + np.dot(b0, g0), a0))\n b1 = b0 + np.dot(a0, solve(v + np.dot(b0, g0), np.dot(b0, a0.T)))\n g1 = g0 + np.dot(np.dot(a0.T, g0), solve(v + np.dot(b0, g0), a0))\n\n if c_vec:\n k1 = np.dot(A.dot(g1), solve(np.dot(C, g1.T).dot(C.T) + R.T, C).T)\n k0 = np.dot(A.dot(g0), solve(np.dot(C, g0.T).dot(C.T) + R.T, C).T)\n else:\n k1 = np.dot(np.dot(A, g1), C.T / (np.dot(C, g1).dot(C.T) + R))\n k0 = np.dot(A.dot(g0), C.T / (np.dot(C, g0).dot(C.T) + R))\n\n dd = np.max(np.abs(k1 - k0))\n a0 = a1\n b0 = b1\n g0 = g1\n\n return k1, g1", "def magma_sorgqr_2stage_gpu(m, n, k, A, ldda, tau, dT, nb):\n info = c_int_type()\n status = _libmagma.magma_sorgqr_2stage_gpu(m, n, k, int(A), ldda,\n int(tau), int(dT), nb,\n ctypes.byref(info))\n magmaCheckStatus(status)", "def magma_zungqr2(m, n, k, A, lda, tau):\n info = c_int_type()\n status = _libmagma.magma_zungqr2(m, n, k, int(A), lda,\n int(tau), ctypes.byref(info))\n magmaCheckStatus(status)", "def it_matrixpower(p,t,n,root_field=RR):\n assert n>=2, \"Carleman matrix must at least be of size 2 to retrieve the coefficients. 
But given was \" + repr(n)\n CM = p.carleman_matrix(n)\n ev = CM.charpoly().roots(root_field)\n assert len(ev) == n, \"Carleman matrix must have exactly \" + repr(n) + \"eigenvalues, but has \" + repr(len(ev))\n\n Char = [0]*n\n for k in range(n):\n #here is possibility for improvement of precision\n #to separate the fractional from the root parts\n #expanding the product\n Char[k] = CM - ev[k][0]*identity_matrix(n)\n\n #we want to have the first row of the product of the matrices\n #thatswhy we mulitply in front with:\n prod = vector(p.K,[0,1]+[0]*(n-2))\n prodwo = [0]*n\n for k in range(n):\n prodwo[k]=prod #these are the first terms until k-1\n\n #no need to continue\n if k == n-1:\n break\n\n #and we add the terms starting with k+1\n for i in range(k+1,n):\n prodwo[k] = prodwo[k] * Char[i]\n\n prod = prod * Char[k]\n\n sprodwo = [0]*n\n for k in range(n):\n if k==0:\n sprodwo[k] = ev[k][0] - ev[1][0]\n start = 2\n else:\n sprodwo[k] = ev[k][0] - ev[0][0]\n start = 1\n\n for i in range(start,n):\n if i != k:\n sprodwo[k] = sprodwo[k] * (ev[k][0] - ev[i][0])\n\n res = ev[0][0]**t/sprodwo[0] * prodwo[0]\n for k in range(1,n):\n res += ev[k][0]**t/sprodwo[k]*prodwo[k]\n\n return res.list()", "def __split0_q_loop(\n col: int,\n r_tiles: SquareDiagTiles,\n proc_tile_start: torch.Tensor,\n active_procs: torch.Tensor,\n q0_tiles: SquareDiagTiles,\n q_dict: Dict,\n q_dict_waits: Dict,\n) -> None:\n tile_columns = r_tiles.tile_columns\n diag_process = (\n torch.nonzero(input=proc_tile_start > col, as_tuple=False)[0]\n if col != tile_columns\n else proc_tile_start[-1]\n )\n diag_process = diag_process.item()\n rank = r_tiles.arr.comm.rank\n q0_dtype = q0_tiles.arr.dtype\n q0_torch_type = q0_dtype.torch_type()\n q0_torch_device = q0_tiles.arr.device.torch_device\n # wait for Q tensors sent during the R calculation -----------------------------------------\n if col in q_dict_waits.keys():\n for key in q_dict_waits[col].keys():\n new_key = q_dict_waits[col][key][3] + key + \"e\"\n q_dict_waits[col][key][0][1].Wait()\n q_dict[col][new_key] = [\n q_dict_waits[col][key][0][0],\n q_dict_waits[col][key][1].wait(),\n q_dict_waits[col][key][2].wait(),\n ]\n del q_dict_waits[col]\n # local Q calculation =====================================================================\n if col in q_dict.keys():\n lcl_col_shape = r_tiles.local_get(key=(slice(None), col)).shape\n # get the start and stop of all local tiles\n # -> get the rows_per_process[rank] and the row_indices\n row_ind = r_tiles.row_indices\n prev_rows_per_pr = sum(r_tiles.tile_rows_per_process[:rank])\n rows_per_pr = r_tiles.tile_rows_per_process[rank]\n if rows_per_pr == 1:\n # if there is only one tile on the process: return q_dict[col]['0']\n base_q = q_dict[col][\"l0\"][0].clone()\n del q_dict[col][\"l0\"]\n else:\n # 0. 
get the offset of the column start\n offset = (\n torch.tensor(\n row_ind[col].item() - row_ind[prev_rows_per_pr].item(), device=q0_torch_device\n )\n if row_ind[col].item() > row_ind[prev_rows_per_pr].item()\n else torch.tensor(0, device=q0_torch_device)\n )\n # 1: create an eye matrix of the row's zero'th dim^2\n q_lcl = q_dict[col][\"l0\"] # [0] -> q, [1] -> shape of a use in q calc (q is square)\n del q_dict[col][\"l0\"]\n base_q = torch.eye(\n lcl_col_shape[r_tiles.arr.split], dtype=q_lcl[0].dtype, device=q0_torch_device\n )\n # 2: set the area of the eye as Q\n base_q[offset : offset + q_lcl[1][0], offset : offset + q_lcl[1][0]] = q_lcl[0]\n\n local_merge_q = {rank: [base_q, None]}\n else:\n local_merge_q = {}\n # -------------- send local Q to all -------------------------------------------------------\n for pr in range(diag_process, active_procs[-1] + 1):\n if pr != rank:\n hld = torch.zeros(\n [q0_tiles.lshape_map[pr][q0_tiles.arr.split]] * 2,\n dtype=q0_torch_type,\n device=q0_torch_device,\n )\n else:\n hld = local_merge_q[pr][0].clone()\n wait = q0_tiles.arr.comm.Ibcast(hld, root=pr)\n local_merge_q[pr] = [hld, wait]\n\n # recv local Q + apply local Q to Q0\n for pr in range(diag_process, active_procs[-1] + 1):\n if local_merge_q[pr][1] is not None:\n # receive q from the other processes\n local_merge_q[pr][1].Wait()\n if rank in active_procs:\n sum_row = sum(q0_tiles.tile_rows_per_process[:pr])\n end_row = q0_tiles.tile_rows_per_process[pr] + sum_row\n # slice of q_tiles -> [0: -> end local, 1: start -> stop]\n q_rest_loc = q0_tiles.local_get(key=(slice(None), slice(sum_row, end_row)))\n # apply the local merge to q0 then update q0`\n q_rest_loc = q_rest_loc @ local_merge_q[pr][0]\n q0_tiles.local_set(key=(slice(None), slice(sum_row, end_row)), value=q_rest_loc)\n del local_merge_q[pr]\n\n # global Q calculation =====================================================================\n # split up the Q's from the global QR calculation and set them in a dict w/ proper keys\n global_merge_dict = (\n __split0_global_q_dict_set(\n q_dict_col=q_dict[col], col=col, r_tiles=r_tiles, q_tiles=q0_tiles\n )\n if rank == diag_process\n else {}\n )\n\n if rank == diag_process:\n merge_dict_keys = set(global_merge_dict.keys())\n else:\n merge_dict_keys = None\n merge_dict_keys = r_tiles.arr.comm.bcast(merge_dict_keys, root=diag_process)\n\n # send the global merge dictionary to all processes\n for k in merge_dict_keys:\n if rank == diag_process:\n snd = global_merge_dict[k].clone()\n snd_shape = snd.shape\n r_tiles.arr.comm.bcast(snd_shape, root=diag_process)\n else:\n snd_shape = None\n snd_shape = r_tiles.arr.comm.bcast(snd_shape, root=diag_process)\n snd = torch.empty(snd_shape, dtype=q0_dtype.torch_type(), device=q0_torch_device)\n\n wait = r_tiles.arr.comm.Ibcast(snd, root=diag_process)\n global_merge_dict[k] = [snd, wait]\n if rank in active_procs:\n # create a dictionary which says what tiles are in each column of the global merge Q\n qi_mult = {}\n for c in range(q0_tiles.tile_columns):\n # this loop is to slice the merge_dict keys along each column + create the\n qi_mult_set = set([(i, c) for i in range(col, q0_tiles.tile_columns)])\n if len(qi_mult_set & merge_dict_keys) != 0:\n qi_mult[c] = list(qi_mult_set & merge_dict_keys)\n\n # have all the q_merge in one place, now just do the mm with q0\n # get all the keys which are in a column (qi_mult[column])\n row_inds = q0_tiles.row_indices + [q0_tiles.arr.gshape[0]]\n q_copy = q0_tiles.arr.larray.clone()\n for qi_col in 
qi_mult.keys():\n # multiply q0 rows with qi cols\n # the result of this will take the place of the row height and the column width\n out_sz = q0_tiles.local_get(key=(slice(None), qi_col)).shape\n mult_qi_col = torch.zeros(\n (q_copy.shape[1], out_sz[1]), dtype=q0_dtype.torch_type(), device=q0_torch_device\n )\n for ind in qi_mult[qi_col]:\n if global_merge_dict[ind][1] is not None:\n global_merge_dict[ind][1].Wait()\n lp_q = global_merge_dict[ind][0]\n if mult_qi_col.shape[1] < lp_q.shape[1]:\n new_mult = torch.zeros(\n (mult_qi_col.shape[0], lp_q.shape[1]),\n dtype=mult_qi_col.dtype,\n device=q0_torch_device,\n )\n new_mult[:, : mult_qi_col.shape[1]] += mult_qi_col.clone()\n mult_qi_col = new_mult\n\n mult_qi_col[\n row_inds[ind[0]] : row_inds[ind[0]] + lp_q.shape[0], : lp_q.shape[1]\n ] = lp_q\n hold = torch.matmul(q_copy, mult_qi_col)\n\n write_inds = q0_tiles.get_start_stop(key=(0, qi_col))\n q0_tiles.arr.lloc[:, write_inds[2] : write_inds[2] + hold.shape[1]] = hold\n else:\n for ind in merge_dict_keys:\n global_merge_dict[ind][1].Wait()\n if col in q_dict.keys():\n del q_dict[col]", "def q_(w,R,lam=1064.0e-9):\n\n if R!=np.inf:\n q=np.pi*w**2*R/(np.pi*w**2-1j*R*lam)\n else:\n q=1j*np.pi*w**2/lam\n\n return q", "def magma_dorgqr2(m, n, k, A, lda, tau):\n info = c_int_type()\n status = _libmagma.magma_dorgqr2(m, n, k, int(A), lda,\n int(tau), ctypes.byref(info))\n magmaCheckStatus(status)", "def solveNQ(n):\n\n board = []\n for i in range(n):\n for j in range(n):\n board[i][j].append(0)\n\n if solveNQUtil(board, 0, n) == False:\n print(\"Solution does not exist\")\n return False\n printSolution(board)\n return True", "def stable_random_haar_measure_matrix(reduce, full):\n\n z = numpy.zeros((full, full), dtype=numpy.complex128)\n for i in range(full):\n z[i, :] = complex_blob(full, seed=i)\n\n q, r = numpy.linalg.qr(z)\n d = numpy.diagonal(r)\n ph = d / numpy.absolute(d)\n q = numpy.multiply(q, ph, q)\n\n reduced_q = q[0:reduce, :]\n return reduced_q", "def recursion_step(value_n, r_grid, discount):\n\n n = value_n.shape[0]\n r_len = r_grid.shape[0]\n value_n_minus_1 = np.zeros([n - 1, r_len]) # Value function length reduced by 1\n gittins_n_minus_1 = np.zeros(n - 1) # Value function length reduced by 1\n for k in range(0, n - 1):\n a = k + 1 # a in range [1,n-1]\n b = n - k - 1 # b in range [1,n-1]\n value_n_minus_1[k, :] = np.maximum((r_grid / float(1 - discount)),\n (a / float(n)) * (1 + discount * value_n[k + 1, :]) +\n (b / float(n)) * discount * value_n[k, :]\n )\n try:\n # Find first index where Value = (Value of Safe Arm)\n idx_git = np.argwhere((r_grid / float(1 - discount)) == value_n_minus_1[k, :]).flatten()\n gittins_n_minus_1[k] = 0.5 * (r_grid[idx_git[0]] + r_grid[idx_git[0] - 1]) # Take average\n except:\n print(\"Error in finding Gittins index\")\n\n return gittins_n_minus_1, value_n_minus_1", "def construct_Q(self, N, r, t0, tf):\n q = np.zeros((N, N))\n for i in xrange(N): \n for l in xrange(N):\n if i>= r and l>=r: \n q[i,l] = 2*self.prodoftwoterms(i,l,r)*(tf**(i+l-2*r+1) - t0**(i+l-2*r+1))/(i+l-2*r+1)\n return q", "def sq_drij_c(r_i,r_j,latticevec):\n\n dr_pbc = delta_r_c(r_i,r_j,latticevec)\n\n sq_dr = np.dot( dr_pbc,dr_pbc)\n\n return sq_dr", "def qr(nd, mode=\"reduced\"):\n\n assert nd.ndim == 2, \"QR decomposition requires 2 dimensional ndarray\"\n\n if mode not in [\"reduced\", \"r\", \"raw\", \"complete\"]:\n raise ValueError(f\"Unrecognized mode '{mode}' for QR decomposition\")\n\n float_nd = nd.map(lambda x: hl.float64(x))\n ir = NDArrayQR(float_nd._ir, 
mode)\n if mode == \"raw\":\n return construct_expr(ir, ttuple(tndarray(tfloat64, 2), tndarray(tfloat64, 1)))\n elif mode == \"r\":\n return construct_expr(ir, tndarray(tfloat64, 2))\n elif mode in [\"complete\", \"reduced\"]:\n return construct_expr(ir, ttuple(tndarray(tfloat64, 2), tndarray(tfloat64, 2)))", "def positive_rq(S):\n Q, R = positive_qr(np.flipud(S).T)\n R = np.flipud(R.T)\n Q = np.flipud(Q.T)\n return R[:,::-1], Q", "def iteration(self, A, b, n, mode, l):\n # Step 0, qr factorization of C, and create Q_1, -inverse(rows+lI)\n rows, cols = A.shape\n\n C = self.concatMatrix(A, ma.pow(l, 0.5), cols)\n q, r = np.linalg.qr(C)\n q1 = q[0:rows, 0:cols]\n r1 = r[0:rows, 0:cols]\n\n # coe second approach is: (R^tR)^-1\n coe = - np.linalg.inv(np.dot(np.transpose(r), r))\n\n # Step 1, x_k = inv(R) * transpose(Q_1) * b\n inv_r = np.linalg.inv(r1)\n trans_q1 = np.transpose(q1)\n xk = np.dot(np.dot(inv_r, trans_q1), b)\n term = []\n term.append(self.to_array(xk))\n\n # Step 2, iteration\n if mode == 1:\n sk = xk\n for k in range(1, n + 1):\n sk = np.dot(coe, sk)\n tk = ma.pow(-1, k) * ma.pow(l, k) * sk\n xk = xk + tk\n term.append(self.to_array(xk))\n\n if mode == 2:\n tk = xk\n for k in range(1, n + 1):\n t = l * tk\n tk = np.dot(-coe, t)\n xk = xk + tk\n term.append(self.to_array(xk))\n\n return xk" ]
[ "0.7430045", "0.71422714", "0.6963531", "0.68576527", "0.6795892", "0.66816163", "0.65249866", "0.65225244", "0.65213066", "0.65106", "0.64953566", "0.6453057", "0.64338183", "0.64164525", "0.63908815", "0.63486344", "0.62450737", "0.6244796", "0.62227124", "0.62222993", "0.6195967", "0.61826557", "0.6161012", "0.61306363", "0.6085368", "0.6075899", "0.6059492", "0.5990854", "0.5950785", "0.59446424", "0.59240335", "0.5908834", "0.58344257", "0.5789183", "0.57827854", "0.57744503", "0.5774394", "0.57385504", "0.57335234", "0.5720773", "0.5712656", "0.5705811", "0.56939214", "0.5667583", "0.56552887", "0.56441", "0.56330824", "0.56104213", "0.56009305", "0.5595428", "0.5594892", "0.5590285", "0.5577291", "0.5564012", "0.55535746", "0.5545798", "0.5526481", "0.5525856", "0.55206734", "0.5510289", "0.5509049", "0.55088955", "0.55031073", "0.5491144", "0.5486595", "0.5475132", "0.5473595", "0.5466492", "0.5464734", "0.5464485", "0.54591995", "0.5453291", "0.5448486", "0.5446128", "0.5424995", "0.54221165", "0.5419598", "0.5418888", "0.5401844", "0.53894323", "0.53848344", "0.5376244", "0.5345708", "0.5343432", "0.5336085", "0.53187996", "0.5305779", "0.52980894", "0.5297296", "0.52921236", "0.5290302", "0.5290198", "0.5287755", "0.5282437", "0.5275129", "0.5257987", "0.5257933", "0.5254263", "0.5250757", "0.52501196" ]
0.74643546
0
This is a new version of the expand_dims() function of numpy; it extends multiple dims rather than one.
def new_expand_dims(a, axes): # if int is passed, retain the same behaviour if type(axes) == int: return np.expand_dims(a, axes) # insert axes to given indices for ax in sorted(axes): a = np.expand_dims(a, ax) return a
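A quick usage sketch of new_expand_dims above (the sample array and shapes are illustrative, not part of the record):

import numpy as np

a = np.ones((3, 4))

# A plain int behaves exactly like np.expand_dims.
assert new_expand_dims(a, 0).shape == (1, 3, 4)

# A list of axes inserts several singleton dims in one call; sorting
# the axes first keeps each index valid as the array grows.
assert new_expand_dims(a, [0, 2]).shape == (1, 3, 1, 4)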
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extend(a, b):\n if np.isscalar(a):\n return a\n # CRUFT: python 2.7 support\n extra_dims = (1,)*(b.ndim-a.ndim)\n return a.reshape(a.shape + extra_dims)\n # python 3 uses\n #extra_dims = (np.newaxis,)*(b.ndim-a.ndim)\n #return a[(..., *extra_dims)]", "def expand_dims(array):\n return array[np.newaxis, np.newaxis, ...]", "def _expand_dims_(x: np.array) -> np.array:\n x = np.expand_dims(x, axis=-1)\n return x", "def add_dims_on_right(arr: array_like, new_axes_on_right: int = 0, ndim: Optional[int] = None):\n arr = np.asarray(arr)\n if ndim is None:\n ndim = arr.ndim + new_axes_on_right\n else:\n new_axes_on_right = ndim - arr.ndim\n if new_axes_on_right > 0:\n return np.expand_dims(arr, tuple(range(arr.ndim, ndim)))\n else:\n return arr.reshape(arr.shape[:ndim])", "def _expand(x, ndim, axis=0):\n while F.rank(x) < ndim:\n x = F.expand_dims(x, axis)\n return x", "def expand_dims(input, axis, _builder=None):\n axis = _constexpr_to_value(axis)\n axes = list(axis) if isinstance(axis, Sequence) else [axis]\n new_ndim = len(input.shape) + len(axes)\n axes = [_wrap_axis(_constexpr_to_value(d), new_ndim) for d in axes]\n\n if len(set(axes)) != len(axes):\n raise ValueError(f\"expand_dims recieved duplicate axes, normalized axes = {axes}\")\n\n ret = input\n for a in sorted(axes):\n ret = semantic.expand_dims(ret, a, _builder)\n return ret", "def nd_expand_dims(x, n=1, before=True):\n if before:\n axes = tuple([np.newaxis] * n + [...])\n else:\n axes = tuple([...] + [np.newaxis] * n)\n return x[axes]", "def expand_dims_twice(array):\n return np.expand_dims(np.expand_dims(array, axis=1), axis=1)", "def expand_dims(array, full_dims):\n \n expand_list = []\n for flag_dim in full_dims:\n if flag_dim in array.coords:\n expand_list.append(np.arange(0, len(array[flag_dim])))\n else:\n expand_list.append(None)\n\n vals = array.values[expand_list]\n\n return vals", "def _add_keepdims(func):\n @functools.wraps(func)\n def wrapped(a, axis, **kwargs):\n res = func(a, axis=axis, **kwargs)\n if axis is None:\n axis = 0 # res is now a scalar, so we can insert this anywhere\n return np.expand_dims(res, axis=axis)\n return wrapped", "def expand_dims(module, array, dimension):\n _import_modules()\n if module in [np, ma, jnp, tf]:\n return module.expand_dims(array, dimension)\n elif module == torch:\n return module.unsqueeze(array, dimension)\n raise UnknownModuleException(f\"Module {module.__name__} not supported.\")", "def add_extra_dim(self, params: ExtraBytesParams) -> None:\n self.add_extra_dims([params])", "def expand_dim(self, dim, extra_internal_domain=None, extra_user_domain=None):\r\n expand_dim = dim - self.multi_index.spatial_dimension\r\n\r\n self.multi_index.expand_dim(dim) # breaks if dim<spacial_dimension, i.e. 
expand_dim<0\r\n extra_internal_domain = verify_domain(extra_internal_domain, expand_dim)\r\n self.internal_domain = np.concatenate((self.internal_domain, extra_internal_domain))\r\n extra_user_domain = verify_domain(extra_user_domain, expand_dim)\r\n self.user_domain = np.concatenate((self.user_domain, extra_user_domain))", "def extend(inarray, extension, d):\n n = inarray.shape[d]\n reps = concatenate(([extension+1], ones(n-2), [extension+1])).astype(int64)\n return inarray.repeat(reps, axis=d)", "def extend(dset, array):\n length = len(dset)\n newlength = length + len(array)\n dset.resize((newlength,) + array.shape[1:])\n dset[length:newlength] = array\n return newlength", "def _realloc(self, dim, extra=1):\n if self.nump == 0:\n self._alloc(dim)\n elif self.nump + extra > self.maxp - 1:\n oldmaxp = self.maxp\n self.maxp = max([self.maxp*2, self.maxp + extra])\n self.x.resize((self.maxp, dim))\n self.fx.resize((self.maxp, 1))\n # Expand the surrogate lists\n for i in range(self.M):\n for _ in range(self.maxp - oldmaxp):\n self.surrogate_list[i].append(None)", "def subarray_dimensions(self, keys):\n \n if len(keys) != len(self.dims):\n raise ValueError(\"Number of keys must be equal to the number of\" +\n \" dimensions. (Got \" + str(len(keys)) + \"/\"\n + str(len(self.dims)) + \")\")\n\n newDims = DimensionHelper()\n for key, dim in zip(keys, self.dims):\n newDim = dim.subdimension(key)\n if newDim is not None:\n newDims.dims.append(newDim)\n return newDims", "def expand(*arrays):\n arrays = list(map(boundify, arrays))\n assert_schema(arrays, same_dimension=True)\n\n dim_low = list(map(min, zip(*(a.datashape.dim_low for a in arrays))))\n dim_high = list(map(max, zip(*(a.datashape.dim_high for a in arrays))))\n\n result = []\n for a in arrays:\n ds = a.datashape.copy()\n ds.dim_low = dim_low\n ds.dim_high = dim_high\n if ds != a.datashape:\n a = a.redimension(ds.schema)\n result.append(a)\n\n return result", "def expand_many(x, axes):\n for ax in axes:\n x = torch.unsqueeze(x, ax)\n return x", "def expand_dims(self, axis, direction=1):\n res = self.empty_like()\n res.shape.insert(axis, [1])\n res.qhape.insert(axis, [0])\n res.dirs.insert(axis, direction)\n if not self.isscalar():\n for k, v in self.sects.items():\n new_k = list(k)\n new_k.insert(axis, 0)\n res[tuple(new_k)] = np.expand_dims(v, axis)\n elif res.charge == 0:\n res[(0,)] = np.array((res.defval,), dtype=res.dtype)\n res.defval = 0\n return res", "def redimension(ds,new_dims,\n intragroup_dim=None,\n inplace=False,\n save_mapping=False):\n if not inplace:\n ds=ds.copy()\n\n lin_dim=ds[new_dims[0]].dims[0]# the original linear dimension\n orig_dims=[ ds[vname].values.copy()\n for vname in new_dims ]\n Norig=len(orig_dims[0]) # length of the original, linear dimension\n\n uni_new_dims=[ np.unique(od) for od in orig_dims]\n\n for und in uni_new_dims:\n try:\n if np.any(und<0):\n log.warning(\"New dimensions have negative values -- will continue but you probably want to drop those first\")\n except TypeError:\n # some versions of numpy/xarray will not compare times to 0,\n # triggering a TypeError\n pass\n\n # note that this is just the shape that will replace occurences of lin_dim\n new_shape=[len(und) for und in uni_new_dims]\n\n # build up an index array \n new_idxs=[ np.searchsorted(und,od)\n for und,od in zip( uni_new_dims, orig_dims ) ]\n\n if intragroup_dim is not None:\n # here we need to first count up the max number within each 'bin'\n # so new_idxs\n count_per_group=np.zeros(new_shape,'i4')\n 
intra_idx=np.zeros(Norig,'i4')\n for orig_idx,idxs in enumerate(zip(*new_idxs)):\n intra_idx[orig_idx] = count_per_group[idxs] \n count_per_group[ idxs ]+=1\n n_intragroup=count_per_group.max() # 55 in the test case\n\n # add in the new dimension\n new_shape.append(n_intragroup)\n new_idxs.append(intra_idx)\n\n # negative means missing. at this point, intragroup_dim has not been taken care of\n # mapper: array of the shape of the new dimensions, with each entry giving the linear\n # index into the original dimension\n mapper=np.zeros(new_shape,'i4') - 1\n mapper[ tuple(new_idxs) ] = np.arange(Norig)\n\n # install the new coordinates - first the grouped coordinates\n for nd,und in zip(new_dims,uni_new_dims):\n del ds[nd] # doesn't like replacing these in one go\n ds[nd]= ( (nd,), und )\n if intragroup_dim is not None:\n # and second the new intragroup coordinate:\n new_dims.append(intragroup_dim)\n ds[intragroup_dim] = ( (intragroup_dim,), np.arange(n_intragroup) )\n\n for vname in ds.data_vars:\n if lin_dim not in ds[vname].dims:\n # print(\"Skipping %s\"%vname)\n continue\n # print(vname)\n\n var_new_dims=[]\n var_new_slice=[]\n mask_slice=[]\n for d in ds[vname].dims:\n if d==lin_dim:\n var_new_dims += new_dims\n var_new_slice.append( mapper )\n mask_slice.append( mapper<0 )\n else:\n var_new_dims.append(d)\n var_new_slice.append(slice(None))\n mask_slice.append(slice(None))\n var_new_dims=tuple(var_new_dims)\n var_new_slice=tuple(var_new_slice)\n\n # this is time x nSegment\n # ds[vname].values.shape # 10080,1494\n\n # This is the beast: but now it's including some crap values at the beginning\n new_vals=ds[vname].values[var_new_slice]\n mask=np.zeros_like(new_vals,'b1')\n mask[mask_slice] = True\n\n new_vals=np.ma.array(new_vals,mask=mask)\n\n old_attrs=OrderedDict(ds[vname].attrs)\n # This seems to be dropping the attributes\n ds[vname]=( var_new_dims, new_vals )\n for k in old_attrs:\n if k != '_FillValue':\n ds[vname].attrs[k] = old_attrs[k]\n\n if save_mapping:\n ds['mapping']= ( new_dims, mapper)\n\n return ds", "def _extend_index_dim(input_index, new_index, new_index_max):\n # Construct an iterator from new_index\n if isinstance(new_index, (int, np.integer)):\n it = [new_index]\n else:\n if isinstance(new_index, slice):\n # slices don't work very well with multi-dimensional circular mappings.\n it = _conv_slice_to_list(slice_obj=new_index, stop_def=new_index_max)\n else:\n it = new_index\n # Index extension\n if input_index is None:\n output = []\n for i in it:\n output.append(tuple([i]))\n return output\n else:\n output = []\n for _i in input_index:\n output_row = []\n for i in it:\n output_row.append(tuple(list(_i) + [i]))\n output.append(output_row)\n return output", "def extend_array(l, n):\n l.extend([-1] * n)\n l = l[:n]\n return l", "def _extend_slice(self, sl, extent, dim_max, dim_min=0):\n\n x_start = max(sl.start - extent, dim_min)\n x_end = min(sl.stop + extent, dim_max)\n return np.s_[x_start:x_end]", "def _expand_dims(st, axis):\n if not isinstance(st, structured_tensor.StructuredTensor):\n return tf.expand_dims(st, axis)\n nn_axis = _expand_dims_nonnegative_axis(axis, st.rank)\n if st.rank == 0:\n return _expand_dims_scalar(st)\n if nn_axis == 0:\n # Here, we can add a dimension 1 at the front.\n nrows = st.nrows()\n return st.partition_outer_dimension(\n RowPartition.from_uniform_row_length(nrows, nrows))\n elif nn_axis == 1:\n # Again, by partitioning the first dimension into vectors of length 1,\n # we can solve this problem.\n nrows = st.nrows()\n return 
st.partition_outer_dimension(\n RowPartition.from_uniform_row_length(\n tf.constant(1, dtype=nrows.dtype), nrows))\n else:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Unimplemented: non-negative axis > 1 for _expand_dims\")", "def _extend(self, newlen: int) -> None:\n diff = newlen - len(self)\n if diff > 0:\n self.extend([0] * diff)", "def _automatic_dims(cls, dims, size):\n if dims is None:\n dims = size\n elif np.product(dims) != size:\n raise QiskitError(\"dimensions do not match size.\")\n if isinstance(dims, (int, np.integer)):\n num_qubits = int(np.log2(dims))\n if 2 ** num_qubits == size:\n return num_qubits * (2,)\n return (dims,)\n return tuple(dims)", "def reshape_append_ones (a1, a2):\n l = [a1, a2]\n if (isinstance(a1, numpy.ndarray) and isinstance(a2, numpy.ndarray)):\n len1 = len(a1.shape)\n len2 = len(a2.shape)\n if (len1 == len2 or len1 == 0 or len2 == 0 or\n a1.shape[0] != a2.shape[0]):\n return l;\n elif (len1 < len2):\n d = len1\n maxLength = len2\n i = 0\n else:\n d = len2\n maxLength = len1\n i = 1\n while (d < maxLength):\n l[i] = numpy.expand_dims(l[i], d)\n d = d + 1\n return l", "def squeeze_expand_dim(tensor, axis):\n tensor = torch.squeeze(tensor)\n if len(list(tensor.size())) < 4:\n return tensor.unsqueeze(axis)\n else:\n return tensor", "def add_expanddims(self, input_name, expand_dim, name=None):\n expand_dim = self._maybe_add_const(expand_dim, \"expanddim_axis\")\n return self._build_op('ExpandDims', [input_name, expand_dim], name=name)", "def stack_dims(\n array: xr.DataArray,\n **dimensions: Sequence[Hashable]\n ) -> xr.DataArray:\n\n new_array = array.stack(dimensions).reset_index(tuple(dimensions.keys())) # type: ignore[arg-type]\n\n return new_array.assign_coords({dim: new_array.get_index(dim) for dim in dimensions}) # type: ignore[union-attr]", "def expand_like(arrays: List[np.ndarray], fill: float = -100) -> np.ndarray:\n full_shape = list(arrays[0].shape)\n if len(full_shape) == 1:\n return np.concatenate(arrays)\n full_shape[0] = sum(a.shape[0] for a in arrays)\n full_shape[1] = max(a.shape[1] for a in arrays)\n result = np.full(full_shape, fill)\n row_offset = 0\n for a in arrays:\n result[row_offset : row_offset + a.shape[0], : a.shape[1]] = a\n row_offset += a.shape[0]\n return result", "def extend2(x, ru, rd, cl, cr, extmod):\n rx, cx = int(x.shape[0]), int(x.shape[1])\n\n if extmod == \"per\":\n I = get_permutation_indices(rx, ru, rd)\n y = x[I, :]\n\n I = get_permutation_indices(cx, cl, cr)\n y = x[:, I]\n\n elif extmod == \"qper_row\":\n rx2 = int(round(rx / 2))\n\n y1 = x[rx2:rx, cx-cl:cx]\n y2 = x[:rx2, cx-cl:cx]\n y3 = x[rx2:rx, :cr]\n y4 = x[:rx2, :cr]\n\n y5 = np.concatenate((y1, y2))\n y6 = np.concatenate((y3, y4))\n\n y = np.concatenate((y5, x, y6), axis=1)\n\n I = get_permutation_indices(rx, ru, rd)\n y = y[I, :]\n\n elif extmod == \"qper_col\":\n cx2 = int(round(cx / 2))\n\n y1 = x[rx-ru:rx, cx2:cx]\n y2 = x[rx-ru:rx, :cx2]\n y3 = x[:rd, cx2:cx]\n y4 = x[:rd, :cx2]\n\n y5 = np.concatenate((y1, y2), axis=1)\n y6 = np.concatenate((y3, y4), axis=1)\n\n y = np.concatenate((y5, x, y6), axis=0)\n\n I = get_permutation_indices(cx, cl, cr)\n y = y[:, I]\n\n else:\n y = 0\n print(\"Error! 
Extmod not available\")\n\n return y", "def right_dimension_pad(array, n):\n if array.ndim >= n:\n return array\n\n nadd = n - array.ndim\n atts = [_new_attribute_label('_dim%i' % i, array) for i in range(nadd)]\n apply_args = [x for item in enumerate(atts) for x in item[::-1]]\n\n ds = array.datashape.copy()\n ds.dim_low = list(ds.dim_low) + ([0] * nadd)\n ds.dim_high = list(ds.dim_high) + ([0] * nadd)\n ds.dim_names = list(ds.dim_names) + atts\n ds.chunk_overlap = list(ds.chunk_overlap) + ([0] * nadd)\n ds.chunk_size = list(ds.chunk_size) + ([1000] * nadd)\n\n return array.apply(*apply_args).redimension(ds.schema)", "def combine_last_two_dim(self, x):\n old_shape = list(x.size())\n a, b = old_shape[-2:]\n new_shape = old_shape[:-2] + [a * b if a and b else None]\n ret = x.contiguous().view(new_shape)\n return ret", "def combine_last_two_dim(self, x):\n old_shape = list(x.size())\n a, b = old_shape[-2:]\n new_shape = old_shape[:-2] + [a * b if a and b else None]\n ret = x.contiguous().view(new_shape)\n return ret", "def combine_last_two_dim(self, x):\n old_shape = list(x.size())\n a, b = old_shape[-2:]\n new_shape = old_shape[:-2] + [a * b if a and b else None]\n ret = x.contiguous().view(new_shape)\n return ret", "def extend(self, extension):\n if not self.regular:\n raise ValueError(\"{} instances must be regular (have bins of \"\n \"equal size and shape) to compute extensions\"\n .format(self.__class__.__name__))\n\n old_edges = self.edges\n new_edges = []\n widths = (numpy.mean(w) for w in self.binwidths)\n for (ext, old_edge, width) in zip(extension, old_edges, widths):\n old_min, old_max = old_edge[(0, -1), ]\n new_start = numpy.arange(old_min - width * ext[0],\n old_min - width * 0.5, width)\n new_end = numpy.arange(old_max + width,\n old_max + width * (ext[1] + 0.5), width)\n new_edge = numpy.concatenate((new_start, old_edge, new_end))\n new_edges.append(new_edge)\n\n # Append remaining unchanged edge arrays\n new_edges += old_edges[len(new_edges):]\n\n return type(self)(new_edges)", "def extend_items(self, name, ext_items, text_key=None):\n if not text_key: text_key = self.text_key\n self.undimensionize()\n name = self._dims_free_arr_name(name)\n cat = self._has_categorical_data(name)\n source0 = self._meta['columns'][self.sources(name)[0]]\n for n_item in ext_items:\n if isinstance(n_item, dict):\n col = list(n_item.keys())[0]\n label = list(n_item.values())[0]\n else:\n col = '{}_{}'.format(name, len(self.sources(name))+1)\n label = n_item\n if self.var_exists(col):\n raise ValueError(\"Cannot add '{}', as it already exists.\".format(col))\n # add column meta\n column = {'name': col,\n 'text': {text_key: ''},\n 'type': source0['type'],\n 'parent': source0['parent'],\n 'properties': {'created': True}}\n if cat:\n column['values'] = source0['values']\n self._meta['columns'][col] = column\n # modify mask meta\n self._meta['masks'][name]['items'].append(\n {'properties': {'created': True},\n 'source': 'columns@{}'.format(col),\n 'text': {text_key: ''}})\n self._meta['sets'][name]['items'].append('columns@{}'.format(col))\n self.set_variable_text(col, label, text_key)\n self._data[col] = '' if source0['type'] == 'delimited set' else np.NaN\n if self._dimensions_comp and not self._dimensions_comp == 'ignore':\n self.dimensionize()\n return None", "def convert_expand_dims(node, **kwargs):\n name, input_nodes, attrs = get_inputs(node, kwargs)\n\n axis = int(attrs.get(\"axis\"))\n\n node = onnx.helper.make_node(\n \"Unsqueeze\",\n input_nodes,\n [name],\n axes=[axis],\n name=name,\n )\n 
return [node]", "def lift_to_dimension(A,dim):\n\n current_dim = len(A.shape)\n if current_dim>dim:\n raise ValueError('Can only add dimensions, but not remove them')\n\n if current_dim==dim:\n return A\n else:\n return A.reshape([1]*(dim-current_dim)+list(A.shape))", "def collapse_dims(\n arr: JaggedArray, axis: int = -1, inplace: bool = False\n) -> JaggedArray:\n\n assert axis != 0, \"cannot collapse the zero'th axis\"\n\n if axis < 0:\n axis += arr.ndim\n\n keepdims = np.delete(np.arange(arr.ndim), (axis - 1, axis - 2))\n newshape = arr.shape[axis - 2] * arr.shape[axis - 1]\n\n if not keepdims.size:\n shape = np.expand_dims(newshape, 0)\n else:\n shape = np.concatenate([arr.shape[: axis - 2], newshape], axis=1)\n\n if not inplace:\n arr = arr.copy()\n arr.shape = shape\n return arr", "def add_extra_dims(self, params: List[ExtraBytesParams]) -> None:\n self.header.add_extra_dims(params)\n new_point_record = record.PackedPointRecord.from_point_record(\n self.points, self.header.point_format\n )\n self.points = new_point_record", "def _SetDimensions(self):\n self._size = 0\n for variable_ndarray in self._layer.get_weights():\n size = variable_ndarray.size\n self._dimensions.append((variable_ndarray.shape, size))\n self._size += size", "def left_dimension_pad(array, n):\n if array.ndim >= n:\n return array\n nadd = n - array.ndim\n atts = [_new_attribute_label('_dim%i' % i, array) for i in range(nadd)]\n apply_args = [x for item in enumerate(atts) for x in item[::-1]]\n\n ds = array.datashape.copy()\n ds.dim_low = ([0] * nadd) + list(ds.dim_low)\n ds.dim_high = ([0] * nadd) + list(ds.dim_high)\n ds.dim_names = atts + list(ds.dim_names)\n ds.chunk_overlap = ([0] * nadd) + list(ds.chunk_overlap)\n ds.chunk_size = ([1000] * nadd) + list(ds.chunk_size)\n\n return array.apply(*apply_args).redimension(ds.schema)", "def extendArray(lhs=('AnyPin', [], {PinSpecifires.CONSTRAINT: '1', PinSpecifires.ENABLED_OPTIONS: PinOptions.ArraySupported | PinOptions.AllowAny}),\n rhs=('AnyPin', [], {PinSpecifires.CONSTRAINT: '1', PinSpecifires.ENABLED_OPTIONS: PinOptions.ArraySupported | PinOptions.AllowAny})):\n lhs.extend(rhs)\n return lhs", "def subsystem_dims(self):\n return [subsystem.truncated_dim for subsystem in self]", "def promote_empty_dims(ds):\n ds = ds.copy()\n for di in ds.dims:\n if di not in ds.coords:\n ds.coords[di] = ds[di]\n return ds", "def redimension(array, dimensions, attributes, dim_boundaries=None):\n if array.dim_names == dimensions and array.att_names == attributes:\n return array\n dim_boundaries = dim_boundaries or {}\n\n orig_atts = set(array.att_names)\n orig_dims = set(array.dim_names)\n\n to_promote = [d for d in dimensions if d in orig_atts] # att->dim\n to_demote = [a for a in attributes if a in orig_dims] # dim->att\n array = cast_to_integer(array, to_promote)\n\n # need a dummy attribute, otherwise result has no attributes\n if not attributes:\n dummy = _new_attribute_label('__dummy', array)\n array = array.apply(dummy, 0)\n attributes = [dummy]\n\n # build the attribute schema\n new_att = {}\n for r in array.sdbtype.full_rep:\n if r[0] in attributes: # copy schema\n new_att[r[0]] = _att_schema_item(r)\n for d in to_demote: # change attribute to dimension\n new_att[d] = '%s:int64' % d\n\n new_att = ','.join(new_att[a] for a in attributes)\n\n # build the dimension schema\n ds = array.datashape\n new_dim = {}\n for n, l, h, ch, co in zip(ds.dim_names, ds.dim_low, ds.dim_high,\n ds.chunk_size, ds.chunk_overlap):\n h = h if h is not None else '*'\n if n in dimensions:\n 
new_dim[n] = '{0}={1}:{2},{3},{4}'.format(n, l, h, ch, co)\n\n if to_promote:\n # don't do limits here, too expensive!\n # XXX this does wrong thing if attribute has negative values\n # for k, v in limits(array, to_promote).items():\n for k in to_promote:\n v = dim_boundaries.get(k, (0, '*'))\n new_dim[k] = _dim_schema_item(k, v)\n\n new_dim = ','.join(new_dim[d] for d in dimensions)\n\n schema = '<{0}> [{1}]'.format(new_att, new_dim)\n return array.redimension(schema)", "def _with_space_to_batch_adjust(orig, fill_value, spatial_dims):\n fill_dims = orig.get_shape().as_list()[1:]\n dtype = orig.dtype.as_numpy_dtype\n parts = []\n const_orig = tensor_util.constant_value(orig)\n const_or_orig = const_orig if const_orig is not None else orig\n prev_spatial_dim = 0\n i = 0\n while i < len(spatial_dims):\n start_i = i\n start_spatial_dim = spatial_dims[i]\n if start_spatial_dim > 1:\n # Fill in any gap from the previous spatial dimension (or dimension 1 if\n # this is the first spatial dimension) with `fill_value`.\n parts.append(\n np.full(\n [start_spatial_dim - 1 - prev_spatial_dim] + fill_dims,\n fill_value,\n dtype=dtype))\n # Find the largest value of i such that:\n # [spatial_dims[start_i], ..., spatial_dims[i]]\n # == [start_spatial_dim, ..., start_spatial_dim + i - start_i],\n # i.e. the end of a contiguous group of spatial dimensions.\n while (i + 1 < len(spatial_dims) and\n spatial_dims[i + 1] == spatial_dims[i] + 1):\n i += 1\n parts.append(const_or_orig[start_i:i + 1])\n prev_spatial_dim = spatial_dims[i]\n i += 1\n if const_orig is not None:\n return np.concatenate(parts)\n else:\n return array_ops.concat(parts, 0)", "def augment (self, *args):\n cols = list(self.columns())\n for aug in args:\n try:\n cols.extend(aug.columns())\n except AttributeError:\n cols.append(aug)\n return Matrix(*cols, columns=True)", "def add_reduced_dims(self, method, no_dims, dims_to_use=None, *args, **kargs):\n if not dims_to_use:\n dims_to_use = self.dims\n points = self.get_points(*dims_to_use)\n if method == 'tsne':\n extra_points = calc_tsne(points)\n else:\n from mlabwrap import mlab\n extra_points, mapping = mlab.compute_mapping(\n points, method, no_dims, *args, nout=2, **kargs)\n if method.lower() in ['isomap', 'lle']:\n # we need to convert the index array from matlab to python (and remember\n # that python is 0-based and not 1-based)\n indices = np.subtract(mapping.conn_comp.T[0].astype('int'), 1)\n old_data = self.data[indices,:]\n else:\n old_data = self.data\n new_data = np.concatenate((old_data, extra_points), axis=1)\n extra_dims = ['%s%d' % (method, i) for i in xrange(no_dims)]\n new_dims = self.dims + extra_dims\n return DataTable(new_data, new_dims, self.legends, self.tags.copy())", "def _set_dims(xs, ys, dmax):\n\n xmin = np.min(xs)\n xmax = np.max(xs)\n\n ymin = np.min(ys)\n ymax = np.max(ys)\n\n x_abs = np.abs(xmax - xmin)\n y_abs = np.abs(ymax - ymin)\n\n if x_abs > y_abs:\n step = x_abs / dmax\n x_dim_coords = np.arange(xmin + step, xmax + step, step)\n y_dim_coords = np.arange(ymin + step, ymax + step, step)\n else:\n step = y_abs / dmax\n y_dim_coords = np.arange(ymin + step, ymax + step, step)\n x_dim_coords = np.arange(xmin + step, xmax + step, step)\n\n # y_dim_coords must be flipped\n\n y_dim_coords = y_dim_coords[::-1]\n return x_dim_coords, y_dim_coords, [step, xmin, xmax, ymin, ymax]", "def setDimensions(self, *args):\n return _libsbml.Layout_setDimensions(self, *args)", "def expand_img(ndarray ,left, right, up, down):\n\tif left > 0:\n\t\t#get the neighbor column for 
duplicate it\n\t\tto_add = ndarray[:, 0]#get the firts column\n\t\tto_add = to_add[None] #add new axis \n\t\tto_add = np.repeat(to_add.T, left, axis = 1)\n\t\t#add the extrapolation to final result\n\t\tndarray = np.concatenate((to_add ,ndarray), axis = 1)\n\n\tif right > 0:\n\t\t#get the neighbor column for duplicate it\n\t\tto_add = ndarray[:, -1] #get the last column\n\t\tto_add = to_add[None] #add new axis\n\t\tto_add = np.repeat(to_add.T, right, axis = 1)\n\t\t#add the extrapolation to final result\n\t\tndarray = np.concatenate((ndarray, to_add), axis = 1)\n\tif up > 0:\n\t\t#get the neighbor row for duplicate it\n\t\tto_add = ndarray[0, :] #get the first row\n\t\tto_add = to_add[None] #add new axis\n\t\tto_add = np.repeat(to_add, up, axis = 0)\n\t\t#add the extrapolation to final result\n\t\tndarray = np.concatenate((to_add, ndarray), axis = 0)\n\tif down > 0:\n\t\t#get the neighbor column for duplicate it\n\t\tto_add = ndarray[-1, :] #get the first row\n\t\tto_add = to_add[None] #add new axis\n\t\tto_add = np.repeat(to_add, down, axis = 0)\n\t\t#add the extrapolation to final result\n\t\tndarray = np.concatenate((ndarray, to_add), axis = 0)\n\n\treturn ndarray", "def combine_last_two_dimensions(x):\r\n old_shape = x.get_shape().dims\r\n a, b = old_shape[-2:]\r\n new_shape = old_shape[:-2] + [a * b if a and b else None]\r\n ret = tf.reshape(x, tf.concat([tf.shape(x)[:-2], [-1]], 0))\r\n ret.set_shape(new_shape)\r\n return ret", "def expand_dim_for_tensor_list(tensor_list, dim_array):\n res_tensor_list = []\n for tensor in tensor_list:\n res_tensor = tensor\n for dim in dim_array:\n res_tensor = tf.expand_dims(res_tensor, dim)\n res_tensor_list.append(res_tensor)\n\n return res_tensor_list", "def test_dimension_size_override(self, nt=100):\n i, j, k = dimify('i j k')\n a = TimeData(name='a', dimensions=(i, j, k))\n one = symbol(name='one', dimensions=(i, j, k), value=1.)\n op = Operator(Eq(a.forward, a + one))\n\n # Test dimension override via the buffered dimenions\n a.data[0] = 0.\n op(a=a, t=6)\n assert(np.allclose(a.data[1], 5.))\n\n # Test dimension override via the parent dimenions\n a.data[0] = 0.\n op(a=a, time=5)\n assert(np.allclose(a.data[0], 4.))", "def change_dimensions(self, ndim):\n\n if self.change_dimensionality:\n self._dimensions = ndim\n else:\n raise ValueError('dimensionality cannot be changed for this'\n 'problem')", "def _fix_squeeze(self, inputs, new_attr):\n axes = new_attr.get('axis')\n op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)\n for i in axes[1:]:\n op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)\n return op", "def updateDims(self, key=\"\", val=None):\n self.setDimension(key, val)\n self.setDimension(\n \"outer_radius\",\n self.getDimension(\"inner_radius\")\n + self.getDimension(\"radius_differential\"),\n )\n self.setDimension(\n \"outer_axial\",\n self.getDimension(\"inner_axial\") + self.getDimension(\"height\"),\n )\n self.setDimension(\n \"outer_theta\",\n self.getDimension(\"inner_theta\")\n + self.getDimension(\"azimuthal_differential\"),\n )", "def add_dummy_batch_dim(x):\n return x.view(1, x.size()[0], x.size()[1], x.size()[2])", "def combine_last_two_dimensions(x):\n old_shape = x.get_shape().dims\n a, b = old_shape[-2:]\n new_shape = old_shape[:-2] + [a * b if a and b else None]\n ret = tf.reshape(x, tf.concat([tf.shape(x)[:-2], [-1]], 0))\n ret.set_shape(new_shape)\n return ret", "def _maybe_expand_dims(x):\n x = tf.convert_to_tensor(x)\n if x.shape == ():\n return tf.expand_dims(x, axis=0)\n 
return x", "def resize_view_axis(interval, newsize, image_length):\n if newsize < image_length - interval[0]:\n # Window can be expanded without any shift of image or whitespace\n interval[1] = interval[0] + newsize\n elif newsize < image_length:\n # Window can be expanded without whitespace by moving image\n interval[1] = int(image_length)\n interval[0] = interval[1] - newsize\n else:\n # Set maximum along this length\n interval[0] = 0\n interval[1] = int(image_length)", "def reshape(self, input_dims=None, output_dims=None):\n clone = copy.copy(self)\n if output_dims is None and input_dims is None:\n return clone\n if input_dims is not None:\n if np.product(input_dims) != self._input_dim:\n raise QiskitError(\n \"Reshaped input_dims ({}) are incompatible with combined\"\n \" input dimension ({}).\".format(input_dims, self._input_dim))\n clone._input_dims = tuple(input_dims)\n if output_dims is not None:\n if np.product(output_dims) != self._output_dim:\n raise QiskitError(\n \"Reshaped output_dims ({}) are incompatible with combined\"\n \" output dimension ({}).\".format(output_dims, self._output_dim))\n clone._output_dims = tuple(output_dims)\n return clone", "def make_emb_cols(self, dims):\n self.emb_cols = []\n for j in range(1, dims + 1):\n self.emb_cols.append('dw' + str(j - 1))", "def _merge_beam_dim(tensor: Any) ->Any:\n if not isinstance(tensor, torch.Tensor):\n return tensor\n shape = list(tensor.size())\n shape[0] *= shape[1]\n shape.pop(1)\n return tensor.view(tuple(shape))", "def update_dimensions(self, temp):\n # get number of bands\n if hasattr(temp,'t') and hasattr(temp.t, 'size') and temp.t.size > 0:\n self.dimensions[2]=temp.t.size\n self.t=temp.t.copy()\n else:\n self.dimensions[2] = 1\n # calculate y dimensions with new extents\n self.dimensions[0] = np.int64((self.extent[3] - self.extent[2])/self.spacing[1]) + 1\n # calculate x dimensions with new extents\n self.dimensions[1] = np.int64((self.extent[1] - self.extent[0])/self.spacing[0]) + 1\n # calculate x and y arrays\n self.x = self.extent[0] + self.spacing[0]*np.arange(self.dimensions[1])\n self.y = self.extent[2] + self.spacing[1]*np.arange(self.dimensions[0])\n return self", "def atleast_nd(arr, n, front=False):\n arr_ = np.asanyarray(arr)\n ndims = len(arr_.shape)\n if n is not None and ndims < n:\n # append the required number of dimensions to the front or back\n if front:\n expander = (None,) * (n - ndims) + (Ellipsis,)\n else:\n expander = (Ellipsis,) + (None,) * (n - ndims)\n arr_ = arr_[expander]\n return arr_", "def to_dimensions(array, *attributes):\n dims = list(array.dim_names) + [a for a in attributes if a in array.att_names]\n atts = [a for a in array.att_names if a not in attributes]\n return redimension(array, dims, atts)", "def add_batch_dim(nest: types.NestedArray) -> types.NestedTensor:\n return tree.map_structure(lambda x: tf.expand_dims(x, axis=0), nest)", "def ExtendWithZeros(size_added, name=None):\n # This automatically infers the batch size from the shape of the input\n # See https://stackoverflow.com/questions/46465813/creating-constant-value-in-keras\n def func(x):\n batch_size = K.shape(x)[0]\n zeros = K.zeros((1, size_added))\n tiledzeros = K.tile(zeros, (batch_size, 1))\n return K.concatenate([x, tiledzeros])\n return keras.layers.Lambda(func, name=name)", "def extend(self, x) -> None:\n pass", "def resize(self, dims, default=None):\n\t\twidth, height = dims\n\t\tassert isinstance(width, int)\n\t\tassert isinstance(height, int)\n\t\tassert width > 0\n\t\tassert height > 0\n\t\tself.dims 
= Size(width, height)\n\t\t# TODO maybe clearing is not the best idea, maybe I should keep old values wherever is possible.\n\t\tself.data = [copy.deepcopy(default) for _ in range(width * height)]", "def ensure_dims(array: xr.DataArray, *dimensions: Hashable) -> xr.DataArray:\n missing_dims = set(dimensions) - set(array.dims)\n\n new_dims = defaultdict(list)\n for coord in missing_dims:\n cdim_tuple = array.coords[coord].dims\n\n if len(cdim_tuple) > 1:\n raise ValueError('Multi dimensional coordinates are not supported')\n\n cdim = cdim_tuple[0]\n\n new_dims[cdim].append(coord)\n\n for dim, coords in new_dims.items():\n array = array.set_index({cdim: tuple(coords)}) # type: ignore[assignment]\n\n if len(coords) > 1:\n array = array.unstack(dim)\n\n return array.drop_vars(array.coords.keys() - set(array.dims))", "def extend(self, *args, **kwargs): # real signature unknown\n pass", "def define_dimensions(self) -> None:\n if self.dimensions == 2:\n hoomd.md.update.enforce2d()", "def assign_dimensions(self):\n _d_assignment = {\n \"t\": \"NT\",\n \"x\": \"NX\",\n \"y\": \"NY\",\n \"z\": \"NZ\",\n \"u\": \"NU\",\n \"p\": \"NP\",\n }\n self.dimensions = self.definitions[\"dims\"]\n for key, val in self.dimensions.iteritems():\n if not _d_assignment.has_key(key):\n err_str = \"'{} is not accepted as function argument!\".format(\n key=key\n )\n err_str += \"bailing out ...\"\n raise AttributeError(err_str)\n else: # assign dimensions\n setattr(self, _d_assignment[key], val)", "def as_same_dimension(*arrays):\n ndim = arrays[0].ndim\n for a in arrays:\n if a.ndim == ndim:\n continue\n # XXX could try broadcasting here\n raise ValueError(\"Invalid array dimensions: %s vs %s\" % (ndim, a.ndim))\n return arrays", "def __DimSiz_restriction_correct_ndarray_ndarray4(self):\n\n strTestName = 'The size of a dimension of a Numpy array higher or equal to the size of a dimension of another Numpy array (4) (correct)'\n RxCSObject = _RxCSobject()\n\n # Firstly, let us define a reference parameter\n RxCSObject.paramAddMan('aParameter1', 'Numpy array parameter')\n RxCSObject.paramType('aParameter1', np.ndarray)\n\n # Now, let us define a Numpy Array\n RxCSObject.paramAddMan('parameter1', 'Numpy array parameter')\n RxCSObject.paramType('parameter1', np.ndarray)\n RxCSObject.paramDimHE('parameter1', 'aParameter1', 'pages', 'columns', mul=2)\n\n RxCSObject.parameter1 = np.random.randn(2, 3, 4) # * 5 pages, 3 rows, 4 columns\n RxCSObject.aParameter1 = np.random.randn(4, 3, 1) # 4 pages, 3 rows, * 1 column\n\n self.__parametersCheck_error(RxCSObject, 'correct', strTestName)", "def _expand_dims_nonnegative_axis(axis, rank):\n # Implementation note: equivalent to get_positive_axis(axis, rank + 1)\n if axis < 0:\n new_axis = (1 + rank) + axis\n if new_axis < 0:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Axis out of range: \" + str(axis))\n return new_axis\n elif axis > rank:\n # Note: this is unreachable in the current code.\n raise ValueError(\"Axis larger than rank: \" + str(axis) + \" > \" + str(rank))\n return axis", "def _extend(self, other_field, memo) -> None:\n if other_field.data.ndim != self.data.ndim:\n raise ValueError(\n f\"Field '{self.name}' cannot be extended. Dimensions must be equal. 
({other_field.data.ndim} != {self.data.ndim})\"\n )\n\n old_id = id(self.data)\n if self.data.dtype < other_field.data.dtype:\n # Increase size of self.data.dtype before inserting\n new_data = np.insert(self.data.astype(other_field.data.dtype), self.num_obs, other_field.data, axis=0)\n else:\n new_data = np.insert(self.data, self.num_obs, other_field.data, axis=0)\n memo[old_id] = (self.data, new_data)\n self.data = new_data", "def expand_beta(beta, len_disc, len_eco, len_gr): # prev -> formBetaExtNumpy\n beta_ext_asc = beta[0:-3][:]\n beta_disc = beta[-3]\n beta_eco = beta[-2]\n beta_gr = beta[-1]\n beta_ext_disc = beta_disc*np.ones(len_disc)\n beta_ext_eco = beta_eco*np.ones(len_eco)\n beta_ext_gr = beta_gr*np.ones(len_gr)\n beta_ext_new = np.concatenate((beta_ext_asc, beta_ext_disc, beta_ext_eco, beta_ext_gr))\n return beta_ext_new", "def set_derived_configs(self):\n if 'dim' in self.config and self.config['dim'] <= 0:\n self.config['dim'] = self.descriptors['input']['dim']", "def __DimSiz_restriction_correct_ndarray_ndarray_pedantic3(self):\n\n strTestName = 'The size of a dimension of a Numpy array higher or equal to the size of a dimension of another Numpy array [pedantic] (3) (correct)'\n RxCSObject = _RxCSobject()\n\n # Firstly, let us define a reference parameter\n RxCSObject.paramAddMan('aParameter1', 'Numpy array parameter')\n RxCSObject.paramType('aParameter1', np.ndarray)\n\n # Now, let us define a Numpy Array\n RxCSObject.paramAddMan('parameter1', 'Numpy array parameter')\n RxCSObject.paramType('parameter1', np.ndarray)\n RxCSObject.paramDimHE('parameter1', 'aParameter1', 'rows', 'columns', pedantic=1, add=1)\n\n RxCSObject.parameter1 = np.random.randn(4, 3, 4)\n RxCSObject.aParameter1 = np.random.randn(3, 2)\n\n self.__parametersCheck_error(RxCSObject, 'correct', strTestName)", "def _pad_array(da, dim, left=False, boundary=None, fill_value=0.0):\n\n if boundary not in [\"fill\", \"extend\"]:\n raise ValueError(\"`boundary` must be `'fill'` or `'extend'`\")\n\n axis_num = da.get_axis_num(dim)\n shape = list(da.shape)\n shape[axis_num] = 1\n\n base_array = da.data\n index = slice(0, 1) if left else slice(-1, None)\n edge_array = da.isel(**{dim: index}).data\n\n use_dask = has_dask and isinstance(base_array, dsa.Array)\n\n if boundary == \"extend\":\n boundary_array = edge_array\n elif boundary == \"fill\":\n args = shape, fill_value\n kwargs = {\"dtype\": base_array.dtype}\n if use_dask:\n full_func = dsa.full\n kwargs[\"chunks\"] = edge_array.chunks\n else:\n full_func = np.full\n boundary_array = full_func(*args, **kwargs)\n\n arrays_to_concat = [base_array, boundary_array]\n if left:\n arrays_to_concat.reverse()\n\n return concatenate(arrays_to_concat, axis=axis_num)", "def setDims(self, k, name, dims):\n dims = tuple(dims)\n if self.debug:\n print(sub(\"DIMS {:d}: {} <-- {}\", k, name, dims))\n self.sp_dicts.setdefault(k, {})[name] = dims", "def _squeeze_dims(ds):\n ds = ds.squeeze()\n for dim in ['lon', 'lat', 'bnds', 'depth', 'depth_2', 'depth_3']:\n if dim in ds:\n if ds[dim].size <= 1:\n del ds[dim]\n drop = []\n for dim in [\n 'hyai', 'hybi', 'hyam', 'hybm', 'time_bnds', 'lat_bnds', 'lon_bnds'\n ]:\n if dim in ds:\n drop.append(dim)\n ds = ds.drop(drop)\n return ds.squeeze()", "def set_dimensions(self):\n all_dimension_choices = []\n\n #################### Remap Extra Objects ####################\n if self.module_name == \"keras\":\n from keras.initializers import Initializer as KerasInitializer\n from keras.callbacks import Callback as KerasCB\n\n 
self.init_iter_attrs.append(lambda _p, _k, _v: isinstance(_v, KerasInitializer))\n self.extra_iter_attrs.append(lambda _p, _k, _v: isinstance(_v, KerasCB))\n\n #################### Collect Choice Dimensions ####################\n init_dim_choices = get_choice_dimensions(self.model_init_params, self.init_iter_attrs)\n extra_dim_choices = get_choice_dimensions(self.model_extra_params, self.extra_iter_attrs)\n\n for (path, choice) in init_dim_choices:\n choice._name = (\"model_init_params\",) + path\n all_dimension_choices.append(choice)\n\n for (path, choice) in extra_dim_choices:\n choice._name = (\"model_extra_params\",) + path\n all_dimension_choices.append(choice)\n\n self.dimensions = all_dimension_choices\n\n if self.module_name == \"keras\":\n self.model_extra_params = link_choice_ids(\n self.dummy_layers,\n self.dummy_compile_params,\n self.model_extra_params,\n self.dimensions,\n )", "def SetDimensions(self, p_int, p_int_1, p_int_2, p_int_3, p_int_4, p_int_5, p_int_6):\n ...", "def extend_features( X, Xr, X_degraded = None, i_rng = None, j_rng = None, ms_levels = None):\n\n luma_band_thresh = k-1\n Xl = Xr[:,0]\n if X_degraded is not None:\n mini = np.amin(X_degraded[:,0])\n maxi = np.amax(X_degraded[:,0])\n else:\n mini = np.amin(Xl)\n maxi = np.amax(Xl)\n l_step = (maxi-mini)/k\n for il in range(1,k):\n bp = mini+il*l_step\n if l_step < luma_band_thresh:\n Xl1 = np.zeros(Xl.shape)\n else:\n Xl1 = (Xl>=bp).astype(np.float64)\n Xl1 = Xl1*(Xl-bp)\n Xl1.shape += (1,)\n X = np.concatenate((X,Xl1), axis = 1)\n \n Xd = get_patch_features(ms_levels,i_rng, j_rng)\n X = np.concatenate((X,Xd),axis=1)\n return X", "def keep_dims(self, keep_dims, *, errors=\"raise\"):\n if isinstance(keep_dims, str):\n keep_dims = {keep_dims}\n else:\n keep_dims = set(keep_dims)\n all_dims = set(self.dims)\n if errors == \"raise\":\n missing_dims = keep_dims - all_dims\n if missing_dims:\n raise ValueError(\n \"Dataset does not contain the dimensions: %s\" % missing_dims\n )\n return self.drop_dims([i for i in all_dims if i not in keep_dims])", "def append_time_dim(arr, y_, time_stamps):\n time_arr = np.zeros([arr.shape[0]-time_stamps, int(time_stamps*arr.shape[1])])\n for time_idx, time_ in enumerate(np.arange(time_stamps, arr.shape[0])):\n for time_point in range(time_stamps):\n time_arr[time_idx, time_point*arr.shape[1]:(time_point+1)*arr.shape[1]] = arr[time_-time_point,:]\n return time_arr, y_[time_stamps:]", "def reshape(self, *dims):\n if dims is None or (len(dims) == 1 and dims[0] is None):\n return self\n\n # unpack if necessary\n if len(dims) == 1 and (type(dims[0]) is list or type(dims[0]) is tuple):\n dims = dims[0]\n \n dims_computer = [d if callable(d) else lambda s: s.data(d) for d in dims]\n\n seqs = [s for s in self.unstructured()]\n dimensions = [tuple(d(s) for d in dims_computer) for s in seqs]\n data = {}\n \n for s,d in zip(seqs, dimensions):\n if d in data: data[d].append(s)\n else: data[d] = [s]\n\n return DataArray(data, dims=dims)", "def resize(self, size):\n if len(size) != len(self._Fkernel.shape[1:-1]):\n raise RuntimeError(\"length of resize shape is incorrect.\")\n if not np.all(size >= self._Fkernel.shape[1:-1]):\n raise RuntimeError(\"resize shape is too small.\")\n kernel = self._frequency_2_real()\n kernel_pad = self._zero_pad(kernel, size)\n self._Fkernel = self._real_2_frequency(kernel_pad)\n self.basis._axes_shape = kernel_pad.shape[1:-1]", "def expand_images(X):\n\n X_ex = np.empty((X.shape[0] * X.shape[1], X.shape[2])) * np.nan\n\n for n in range(0, X.shape[2]):\n 
X_ex[:,n] = X[:,:,n].flatten()\n\n return X_ex", "def squeeze(self, axis: Optional[AxisLike] = None):\n\n if axis is None:\n axis = range(self.shape.shape[0])\n elif axis == -1:\n axis = self.shape.shape[0]\n if not isinstance(axis, Iterable):\n axis = [axis]\n axis = [ax for ax in axis if (self.shape[ax] == 1).all(axis=0)]\n self.shape = np.delete(self.shape, axis, axis=0)\n return self", "def setDimensions(self, *args):\n return _libsbml.BoundingBox_setDimensions(self, *args)", "def extend(self, ext_len, chrom_len):\n mid = find_midpoint(self.start, self.end)\n self.start = max(0, mid - ext_len / 2)\n self.end = self.start + ext_len\n if chrom_len and self.end > chrom_len:\n self.end = chrom_len\n self.start = self.end - ext_len" ]
[ "0.70451933", "0.6982546", "0.673844", "0.66155654", "0.65966463", "0.64626193", "0.6444467", "0.6379055", "0.62785256", "0.62534416", "0.6246189", "0.6212844", "0.61804307", "0.61784613", "0.60776794", "0.5881329", "0.58224505", "0.57709056", "0.57032615", "0.569746", "0.56646603", "0.56108725", "0.56040627", "0.5544607", "0.55291796", "0.5512313", "0.54535747", "0.5424974", "0.54201657", "0.5397136", "0.5393403", "0.5392815", "0.53777665", "0.5345087", "0.532259", "0.532259", "0.532259", "0.53050804", "0.53012186", "0.5295936", "0.5291142", "0.5289959", "0.5282013", "0.5263588", "0.5260843", "0.5236603", "0.5224991", "0.5212673", "0.52112466", "0.52014285", "0.51978374", "0.51908684", "0.51708984", "0.51705873", "0.5166327", "0.5147909", "0.51260346", "0.51227784", "0.5119185", "0.51157814", "0.5114111", "0.5098211", "0.5089897", "0.5061229", "0.50526005", "0.5052116", "0.5048523", "0.5044548", "0.50387156", "0.5026899", "0.5025291", "0.5025099", "0.50233877", "0.50074095", "0.49921453", "0.49809918", "0.49743342", "0.49624756", "0.49595314", "0.4957754", "0.49464467", "0.49361402", "0.49321988", "0.49304393", "0.49169946", "0.49070898", "0.48994988", "0.48993742", "0.48899108", "0.48896602", "0.48884535", "0.48864657", "0.4884413", "0.48834255", "0.48791128", "0.48787272", "0.48773825", "0.48769572", "0.48767135", "0.48766357" ]
0.642201
7
Populates CLOUDS variable with mappings from providers to clouds
def _populate_clouds(): for key, value in list(globals().items()): if not key.startswith('_') and key.endswith( 'Cloud') and key != 'Cloud': if not value._controller_cls: continue if issubclass(value, Cloud) and value is not Cloud: CLOUDS[value._controller_cls.provider] = value # Add aliases to CLOUDS dictionary for key, value in config.PROVIDERS.items(): driver_name = value['driver'] cloud_aliases = [key] + value['aliases'] if CLOUDS.get(driver_name): for alias in cloud_aliases: CLOUDS[alias] = CLOUDS[driver_name] else: value = next((CLOUDS.get(alias) for alias in cloud_aliases if CLOUDS.get(alias)), None) if value: for alias in cloud_aliases: CLOUDS[alias] = value
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup_clouds(auth=None):\n get_operator_cloud(auth)\n get_openstack_cloud(auth)", "def setup_clouds(auth=None):\n get_operator_cloud(auth)\n get_openstack_cloud(auth)", "def get_clouds():\n clouds = [ x.get('cloud') for x in Schedconfig.objects.values('cloud').distinct() ]\n locale.setlocale(locale.LC_ALL, '')\n clouds = sorted(clouds, key=locale.strxfrm)\n return clouds", "def cloud_config():\n return Namespace(\n project=os.environ.get('GCLOUD_PROJECT'),\n storage_bucket=os.environ.get('CLOUD_STORAGE_BUCKET'),\n client_secrets=os.environ.get('GOOGLE_CLIENT_SECRETS'),\n bigtable_instance=os.environ.get('BIGTABLE_CLUSTER'),\n api_key=os.environ.get('API_KEY'))", "def _get_services(self):\n from googleapiclient.discovery import build as discovery_build\n from oauth2client.client import (\n GoogleCredentials,\n ApplicationDefaultCredentialsError,\n )\n from google.cloud import storage\n\n # Credentials must be exported to environment\n try:\n creds = GoogleCredentials.get_application_default()\n except ApplicationDefaultCredentialsError as ex:\n log_verbose_traceback(ex)\n raise ex\n\n # Discovery clients for Google Cloud Storage and Life Sciences API\n self._storage_cli = discovery_build(\"storage\", \"v1\", credentials=creds)\n self._compute_cli = discovery_build(\"compute\", \"v1\", credentials=creds)\n self._api = discovery_build(\"lifesciences\", \"v2beta\", credentials=creds)\n self._bucket_service = storage.Client()", "def setup(self):\n base = automap_base()\n engine = create_engine(\"mysql+pymysql://\" + csconfig.config.db_user + \":\" +\n csconfig.config.db_password + \"@\" +\n csconfig.config.db_host + \":\" +\n str(csconfig.config.db_port) +\n \"/\" + csconfig.config.db_name)\n base.prepare(engine, reflect=True)\n session = Session(engine)\n cloud_yaml = base.classes.csv2_group_resource_yaml\n\n for cloud in self.group_resources:\n cloud_yamls = session.query(cloud_yaml).\\\n filter(cloud_yaml.group_name == self.name,\n cloud_yaml.cloud_name == cloud.cloud_name)\n cloud_yaml_list = []\n for yam in cloud_yamls:\n cloud_yaml_list.append([yam.yaml_name, yam.yaml, yam.mime_type])\n if cloud.cloud_type == 'localhost':\n newcloud = cloudscheduler.localhostcloud.LocalHostCloud(extrayaml=cloud_yaml_list, resource=cloud)\n else:\n newcloud = cloudscheduler.openstackcloud.\\\n OpenStackCloud(extrayaml=cloud_yaml_list, resource=cloud)\n self.clouds[newcloud.name] = newcloud\n self.log.debug(\"Added all clouds for group: %s\", self.name)", "def test_get_cloud(self):\n pass", "def test_get_clouds(self):\n pass", "def create_data_providers():\n prov_dict = {}\n with custom_mp_config(\n get_test_data_path().parent.joinpath(\"msticpyconfig-test.yaml\")\n ):\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\", category=UserWarning)\n if _KQL_IMP_OK:\n prov_dict[\"az_sent_prov\"] = QueryProvider(\"MSSentinel\")\n prov_dict[\"mdatp_prov\"] = QueryProvider(\"MDE\")\n if _SPLUNK_IMP_OK:\n prov_dict[\"splunk_prov\"] = QueryProvider(\"Splunk\")\n prov_dict[\"ti_lookup\"] = TILookup()\n prov_dict[\"geolite\"] = GeoLiteLookup()\n\n if _IPSTACK_IMP_OK:\n prov_dict[\"ip_stack\"] = ip_stack_cls()\n return prov_dict", "def _initialize(self):\n configured_providers = self.domain.config[\"DATABASES\"]\n provider_objects = {}\n\n if configured_providers and isinstance(configured_providers, dict):\n if \"default\" not in configured_providers:\n raise ConfigurationError(\"You must define a 'default' provider\")\n\n for provider_name, conn_info in configured_providers.items():\n 
provider_full_path = conn_info[\"PROVIDER\"]\n provider_module, provider_class = provider_full_path.rsplit(\n \".\", maxsplit=1\n )\n\n provider_cls = getattr(\n importlib.import_module(provider_module), provider_class\n )\n provider = provider_cls(provider_name, self.domain, conn_info)\n\n provider_objects[provider_name] = provider\n\n self._providers = provider_objects", "def recognizing_clouds(cat):\n print(\"length in helpers\", len(cat))\n # finding clouds in catalogs\n base_table = pd.read_excel(r'/Users/shlomo/Desktop/Thesis/pythonProject/Combined plots/Other '\n r'catalogs/Table_Zucker.xlsx')\n\n names_col = []\n for cloud_number in range(len(base_table)):\n cloud_name = base_table[\"cloud\"][cloud_number]\n # getting locations from catalog\n locations_xyz = [base_table[\"x_pc\"][cloud_number], base_table[\"y_pc\"][cloud_number],\n base_table[\"z_pc\"][cloud_number]]\n # locations_lb = [base_table['l'][cloud_number], base_table['b'][cloud_number]]\n result = find_apt_line(locations_xyz, cat)[0]\n names_col.append([result, cloud_name])\n print([result, cloud_name])\n\n add_names(cat, names_col)\n return cat", "def _get_service_cloud(self, api_url: str) -> str:\n if api_url == \"https://cp-api.bons.ai\":\n return \"Prod\"\n elif api_url == \"https://stagingkube-cp-api.azdev.bons.ai\":\n return \"Staging\"\n elif api_url == \"https://preprodkube-cp-api.aztest.bons.ai\":\n return \"Preprod\"\n else:\n return \"Unknown api_url {}\".format(api_url)", "def get_cloud_provider(providername):\n return jsonify(admin.get_provider(current_app.scoped_session(), providername))", "def _build_observation_providers(self) -> Dict[str, ObservationProvider]:\n pass", "def _extract_catalog(self, data):\n interface = 'public'\n catalog = data['token']['catalog']\n service_map = {}\n for service in catalog:\n service_endpoint = None\n for endpoint in service['endpoints']:\n if endpoint['interface'] == interface:\n service_endpoint = endpoint['url']\n break\n if service_endpoint:\n service_map[service['type']] = service_endpoint\n LOG.debug('Service catalog: %s' % service_map)\n return service_map", "def cloud_information(self):\n url = \"%s/state/teams/%s/cloud\" % (self.url, self.identifier, )\n return perform_request(url)", "def get_providers(self):\n datasets = [\n \"Heineken\",\n \"Eisenbahn\",\n \"Corona\",\n \"Brahma\",\n \"Skol\",\n \"Bohemia\"\n ]\n return datasets", "def obj_initialization(cls):\n listimdata = cls.retrieve_json()\n for elem in listimdata:\n CloudCtx.retrieve_from_json(elem)", "def get_providers(self):\n return [\"Rainfall\", \"Average Rainfall Sea\", \"Average Rainfall Land\"]", "def __init__(self, name, group_resources, group_yamls):\n self.log = logging.getLogger(__name__)\n self.name = name\n self.clouds = {}\n self.group_resources = group_resources\n self.group_yamls = group_yamls", "def get_gcp(resultset, providerversion):\n rundate = datetime.datetime.utcnow()\n providerversion[\"GCP\"] = rundate.strftime('%Y/%m/%d %H:%M:%S')\n resultset = parse_digtxt(GCPROOT,resultset)\n return resultset, providerversion", "def get_providers(self):\n return [\"Wind\", \"Average Wind Sea\", \"Average Wind Land\"]", "def get_providers(self):\n \n r = requests.get(\n self._url('/dataproviders'),\n headers={'Authorization': self.token},\n proxies=self.proxy)\n r.raise_for_status()\n providers = r.json()\n self.providers = [p['name'] for p in providers if (p['user'] is not None and p['user']!='SCRIPTING ENGINE')]\n log.info('{:d} providers found'.format(len(self.providers)))\n\n return", "def 
init_cloud_api(self, args=None):\n pass", "def create_cloud_provider(providername):\n backend_name = request.get_json().get(\"backend\")\n service_name = request.get_json().get(\"service\")\n response = jsonify(\n admin.create_provider(\n current_app.scoped_session(),\n providername,\n backend=backend_name,\n service=service_name,\n )\n )\n return response", "def cloud_type(self) -> str:\n return pulumi.get(self, \"cloud_type\")", "def test_get_cloud_resources(self):\n pass", "def read_callback():\n\n # Walk through the existing environments\n for name in ENVIRONMENT:\n env = ENVIRONMENT[name]\n collectd.info(\"read_callback: entering environment: \" + name)\n\n # Connects to vCenter Server\n service_instance = SmartConnect(\n host=env[\"host\"], user=env[\"username\"], pwd=env[\"password\"]\n )\n performance_manager = service_instance \\\n .RetrieveServiceContent() \\\n .perfManager\n\n # Walk through all Clusters of Datacenter\n for datacenter in service_instance \\\n .RetrieveServiceContent() \\\n .rootFolder.childEntity:\n if datacenter._wsdlName == \"Datacenter\":\n for compute_resource in datacenter.hostFolder.childEntity:\n if compute_resource._wsdlName == \\\n \"ComputeResource\" \\\n or compute_resource._wsdlName == \\\n \"ClusterComputeResource\":\n cluster_name = \\\n compute_resource.name if env['use_friendly_name'] \\\n else compute_resource._moId\n # Walk throug all hosts in cluster, collect its metrics\n # and dispatch them\n collectd.info(\n \"read_callback: found %d hosts in cluster %s\" % (\n len(compute_resource.host),\n compute_resource.name\n )\n )\n if len(env['host_counter_ids']) > 0:\n collet_metrics_for_entities(\n service_instance,\n performance_manager,\n env['host_counter_ids'],\n compute_resource.host,\n cluster_name,\n env\n )\n\n # Walk throug all vms in host, collect its metrics and\n # dispatch them\n for host in compute_resource.host:\n if host._wsdlName == \"HostSystem\":\n collectd.info(\n \"read_callback: found %d vms in host %s\" % (\n len(host.vm), host.name\n )\n )\n if len(env['vm_counter_ids']) > 0:\n collet_metrics_for_entities(\n service_instance,\n performance_manager,\n env['vm_counter_ids'],\n host.vm,\n cluster_name,\n env\n )\n Disconnect(service_instance)", "def cloud_map(sky):\n cloud_map = {\n 'NSC': 0,\n 'NCD': 0,\n 'CLR': 0,\n 'FEW': 2,\n 'SCT': 6,\n 'BKN': 8,\n 'OVC': 10\n }\n return list(map(lambda s: (cloud_map[s[0]], s[1].value() if s[1] else 0), sky))", "def merge(pointclouds, pctype=PointCloud):\n sizes = [len(pc) for pc in pointclouds]\n arr = np.empty((3, sum(sizes)), dtype=_DTYPE)\n \n # Build up array from pcs\n i = 0\n for pc, size in zip(pointclouds, sizes):\n j = i + size\n arr[:,i:j] = pc.arr\n i = j\n return pctype(arr)", "def got_info(self, cloud_obj):", "def __init__(self, name, supply_centers):\r\n self.countries = {}\r\n self.name = name\r\n self.supply_centers = supply_centers", "async def init_provider(self):\n self.dsp_name = \"OpenStack\"\n await self._provider.init(image_names=self.config[\"images\"].values())", "def test_get_team_owned_clouds(self):\n pass", "def populate_datacenter_cache(self):\n self._datacenter_cache = {}\n dcs = Datacenter.search()\n for dc in dcs:\n self._datacenter_cache[dc.api_id] = dc", "def cloud(self, cloud):\n\n self._cloud = cloud", "def fill_cites(self):\n response = requests.get(\"https://restcountries.eu/rest/v2/all\")\n json_content = json.loads(response.text)\n i = 0\n for t in json_content:\n currency = t[\"currencies\"][0][\"code\"]\n pop = t[\"population\"]\n state_name = 
t[\"name\"]\n self.cities_from_api[t[\"capital\"].lower()] = [str(state_name), str(currency), str(pop)]", "def get_storage_providers_2(self, **kw):\n return (200, {}, {\"storage_provider\":\n {\n \"backend_type\": \"svc\",\n \"volume_count\": \"null\",\n \"service\": {\n \"host_display_name\": \"shared_v7000_1\",\n \"host\": \"shared_v7000_1\",\n \"id\": 4\n },\n \"backend_id\": \"00000200A0204C30\",\n \"health_status\": {\n \"health_value\": \"OK\"\n },\n \"free_capacity_gb\": 873.5,\n \"total_capacity_gb\": 1115.5,\n \"storage_hostname\": \"shared_v7000_1\",\n \"id\": 2,\n \"backend_state\": \"running\"\n }})", "def cloud_platform(self, cloud_platform):\n\n self._cloud_platform = cloud_platform", "def provider_list(cls, args, config):\n # print \"MOLNSProvider.provider_list(args={0}, config={1})\".format(args, config)\n providers = config.list_objects(kind='Provider')\n if len(providers) == 0:\n print \"No providers configured\"\n else:\n table_data = []\n for p in providers:\n table_data.append([p.name, p.type])\n # table_print(['name', 'type'], table_data)\n r = {'type': 'table', 'column_names': ['name', 'type'], 'data': table_data}\n return r", "def tag_cloud():\n\n return LOAD('plugin_wiki','cloud')", "def cloud_name(self, cloud_name):\n\n self._cloud_name = cloud_name", "def _retrieve(self):\n projects = (proj_dao\n .ProjectDao(self.global_configs)\n .get_projects(self.cycle_timestamp))\n backend_services = {}\n for project in projects:\n project_backend_services = self.safe_api_call(\n 'get_backend_services', project.id)\n if project_backend_services:\n backend_services[project.id] = project_backend_services\n\n return backend_services", "def get_cloud_detail(sky):\n debug(\"Getting cloud details\")\n clouds = cloud_map(sky)\n debug(\"There are {} clouds listed in the Metar\".format(len(clouds)))\n thickest = thickest_clouds(clouds)\n debug(\"Found thickest clouds: thick: {} -- base {}\".format(thickest[0], thickest[1]))\n return {\n \"thickness\": thickest[0],\n \"base\": thickest[1]\n }", "def __init__(self, cloud_info, owner=None, date_str=None):\n\n # this is stored so it can be used to initialize the thread specific driver\n self.cloud_info = cloud_info\n self.cloud = cloud_info[0]\n self.key = cloud_info[1]\n self.secret = None\n # self.container_name = 'smcs-123'\n if self.isJson(cloud_info[2]):\n self.secret = cloud_info[2]['private_key']\n else:\n self.secret = cloud_info[2]\n\n self.cls = None\n if self.cloud in providers.DRIVERS:\n self.cls = get_driver(self.cloud)\n else:\n print(\"cloud\", cloud_info[0], \"not supported\")\n\n self.driver = self.cls(*cloud_info[1:])\n self.container = None\n\n self.containers = self.listContainersWithPrefix('smcs-')\n if not self.containers:\n self.containers.append(self.createContainer())\n\n self.files = []\n self.metaData = {'meta_data': {}}\n self.setMetaData(owner, date_str)\n pass", "def name_places(self):\n self.city_names = {}\n self.region_names = {}\n for city in self.cities:\n self.city_names[city] = self.lang.name(\"city\")\n for region in np.unique(self.territories):\n self.region_names[region] = self.lang.name(\"region\")", "def _fetch_providers(self, looking_for, providers=None):\n if providers is None:\n providers = self._reverse_mapping.get(looking_for, [])\n default_providers = []\n atom_providers = []\n for p in providers:\n if p.name in (_TRANSIENT_PROVIDER, self.injector_name):\n default_providers.append(p)\n else:\n atom_providers.append(p)\n return default_providers, atom_providers", "def convertcloud(points):\n 
pcd = open3d.geometry.PointCloud()\n pcd.points = open3d.utility.Vector3dVector(points)\n return pcd", "def initialize_client():\n logging.info('Initializing Sendgrid provider')\n sendgrid_authentication, sendgrid_username = get_provider_credentials('sendgrid') \n sendgrid_provider = SendGridProvider(sendgrid_authentication, sendgrid_username)\n\n logging.info('Initializing Mailgun provider')\n mailgun_authentication, mailgun_domain = get_provider_credentials('mailgun')\n mailgun_provider = MailGunProvider(mailgun_authentication, mailgun_domain)\n\n logging.info('Registering providers')\n client.register_provider(sendgrid_provider, 10)\n client.register_provider(mailgun_provider, 20)", "def main():\n\n data = get_data(URL)\n\n if not data:\n raise ValueError('No data to process')\n\n datacenters = [\n Datacenter(key, value)\n for key, value in data.items()\n ]\n\n pass # the rest of your logic here", "def get_supported_services(sites): \n services_key_mapping_list = []\n supported_services = SupportedServices()\n for site in sites:\n key = re.match('^(http|https)://(.*).(com|net|org)$',site['site_url']).group(2)\n supported_services.keys.append(key)\n services_key_mapping_list.append((key, site))\n \n supported_services.info = dict(services_key_mapping_list)\n 'need to order keys'\n supported_services.keys = order_supported_services_keys(supported_services.keys)\n return supported_services", "def __init__(self, allow_replace=False):\n self.providers = {}\n self.allow_replace = allow_replace", "def create_cloud_constants(self):\n np_clouds = [self.dataset.clouds[n] for n in self.dataset.cloud_names]\n # Create the slice indices as float32, as they'll only be used with\n # tf.gather which has no GPU kernel for integers.\n cloud_slice_indices = np.cumsum([0] + [len(c) for c in np_clouds],\n dtype=np.float32)\n tf_clouds = tf.constant(np.concatenate(np_clouds), dtype=tf.float32)\n return tf_clouds, cloud_slice_indices", "def pycloud(ctx, log_level):\n KeyPairStorage.initialize()\n PyCloudConfig.initialize_state_mgmt()\n\n ctx.obj = {}\n configure_logger(level=log_level)\n return 0", "def _get_subcloud_users(self):\n DEFAULT_SERVICE_PROJECT_NAME = 'services'\n # First entry is openstack user name, second entry is the user stored\n # in keyring. 
Not sure why heat_admin uses a different keystone name.\n SUBCLOUD_USERS = [\n ('nova', 'nova'),\n ('placement', 'placement'),\n ('sysinv', 'sysinv'),\n ('patching', 'patching'),\n ('heat', 'heat'),\n ('ceilometer', 'ceilometer'),\n ('vim', 'vim'),\n ('aodh', 'aodh'),\n ('panko', 'panko'),\n ('mtce', 'mtce'),\n ('cinder', 'cinder'),\n ('glance', 'glance'),\n ('neutron', 'neutron'),\n ('heat_admin', 'heat-domain'),\n ('gnocchi', 'gnocchi'),\n ('fm', 'fm')\n ]\n\n user_list = list()\n for user in SUBCLOUD_USERS:\n password = keyring.get_password(user[1],\n DEFAULT_SERVICE_PROJECT_NAME)\n if password:\n user_dict = dict()\n user_dict['name'] = user[0]\n user_dict['password'] = password\n user_list.append(user_dict)\n else:\n LOG.error(\"User %s not found in keyring as %s\" % (user[0],\n user[1]))\n pecan.abort(500, _('System configuration error'))\n\n return user_list", "def map_services(environment):\n url_map = {}\n for service, local_port in SERVICES.items():\n if environment == \"production\":\n url_map[service] = production_url(service)\n if environment == \"development\":\n url_map[service] = local_url(local_port)\n return url_map", "def _init_cloud(self, cloud_arg):\n # Disable too broad exception warning\n # pylint: disable=W0703\n cloud = None\n if cloud_arg:\n try:\n if hasattr(self.args, \"cm\"):\n cloud_module = self.args.cm if self.args.cm else None\n self.logger.info(\"Creating cloud module {}.\".format(cloud_module))\n else:\n cloud_module = None\n\n cloud = Cloud(host=None, module=cloud_module, logger=self.logger, args=self.args)\n except Exception as error:\n self.logger.warning(\"Cloud module could not be initialized: {}\".format(error))\n cloud = None\n return cloud", "def get(self):\n return get_all_provider()", "def create_cloud(header, fields, points):\n\t\n\tcloud_struct = struct.Struct(_get_struct_fmt(False, fields))\n\t\n\tbuff = ctypes.create_string_buffer(cloud_struct.size * len(points))\n\t\n\tpoint_step, pack_into = cloud_struct.size, cloud_struct.pack_into\n\toffset = 0\n\t\n\tfor p in points:\n\t\tpack_into(buff, offset, *p)\n\t\toffset += point_step\n\treturn PointCloud2(header=header,\n\t\t\t\t\t\theight=1,\n\t\t\t\t\t\twidth=len(points),\n\t\t\t\t\t\tis_dense=False,\n\t\t\t\t\t\tis_bigendian=False,\n\t\t\t\t\t\tfields=fields,\n\t\t\t\t\t\tpoint_step=cloud_struct.size,\n\t\t\t\t\t\trow_step=cloud_struct.size * len(points),\n\t\t\t\t\t\tdata=buff.raw)", "def create_providers(cls) -> Iterable['BaseProvider']:\n return []", "def create_providerinfo(self, setup=None):\n pcr_class = self.server.message_factory.get_response_type(\n \"configuration_endpoint\"\n )\n _provider_info = copy.deepcopy(self.capabilities.to_dict())\n\n if self.jwks_uri and self.keyjar:\n _provider_info[\"jwks_uri\"] = self.jwks_uri\n\n for endp in self.endp:\n if not self.baseurl.endswith(\"/\"):\n baseurl = self.baseurl + \"/\"\n else:\n baseurl = self.baseurl\n _provider_info[\"{}_endpoint\".format(endp.etype)] = urljoin(\n baseurl, endp.url\n )\n\n if setup and isinstance(setup, dict):\n for key in pcr_class.c_param.keys():\n if key in setup:\n _provider_info[key] = setup[key]\n\n _provider_info[\"issuer\"] = self.name\n _provider_info[\"version\"] = \"3.0\"\n\n return pcr_class(**_provider_info)", "def configure_cloud(self):\n \n # if API check fails, log error output - actually probably not since we have interactivity here\n again = True\n while again:\n # print info\n print(\"Welcome to the cloud configuration center.\\n\")\n print(\"Here you can enter your plant ID and activation key 
to link it to \"\n \"the cloud and enable data uploads.\")\n print(\"The activation details for the plants you own are available on the web \"\n \"application\")\n # get plant ID from user\n entered_id = input(\n \"Please enter the plant ID (enter nothing to cancel): \")\n if entered_id != \"\":\n # get plant key/password from user\n entered_key = input(\n \"Please enter the plant activation key (enter nothing to cancel): \")\n if entered_key != \"\":\n hasher = hashlib.sha256()\n hasher.update(bytes(entered_key, 'utf-8'))\n # Uncomment this line and comment the one after if want hashing\n # hashed_key = hasher.hexdigest()\n hashed_key = entered_key\n # verify entered details\n verified = self.verify_plant(entered_id, hashed_key)\n\n if verified:\n # save details to file if valid, exit cloud center\n json_info = {\"plant_id\": entered_id,\n \"plant_key\": hashed_key}\n with open(CONFIG_FILE_PATH, 'w+') as config_file:\n json.dump(json_info, config_file)\n print(\"Successful cloud link! \\n\")\n again = False\n else:\n # option to try again if verification failed (invalid details)\n print(\"Error: plant ID or activation key is incorrect.\")\n user_choice = input(\"Try again? (y/n): \")\n if user_choice.lower() == \"y\":\n again = True\n else:\n again = False\n else:\n # entering nothing exits the cloud center\n again = False\n else:\n # entering nothing exits the cloud center\n again = False", "def __init__(self, name=None):\n\n conf = Config()[\"cloudmesh\"]\n super().__init__(name)\n\n self.user = Config()[\"cloudmesh\"][\"profile\"][\"user\"]\n self.spec = conf[\"cloud\"][name]\n self.cloud = name\n\n self.default = self.spec[\"default\"]\n self.cloudtype = self.spec[\"cm\"][\"kind\"]\n\n self.cred = self.spec[\"credentials\"]\n self.default = self.spec[\"default\"]\n self.project_id = self.cred[\"auth\"][\"project_id\"]\n\n # pprint(self.cred)\n\n self.cloudman = openstack.connection.Connection(**self.cred)\n\n # self.default_image = deft[\"image\"]\n # self.default_size = deft[\"size\"]\n # self.default.location = cred[\"datacenter\"]\n\n try:\n self.public_key_path = conf[\"profile\"][\"publickey\"]\n self.key_path = path_expand(\n Config()[\"cloudmesh\"][\"profile\"][\"publickey\"])\n f = open(self.key_path, 'r')\n self.key_val = f.read()\n except:\n raise ValueError(\"the public key location is not set in the \"\n \"profile of the yaml file.\")", "def map_services(environment):\n url_map = {}\n for service, local_port in SERVICES.items():\n if environment == 'production':\n url_map[service] = production_url(service)\n if environment == 'development':\n url_map[service] = local_url(local_port)\n return url_map", "def network_service_providers(self):\n path = '/v2.0/service-providers'\n res = self.network.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack network service providers: %s' % \n truncate(res))\n return res[0]['service_providers']", "def _makeimap(self):\n self.map_['source'] = 'GOES'\n self.map_['provider'] = 'NOAA'\n self.map_['instrument'] = 'SUVI'\n self.map_['physobs'] = 'flux'", "def initializeCollection():\n return {SENSOR1:[], SENSOR2:[], SENSOR3:[],SENSOR4:[], DATE:[]}", "def __init__(self, name):\n self.cloud = name\n self.cloudtype = \"multipass\"\n config = Config()\n self.default = config[f\"cloudmesh.volume.{self.cloud}.default\"]\n self.cm = CmDatabase()", "def getProvidersReferences(self):\n field = self.getWrappedField('provider')\n providers = list(field._Vocabulary(self).items())\n providers.sort(lambda a, 
b: cmp(a[1].lower(), b[1].lower()))\n return atapi.DisplayList(providers)", "def setup_aws_data():\n\n target_project_arn = setup_project(PROJECT_NAME)\n device_pool_arn = setup_device_pool(target_project_arn, DEVICE_POOL_NAME)\n get_device_info(target_project_arn)", "def get_providers(self):\n return [\"Temperature\", \"Average Temperature Sea\", \"Average Temperature Land\"]", "def get_cloud_name(self, cm_user_id):\n try:\n return self.cm_user.get_defaults(cm_user_id)['nova-cloud']\n except KeyError:\n log.error('Set OpenStack as a default cloud.'\n '\"stack\" ONLY works with openstack platform.')\n return None", "def readCloud(self):\n Cldict = self.config.Cl\n cloudFile = os.path.join(self.config.path, self.config.cloudFile[self.idnum])\n\n self.cloud = []\n value_sorted_Cldict_keys = sorted(Cldict, key=lambda k: Cldict[k])\n for k in value_sorted_Cldict_keys:\n self.cloud.append([])\n self.nParticulate = len(Cldict.keys())\n with open(cloudFile, \"r\") as fp:\n expected_number = utils.get_expected_number_of_entries(fp)\n for line in fp:\n cval = utils.get_data_from(line)\n if cval is None or len(cval) != expected_number:\n continue\n for n in range(self.nParticulate): # Initialize all of the particulates to 0.0\n if n < len(cval):\n self.cloud[n].append(cval[n])\n else:\n self.cloud[n].append(0.0)\n self.nCloud = len(self.cloud[0])\n # ##Redo the particulate dictionary for the self.cloud index positions\n nid, sk = utils.invertDictionary(Cldict)\n for i, k in enumerate(sk):\n Cldict[nid[k]] = i\n self.config.Cl = Cldict\n\n self.cloud = np.array(self.cloud)\n # ## Check that P is monotonically increasing\n monotonic = np.all(np.diff(self.cloud[self.config.Cl['P']]) > 0.0)\n if not monotonic:\n self.cloud = np.fliplr(self.cloud)\n monotonic = np.all(np.diff(self.cloud[self.config.Cl['P']]) > 0.0)\n if not monotonic:\n raise ValueError(\"Pressure not monotonically increasing in {}\".format(cloudFile))\n\n # ## Renormalize so that deepest z is 0 and set DZ\n self._renorm_z('cloud')", "def cloud_init(name, vm_=None, **kwargs):\n init_interface = cloud_init_interface(name, vm_, **kwargs)\n name = init_interface.pop(\"name\", name)\n return init(name, **init_interface)", "def get_consumer_type_map():\n global _CONSUMER_TYPE_MAP\n if _CONSUMER_TYPE_MAP is not None:\n return _CONSUMER_TYPE_MAP\n tbl = db.get_table('consumer_types')\n sel = sa.select([tbl.c.id, tbl.c.code])\n sess = db.get_session()\n _CONSUMER_TYPE_MAP = {r[1]: r[0] for r in sess.execute(sel)}\n return _CONSUMER_TYPE_MAP", "def _makeimap(self):\n self.map_[\"source\"] = \"nasa\"\n self.map_[\"instrument\"] = \"goes\"\n self.map_[\"physobs\"] = \"irradiance\"\n self.map_[\"provider\"] = \"sdac\"", "def getCloud(self, mjd, config):\n w= 'cloud'\n cloud = self._checkWeather(mjd, w, config)\n return cloud", "def get_aws(verbosity, resultset, providerversion):\n try:\n response = requests.get(AWSAPIURL)\n if verbosity:\n print(response.status_code)\n if response.status_code == 200:\n cidrdata = json.loads(response.content)\n providerversion[\"AWS\"] = cidrdata[\"createDate\"]+\" \"+cidrdata[\"syncToken\"]\n for i in range(0, len(cidrdata[\"prefixes\"])):\n if cidrdata[\"prefixes\"][i][\"ip_prefix\"] not in resultset:\n resultset[cidrdata[\"prefixes\"][i][\"ip_prefix\"]] = \"AWS\"\n for i in range(0, len(cidrdata[\"ipv6_prefixes\"])):\n if cidrdata[\"ipv6_prefixes\"][i][\"ipv6_prefix\"] not in resultset:\n resultset[cidrdata[\"ipv6_prefixes\"][i][\"ipv6_prefix\"]] = \"AWS\"\n except Exception as get_exception:\n 
print(\"Exception\")\n print(get_exception)\n\n return resultset, providerversion", "def global_service_collection():\n\tglobal global_lsc\n\t# If this is the first call then the object is not yet created\n\tif not global_lsc:\n\t\t# Create the global object\n\t\tglobal_lsc = LadonServiceCollection()\n\treturn global_lsc", "def get_region_services(self,format=None):\n clients = HWIOS.pb_server.get_clients()\n region_services = []\n for client in clients:\n region_services.extend(client.region_services)\n #for django forms\n if format == 'tuple':\n tuple_list = []\n for region_service in region_services:\n tuple_list.append((region_service['uuid'],region_service['name']))\n return tuple_list\n return region_services", "def selectable_services():\n\n db = current.db\n s3db = current.s3db\n\n stable = s3db.org_service\n query = (stable.deleted == False)\n rows = db(query).select(stable.id,\n stable.name,\n )\n services = {row.id: row.name for row in rows}\n return services", "def _add_services(self):\n this_service = {'name': '{{ metadata.package }}'}\n other_services = [\n {'name': 'mysql',\n 'location': 'cs:percona-cluster',\n 'constraints': {'mem': '3072M'}},\n {'name': 'rabbitmq-server'},\n {'name': 'keystone'},\n {'name': 'manila'}\n ]\n super(ManilaPluginCharmDeployment, self)._add_services(\n this_service, other_services)", "def construct_global_ctx(self):\n super().construct_global_ctx()\n gtx = self.gtx\n rc = self.rc\n if \"groups\" in self.needed_dbs:\n rc.pi_id = get_pi_id(rc)\n rc.coll = f\"{TARGET_COLL}\"\n try:\n if not rc.database:\n rc.database = rc.databases[0][\"name\"]\n except:\n pass\n colls = [\n sorted(\n all_docs_from_collection(rc.client, collname), key=_id_key\n )\n for collname in self.needed_dbs\n ]\n for db, coll in zip(self.needed_dbs, colls):\n gtx[db] = coll\n gtx[\"all_docs_from_collection\"] = all_docs_from_collection\n gtx[\"float\"] = float\n gtx[\"str\"] = str\n gtx[\"zip\"] = zip", "def readProvidersFromDatabase(self, database):\n\t\ttry:\n\t\t\tdb = sqlite3.connect(database)\n\t\t\tc = db.cursor()\n\t\t\tc.execute(\"SELECT * FROM Providers\")\n\t\t\tfetched = c.fetchall()\n\t\t\tfor p in fetched:\n\t\t\t\tself.prvList.append(format3.Provider(p[1], p[2], p[3], petlib.pack.decode(p[4])))\n\t\texcept Exception, e:\n\t\t\tprint \"[%s] > Error during reading from the database: %s\" % (self.name, str(e))", "def provider_initialize(cls, provider_name, config):\n try:\n provider_obj = config.get_object(provider_name, kind='Provider')\n print \"Provider object {0}\".format(provider_obj)\n except DatastoreException as e:\n raise MOLNSException(\"provider not found\")\n #\n print \"Checking all config artifacts.\"\n # check for ssh key\n if provider_obj['key_name'] is None or provider_obj['key_name'] == '':\n print \"Error: no key_name specified.\"\n return\n elif not provider_obj.check_ssh_key():\n print \"Creating key '{0}'\".format(provider_obj['key_name'])\n provider_obj.create_ssh_key()\n else:\n print \"SSH key={0} is valid.\".format(provider_obj['key_name'])\n\n # check for security group\n if provider_obj['group_name'] is None or provider_obj['group_name'] == '':\n print \"Error: no security group specified.\"\n return\n elif not provider_obj.check_security_group():\n print \"Creating security group '{0}'\".format(provider_obj['group_name'])\n provider_obj.create_seurity_group()\n else:\n print \"security group={0} is valid.\".format(provider_obj['group_name'])\n\n # check for MOLNS image\n if provider_obj['molns_image_name'] is None or 
provider_obj['molns_image_name'] == '':\n if provider_obj['ubuntu_image_name'] is None or provider_obj['ubuntu_image_name'] == '':\n print \"Error: no ubuntu_image_name given, can not create molns image.\"\n else:\n print \"Creating new image, this process can take a long time (10-30 minutes).\"\n provider_obj['molns_image_name'] = provider_obj.create_molns_image()\n elif not provider_obj.check_molns_image():\n print \"Error: a molns image ID was provided, but it does not exist.\"\n return\n\n print \"Success.\"\n config.save_object(provider_obj, kind='Provider')", "def __init_services(self, base_url, repository):\n self.rest_services = {}\n self.rest_services[\"protocol\"] = base_url+\"/protocol\"\n self.rest_services[\"repositories\"] = base_url+\"/repositories\"\n self.rest_services[\"repository\"] = base_url+\"/repositories/{}\".format(repository)\n self.rest_services[\"statements\"] = self.rest_services[\"repository\"]+\"/statements\"\n self.rest_services[\"contexts\"] = self.rest_services[\"repository\"]+\"/contexts\"\n self.rest_services[\"size\"] = self.rest_services[\"repository\"]+\"/size\"\n self.rest_services[\"transaction\"] = self.rest_services[\"repository\"]+\"/transactions\"", "def cloud():\n log.debug(\"Initializing Cloud API fixture\")\n\n api_gw = os.getenv('PELION_CLOUD_API_GW', 'https://api.us-east-1.mbedcloud.com')\n api_key = os.environ['PELION_CLOUD_API_KEY']\n cloud_api = PelionCloud(api_gw, api_key)\n\n payload = {'name': 'pelion_e2e_dynamic_api_key'}\n r = cloud_api.account.create_api_key(payload, expected_status_code=201)\n resp = r.json()\n cloud_api.rest_api.set_default_api_key(resp['key'])\n\n yield cloud_api\n\n log.debug('Cleaning out the Cloud API fixture')\n headers = {'Authorization': 'Bearer {}'.format(api_key)}\n cloud_api.account.delete_api_key(resp['id'], headers=headers, expected_status_code=204)", "def test_get_name(self):\n provider = GCPLocalProvider()\n self.assertEqual(provider.name(), Provider.PROVIDER_GCP_LOCAL)", "def contract_map():\n\n raw = run_cmd('/bin/svcs -vHo ctid,fmri')\n ret = {}\n\n for l in raw:\n ct, svc = l.split()\n if ct != '-':\n ret[ct] = svc\n\n return ret", "def get_cloud_info(location):\n params = dict()\n # Read in the file\n with open(location, 'r') as myfile: data=myfile.read()\n obj = json.loads(data)\n for o in obj:\n params[o] = obj[o]['value']\n return params", "def lambda_handler(event, context):\n\n resp = {}\n\n with conn.cursor() as cur:\n \n \n # provider = []\n # resp['DES_Provider'] = provider\n # cur.execute(\"SELECT * FROM `DES_NAME`\")\n # rows = cur.fetchall()\n \n # for row in rows:\n # info = {}\n # info['name'] = row[1]\n # info['website'] = row[2]\n # provider.append(info)\n \n # print(\"{0} {1} {2}\".format(row[0], row[1], row[2]))\n \n # cur.execute(\"SELECT * FROM `DES_SERVICE` WHERE `DES_ID` = 1\")\n # rows = cur.fetchall()\n # service = []\n # resp['DES_Service'] = service\n \n # for row in rows:\n # info = {}\n # info['program'] = row[0]\n # info['speciality'] = row[1]\n # info['rating'] = row[2]\n # service.append(info)\n \n sql = '''SELECT * FROM DES_NAME a INNER JOIN DES_SERVICE b ON a.DES_ID = b.DES_ID INNER JOIN DES_SITE c ON b.SITE_ID = c.SITE_ID ORDER BY RAND() LIMIT 10'''\n \n cur.execute(sql)\n rows = cur.fetchall()\n all_info = []\n resp['All_Info'] = all_info\n \n for row in rows:\n info = {}\n info['name'] = row[1]\n info['website'] = row[2]\n info['program'] = row[3]\n info['speciality'] = row[4]\n info['rating'] = row[5]\n info['speciality_new'] = row[8]\n 
info['speciality_group'] = row[9]\n info['address'] = row[13]\n info['site_location'] = row[14]\n info['postal'] = row[20]\n info['url'] = row[21]\n info['phone'] = row[22]\n info['email'] = row[23]\n\n all_info.append(info)\n \n sql3 = '''SELECT DISTINCT Name FROM DES_NAME'''\n cur.execute(sql3)\n rows = cur.fetchall()\n name_list = []\n for row in rows:\n name_list.append(row[0])\n resp['Name_List'] = name_list\n \n sql4 = '''SELECT DISTINCT Speciality_Group FROM DES_SERVICE'''\n cur.execute(sql4)\n rows = cur.fetchall()\n speciality_list = []\n for row in rows:\n speciality_list.append(row[0])\n resp['Speciality_List'] = speciality_list\n \n # sql5 = '''SELECT * FROM DES_PERFORMANCE'''\n \n # cur.execute(sql5)\n # rows = cur.fetchall()\n # all_info = []\n # resp['Performance'] = all_info\n \n # for row in rows:\n # info = {}\n # info['year'] = row[0]\n # info['month'] = row[1]\n # info['referred'] = row[2]\n # info['suspended'] = row[3]\n # info['commenced'] = row[4]\n # info['total'] = row[5]\n # info['commenced_employment'] = row[6]\n # info['commenced_placement'] = row[7]\n # info['commenced_ongoing'] = row[8]\n # # info['mom'] = str(row[9])\n # # info['direction'] = row[10]\n # # info['referred_p'] = str(row[11])\n # # info['suspended_p'] = str(row[12])\n # # info['commenced_p'] = str(row[13])\n # # info['commenced_employment_p'] = str(row[14])\n # # info['commenced_placement_p'] = str(row[15])\n # # info['commenced_ongoing_p'] = str(row[16])\n # info['mom'] = row[9]\n # info['direction'] = row[10]\n # info['referred_p'] = row[11]\n # info['suspended_p'] = row[12]\n # info['commenced_p'] = row[13]\n # info['commenced_employment_p'] = row[14]\n # info['commenced_placement_p'] = row[15]\n # info['commenced_ongoing_p'] = row[16]\n \n\n # all_info.append(info)\n \n sql6 = '''SELECT DISTINCT Year FROM DES_PERFORMANCE ORDER BY Year DESC'''\n \n cur.execute(sql6)\n rows = cur.fetchall()\n year_list = []\n resp['Year_List'] = year_list\n for row in rows:\n year_list.append(row[0])\n \n resp['Month_List'] = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']\n \n sql7 = '''SELECT * FROM `DES_PERFORMANCE` ORDER BY `DES_PERFORMANCE`.`Year` DESC LIMIT 1'''\n cur.execute(sql7)\n rows = cur.fetchall()\n latest_performance = []\n # latest_performance.append(row)\n resp['Latest_Performance'] = latest_performance\n for row in rows:\n info = {}\n info['year'] = row[0]\n info['month'] = row[1]\n info['referred'] = row[2]\n info['suspended'] = row[3]\n info['commenced'] = row[4]\n info['total'] = row[5]\n info['commenced_employment'] = row[6]\n info['commenced_placement'] = row[7]\n info['commenced_ongoing'] = row[8]\n # info['mom'] = str(row[9])\n # info['direction'] = row[10]\n # info['referred_p'] = str(row[11])\n # info['suspended_p'] = str(row[12])\n # info['commenced_p'] = str(row[13])\n # info['commenced_employment_p'] = str(row[14])\n # info['commenced_placement_p'] = str(row[15])\n # info['commenced_ongoing_p'] = str(row[16])\n info['mom'] = row[9]\n info['direction'] = row[10]\n info['referred_p'] = row[11]\n info['suspended_p'] = row[12]\n info['commenced_p'] = row[13]\n info['commenced_employment_p'] = row[14]\n info['commenced_placement_p'] = row[15]\n info['commenced_ongoing_p'] = row[16]\n info['date'] = row[17]\n\n latest_performance.append(info)\n \n # print(resp)\n \n # for row in cur:\n # item_count += 1\n # logger.info(row)\n #print(row)\n # conn.commit()\n \n # print(resp)\n\n return {\n 'statusCode': 200,\n 
'headers': {\n 'Access-Control-Allow-Headers': 'Content-Type',\n 'Access-Control-Allow-Origin': '*',\n 'Access-Control-Allow-Methods': 'OPTIONS,POST,GET'\n },\n 'body': json.dumps(resp, cls=DecimalEncoder)\n }", "def get_places():\n global app_id, rest_api_key, places\n\n if not places:\n connection = httplib.HTTPSConnection(PARSE_API_URL, PARSE_API_PORT)\n connection.connect()\n connection.request(\n method='GET',\n url=PLACES_ENDPOINT,\n headers={\"X-Parse-Application-Id\": app_id, \"X-Parse-REST-API-Key\": rest_api_key}\n )\n places = json.loads(connection.getresponse().read())\n\n return places", "def initialize_policies(self, policy_collection, options):\n from c7n.policy import Policy, PolicyCollection\n policies = []\n service_region_map, resource_service_map = get_service_region_map(\n options.regions, policy_collection.resource_types, self.type)\n if 'all' in options.regions:\n enabled_regions = {\n r['RegionName'] for r in\n get_profile_session(options).client('ec2').describe_regions(\n Filters=[{'Name': 'opt-in-status',\n 'Values': ['opt-in-not-required', 'opted-in']}]\n ).get('Regions')}\n for p in policy_collection:\n if 'aws.' in p.resource_type:\n _, resource_type = p.resource_type.split('.', 1)\n else:\n resource_type = p.resource_type\n available_regions = service_region_map.get(\n resource_service_map.get(resource_type), ())\n\n # its a global service/endpoint, use user provided region\n # or us-east-1.\n if not available_regions and options.regions:\n candidates = [r for r in options.regions if r != 'all']\n candidate = candidates and candidates[0] or 'us-east-1'\n svc_regions = [candidate]\n elif 'all' in options.regions:\n svc_regions = list(set(available_regions).intersection(enabled_regions))\n else:\n svc_regions = options.regions\n\n for region in svc_regions:\n if available_regions and region not in available_regions:\n level = ('all' in options.regions and\n logging.DEBUG or logging.WARNING)\n # TODO: fixme\n policy_collection.log.log(\n level, \"policy:%s resources:%s not available in region:%s\",\n p.name, p.resource_type, region)\n continue\n options_copy = copy.copy(options)\n options_copy.region = str(region)\n\n if len(options.regions) > 1 or 'all' in options.regions and getattr(\n options, 'output_dir', None):\n options_copy.output_dir = join_output(options.output_dir, region)\n policies.append(\n Policy(p.data, options_copy,\n session_factory=policy_collection.session_factory()))\n\n return PolicyCollection(\n # order policies by region to minimize local session invalidation.\n # note relative ordering of policies must be preserved, python sort\n # is stable.\n sorted(policies, key=operator.attrgetter('options.region')),\n options)", "def test_register_cloud(self):\n pass", "def get_cloud_from_controller():\n\n cmd = ['juju', 'show-controller', '--format=yaml']\n output = subprocess.check_output(cmd)\n if six.PY3:\n output = output.decode('utf-8')\n cloud_config = yaml.load(output)\n # There will only be one top level controller from show-controller,\n # but we do not know its name.\n assert len(cloud_config) == 1\n try:\n return list(cloud_config.values())[0]['details']['cloud']\n except KeyError:\n raise KeyError(\"Failed to get cloud information from the controller\")", "def get_oracle(verbosity, resultset, providerversion):\n try:\n response = requests.get(ORACLEAPIURL)\n if verbosity:\n print(response.status_code)\n if response.status_code == 200:\n cidrdata = json.loads(response.content)\n providerversion[\"ORACLE\"] = 
cidrdata[\"last_updated_timestamp\"]\n for i in range(0, len(cidrdata[\"regions\"])):\n for j in range(0, len(cidrdata[\"regions\"][i][\"cidrs\"])):\n if cidrdata[\"regions\"][i][\"cidrs\"][j][\"cidr\"] not in resultset:\n resultset[cidrdata[\"regions\"][i][\"cidrs\"][j][\"cidr\"]] = \"Oracle\"\n\n except Exception as get_exception:\n print(\"Exception\")\n print(get_exception)\n\n return resultset, providerversion", "def all_base_cloud_ids(self):\n raise NotImplementedError", "def available_services():\n all_datas = ()\n data = ()\n\n for class_path in settings.TH_SERVICES:\n class_name = class_path.rsplit('.', 1)[1]\n # 2nd array position contains the name of the service\n data = (class_name, class_name.rsplit('Service', 1)[1])\n all_datas = (data,) + all_datas\n return all_datas", "def main():\n\n # Setup `pysc` to use BASIC auth, with a username, and password. Also sets the endpoint to use.\n setup_sensorcloud_basic(CONSTS['SC_USERNAME'], CONSTS['SC_PASSWORD'],\n CONSTS['SC_ENDPOINT'], CONSTS['PYSC_DEBUG'])\n\n org_id = CONSTS['ORG_ID']\n\n # Ensure the organisation exists on the SensorCloud endpoint.\n try:\n organisation = pysc.models.Organisation.single(org_id)\n except KeyError:\n raise RuntimeWarning(\"\"\"The organisation named {:s} was not found.\\n\"\"\"\n \"\"\"Although the `pysc` api has functionality to create an organisation, it cannot \"\"\"\n \"\"\"do so on the sensor-cloud.io instance on AWS.\"\"\".format(org_id))\n # Ensure sanity, check we got the organisation that we asked for.\n assert (org_id == organisation.id)\n\n # Here we use the Group.resolve_all helper with organisation_id param to filter groups based on id\n # The resolve_all command is similar to .index() however it also calls .follow() on found link automatically,\n # _and_ it converts the resulting HAL objects into real valid `pysc` Group() objects.\n org_groups = pysc.models.Group.resolve_all(params={'organisation_id': org_id})\n # We are not likely to have more than 1000 groups, so we don't need to do return doc pagination here.\n for g in org_groups:\n group_id = g.id\n print(\"Found group: {:s}\".format(group_id))\n\n print(\"Found a total of {:d} groups for {:s} on that SensorCloud endpoint.\".format(len(org_groups), org_id))", "def __init__(self, provider_class, provider_type, label, origin, config):\n super(Provider, self).__init__()\n\n self.created = datetime.datetime.now()\n \"\"\"datetime: The creation time of this document\"\"\"\n\n self.modified = datetime.datetime.now()\n \"\"\"datetime: The last modified time of this document\"\"\"\n\n self.provider_class = ProviderClass(provider_class)\n \"\"\"ProviderClass: The class of provider, either compute or storage\"\"\"\n\n self.provider_type = provider_type\n \"\"\"str: The type (or host) of the provider. (e.g. static, gcloud, etc)\"\"\"\n\n self.label = label\n \"\"\"str: The human-readable provider label\"\"\"\n\n self.origin = origin\n \"\"\"dict: The origin (e.g. user) of the provider\"\"\"\n\n self.config = config\n \"\"\"dict: The provider-specific configuration\"\"\"" ]
[ "0.5848229", "0.5848229", "0.5762712", "0.5732575", "0.55106837", "0.5443013", "0.54311204", "0.5407812", "0.5398516", "0.5377608", "0.5274966", "0.52417177", "0.5234368", "0.52056146", "0.5200299", "0.5192333", "0.51870906", "0.51459664", "0.5107439", "0.50989896", "0.5087255", "0.5071367", "0.5060922", "0.50516266", "0.5020789", "0.5019573", "0.50130004", "0.4989005", "0.4985684", "0.49796763", "0.4969917", "0.49530387", "0.49403435", "0.4917975", "0.49161592", "0.4893041", "0.48891115", "0.48713607", "0.4869341", "0.4850112", "0.4841994", "0.4841366", "0.48248893", "0.4821743", "0.47992146", "0.47794345", "0.47704756", "0.473432", "0.47214004", "0.4711734", "0.47086176", "0.47003838", "0.46970284", "0.46958876", "0.46846893", "0.468119", "0.46722606", "0.4672012", "0.4671675", "0.46621016", "0.4661352", "0.46605158", "0.46574268", "0.46565795", "0.46564558", "0.465618", "0.4643758", "0.46410987", "0.4641008", "0.46380645", "0.46227774", "0.4621463", "0.46148068", "0.4607298", "0.4606681", "0.4605121", "0.4599802", "0.45947248", "0.45855793", "0.4585125", "0.45762357", "0.45698166", "0.4567425", "0.45654988", "0.45644104", "0.45640358", "0.45525044", "0.45499858", "0.45444334", "0.45443317", "0.45402455", "0.45399848", "0.4539745", "0.45352817", "0.45319697", "0.45289746", "0.45265666", "0.45248935", "0.45176625", "0.45169014" ]
0.8129826
0
Add cloud This is a class method, meaning that it is meant to be called on the class itself and not on an instance of the class. You're not meant to be calling this directly, but on a cloud subclass
def add(cls, owner, name, user=None, id='', **kwargs): if not name: raise RequiredParameterMissingError('name') if not owner or not isinstance(owner, Organization): raise BadRequestError('owner') if Cloud.objects(owner=owner, name=name, deleted=None): raise CloudExistsError() cloud = cls(owner=owner, name=name) if id: cloud.id = id fail_on_error = kwargs.pop('fail_on_error', True) fail_on_invalid_params = kwargs.pop('fail_on_invalid_params', False) cloud.ctl.add(user, **kwargs) return cloud
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cloud(self, cloud):\n\n self._cloud = cloud", "def add_cloud_region(self, position):\n region = self.cloud_region_selector(position)\n self.regions[id(region)] = region", "def cloud_cover(self):\r\n raise NotImplementedError", "def add_subcommand(self, command):\n command.cloud = self\n super(AbstractCloud, self).add_subcommand(command)", "def _init_cloud(self, cloud_arg):\n # Disable too broad exception warning\n # pylint: disable=W0703\n cloud = None\n if cloud_arg:\n try:\n if hasattr(self.args, \"cm\"):\n cloud_module = self.args.cm if self.args.cm else None\n self.logger.info(\"Creating cloud module {}.\".format(cloud_module))\n else:\n cloud_module = None\n\n cloud = Cloud(host=None, module=cloud_module, logger=self.logger, args=self.args)\n except Exception as error:\n self.logger.warning(\"Cloud module could not be initialized: {}\".format(error))\n cloud = None\n return cloud", "def make_point_cloud(self):\r\n\r\n self.pointCloud = VtkPointCloud()\r\n for k in range(np.size(self.pos, 0)):\r\n self.pointCloud.addPoint(self.pos[k, :])\r\n\r\n # Renderer\r\n renderer = vtk.vtkRenderer()\r\n renderer.AddActor(self.pointCloud.vtkActor)\r\n renderer.SetBackground(.2, .3, .4)\r\n renderer.SetBackground(0.0, 0.0, 0.0)\r\n renderer.ResetCamera()\r\n\r\n # Render Window\r\n renderWindow = vtk.vtkRenderWindow()\r\n renderWindow.AddRenderer(renderer)\r\n\r\n # Interactor\r\n renderWindowInteractor = vtk.vtkRenderWindowInteractor()\r\n renderWindowInteractor.SetRenderWindow(renderWindow)\r\n\r\n # Begin Interaction\r\n renderWindow.Render()\r\n renderWindow.SetWindowName(\"XYZ Data Viewer: \")\r\n renderWindowInteractor.Start()", "def tag_cloud():\n\n return LOAD('plugin_wiki','cloud')", "def init_cloud_api(self, args=None):\n pass", "def add(self, *args):\n pass", "def add(self, *args):\n pass", "def add(self):\n pass", "def add_infra (self):\n raise NotImplementedError", "def add_cloud_plugin_content(self, content):\r\n self._cloud_plugin_content.append(content)", "def test_get_cloud(self):\n pass", "def _process_single_cloud(self, cloud_path):\n # If required files exist, skip processing\n if osp.exists(cloud_path):\n return\n\n # Create necessary parent folders if need be\n os.makedirs(osp.dirname(cloud_path), exist_ok=True)\n\n # Read the raw cloud corresponding to the final processed\n # `cloud_path` and convert it to a Data object\n raw_path = self.processed_to_raw_path(cloud_path)\n data = self.read_single_raw_cloud(raw_path)\n\n if getattr(data, 'y', None) is not None:\n data.y[data.y == -1] = self.num_classes\n\n # If the cloud path indicates a tiling is needed, apply it here\n if self.xy_tiling is not None:\n tile = self.get_tile_from_path(cloud_path)[0]\n data = SampleXYTiling(x=tile[0], y=tile[1], tiling=tile[2])(data)\n elif self.pc_tiling is not None:\n tile = self.get_tile_from_path(cloud_path)[0]\n data = SampleRecursiveMainXYAxisTiling(x=tile[0], steps=tile[1])(data)\n\n # Apply pre_transform\n if self.pre_transform is not None:\n nag = self.pre_transform(data)\n else:\n nag = NAG([data])\n\n # To save some disk space, we discard some level-0 attributes\n if self.point_save_keys is not None:\n keys = set(nag[0].keys) - set(self.point_save_keys)\n nag = NAGRemoveKeys(level=0, keys=keys)(nag)\n elif self.point_no_save_keys is not None:\n nag = NAGRemoveKeys(level=0, keys=self.point_no_save_keys)(nag)\n if self.segment_save_keys is not None:\n keys = set(nag[1].keys) - set(self.segment_save_keys)\n nag = NAGRemoveKeys(level='1+', keys=keys)(nag)\n elif 
self.segment_no_save_keys is not None:\n nag = NAGRemoveKeys(level=0, keys=self.segment_no_save_keys)(nag)\n\n # Save pre_transformed data to the processed dir/<path>\n nag.save(\n cloud_path,\n y_to_csr=self.save_y_to_csr,\n pos_dtype=self.save_pos_dtype,\n fp_dtype=self.save_fp_dtype)\n del nag", "def __init__(self, width, height):\n super().__init__(width, height)\n\n self.rifle = Rifle()\n self.score = 0\n\n self.bullets = []\n\n # Initialize the list of targets to an empty list\n self.targets = []\n\n # List of clouds\n self.clouds = []\n\n # I set the background color to sky blue to set up a background for the game\n arcade.set_background_color(arcade.color.SKY_BLUE)\n\n # Initialize this to zero to create variables. These will be changed in\n # the on_mouse_motion by setting them equal to the x and y of the mouse\n # which will be used in other functions\n self.mouse_x = 0.0\n self.mouse_y = 0.0\n\n # Determine the number of clouds to add to the list. This is initialized here\n # so that the game will start with a random number of clouds each time it's played\n self.num_clouds = random.randint(0, 5)\n # A loop to add Cloud objects to the list of clouds.\n for i in range(self.num_clouds):\n self.clouds.append(Cloud())", "def __init__(self, wink, opp):\n super().__init__(wink, opp)\n opp.data[DOMAIN][\"entities\"][\"scene\"].append(self)", "def test_register_cloud(self):\n pass", "def add_cloud_plugin_content(self, content):", "def cloud_name(self, cloud_name):\n\n self._cloud_name = cloud_name", "def addPoint(self, *args, **kwargs):\n ...", "def __init__(self, x, y, width, height, color, name):\n self.__class__.instances.append(self)\n self.x = x\n self.y = y\n self.width = width\n self.height = height\n self.color = color\n self.name = name", "def add(self, *args, **kwargs):\n obj = self._class(*args, **kwargs)\n self._items.append(obj)", "def add_surface(self,s):\n self.surfaces.append(s)\n s.system=self.surfaces", "def add_cogs(self):\n self.add_cog(Voice(self))", "def add_fruit(self):\n # print('fruit added to container')", "def make_cloud(\n self,\n image_path: str,\n word_frequency: Dict[str, int],\n font_path: str,\n background_color: str = \"white\",\n width: int = 800,\n height: int = 600,\n **kwargs,\n ) -> WordCloud:\n word_cloud = WordCloud(\n font_path=font_path, background_color=background_color, width=width, height=height, **kwargs\n )\n word_cloud.generate_from_frequencies(word_frequency)\n word_cloud.to_file(image_path)\n print(f\"Saved image to {image_path}\")\n\n return word_cloud", "def point_cloud(self, X, Y, Z, size=1, color='#FF3232', bordercolor='#FF3232', legend='', width=0.5, opacity=1.0):\n point_cloud = go.Scatter3d(\n x=X,\n y=Y,\n z=Z,\n # showlegend=False,\n name=legend,\n mode='markers',\n marker=dict(\n size=size,\n color=color,\n line=dict(\n color=bordercolor,\n width=width\n ),\n # opacity=opacity\n )\n )\n\n return point_cloud", "def point_cloud(self):\n\t\tgen = self.loop(point_cloud=True)\n\t\tpoint_cloud = next(gen)\n\t\treturn point_cloud", "def add_clown(self):\n self.scenes[self.current_scene].add_object(Clown())\n self.redraw()", "def insert(self, *args):\n return _itkSurfaceSpatialObjectPointPython.vectoritkSurfaceSpatialObjectPoint3_insert(self, *args)", "def add(self, documents):\n\n if self.cluster:\n self.cluster.add(documents)\n else:\n super().add(documents)\n\n return documents", "def __init__(self, src, dst, plot=True, reinit=False):\n self.src = src\n self.dst = dst\n assert self.src.num == self.dst.num\n self.result = 
PointCloud(src)\n self.plot = plot\n self.reinit = reinit\n if self.plot: self.plotter = PointCloudPlotter(self.dst)", "def test_get_clouds(self):\n pass", "def add(self, c):\r\n if c in self.members:\r\n raise ValueError\r\n self.members.append(c)", "def __init__(self, name):\n self.cloud = name\n self.cloudtype = \"multipass\"\n config = Config()\n self.default = config[f\"cloudmesh.volume.{self.cloud}.default\"]\n self.cm = CmDatabase()", "def augment_cloud(Ps):\n \"Augmented params:\"\n pc_augm_scale=0\n pc_augm_rot=1\n pc_augm_mirror_prob=0.5\n pc_augm_jitter=0\n\n M = transforms3d.zooms.zfdir2mat(1)\n if pc_augm_scale > 1:\n s = random.uniform(1/pc_augm_scale, pc_augm_scale)\n M = np.dot(transforms3d.zooms.zfdir2mat(s), M)\n if pc_augm_rot:\n angle = random.uniform(0, 2*math.pi)\n M = np.dot(transforms3d.axangles.axangle2mat([0,0,1], angle), M) # z=upright assumption\n if pc_augm_mirror_prob > 0: # mirroring x&y, not z\n if random.random() < pc_augm_mirror_prob/2:\n M = np.dot(transforms3d.zooms.zfdir2mat(-1, [1,0,0]), M)\n if random.random() < pc_augm_mirror_prob/2:\n M = np.dot(transforms3d.zooms.zfdir2mat(-1, [0,1,0]), M)\n result = []\n for P in Ps:\n P[:,:3] = np.dot(P[:,:3], M.T)\n\n if pc_augm_jitter:\n sigma, clip= 0.01, 0.05 # https://github.com/charlesq34/pointnet/blob/master/provider.py#L74\n P = P + np.clip(sigma * np.random.randn(*P.shape), -1*clip, clip).astype(np.float32)\n result.append(P)\n return result", "def readCloud(self):\n Cldict = self.config.Cl\n cloudFile = os.path.join(self.config.path, self.config.cloudFile[self.idnum])\n\n self.cloud = []\n value_sorted_Cldict_keys = sorted(Cldict, key=lambda k: Cldict[k])\n for k in value_sorted_Cldict_keys:\n self.cloud.append([])\n self.nParticulate = len(Cldict.keys())\n with open(cloudFile, \"r\") as fp:\n expected_number = utils.get_expected_number_of_entries(fp)\n for line in fp:\n cval = utils.get_data_from(line)\n if cval is None or len(cval) != expected_number:\n continue\n for n in range(self.nParticulate): # Initialize all of the particulates to 0.0\n if n < len(cval):\n self.cloud[n].append(cval[n])\n else:\n self.cloud[n].append(0.0)\n self.nCloud = len(self.cloud[0])\n # ##Redo the particulate dictionary for the self.cloud index positions\n nid, sk = utils.invertDictionary(Cldict)\n for i, k in enumerate(sk):\n Cldict[nid[k]] = i\n self.config.Cl = Cldict\n\n self.cloud = np.array(self.cloud)\n # ## Check that P is monotonically increasing\n monotonic = np.all(np.diff(self.cloud[self.config.Cl['P']]) > 0.0)\n if not monotonic:\n self.cloud = np.fliplr(self.cloud)\n monotonic = np.all(np.diff(self.cloud[self.config.Cl['P']]) > 0.0)\n if not monotonic:\n raise ValueError(\"Pressure not monotonically increasing in {}\".format(cloudFile))\n\n # ## Renormalize so that deepest z is 0 and set DZ\n self._renorm_z('cloud')", "def enable_cloud(self, api_key, url=None):\n self.cloudservice.enable(api_key, url)", "def add_instance(self,name):\n new = self.create_instance(name)\n self.model.append(new)\n return new", "def create_point_cloud(self):\n pixels = []\n colors = []\n my_pixels = []\n for j in range(self.height):\n for i in range(self.width):\n depth = self.depth[j, i]\n pixels.append(\n [i * depth, j * depth, depth]\n )\n my_pixels.append(\n [i, j, 1]\n )\n # make rgb with flip()\n colors.append(np.flip(self.bgr[j, i, :]))\n # colors.append(self.bgr[j, i, :])\n self.my_pixels = my_pixels\n pixels = np.array(pixels)\n\n # project pixels to camera space\n self.xyz_points = self.intrinsics_inv @ 
np.transpose(pixels)\n self.color_points = colors\n\n # now add 1s to the points for homogenous coordinates\n num_points = self.get_num_xyz_points()\n ones = np.ones((1, num_points))\n self.xyzw_points = np.concatenate((self.xyz_points, ones), axis=0)\n\n self.scene = None\n self.camera_pose = None\n self.nm = None\n self.nl = None\n self.nc = None\n self.create_mesh()", "def got_info(self, cloud_obj):", "def create(cls, body: CloudAccount):\n\t\tpass", "def _project_pointcloud(self, cloud):\n\n assert isinstance(cloud, PointCloud2)\n\n pc1 = PointCloud()\n pc1.header = cloud.header\n # hack the time! dont move the robot :-0\n pc1.header.stamp = rospy.Time.now()\n \n \n pc1.points = [Point32(*p) for p in pc2.read_points(cloud)]\n\n self._tf_listener.waitForTransform(pc1.header.frame_id,\n self._image_info.tf_frame, \n rospy.Time(0), \n rospy.Duration(4))\n\n image_frame_cloud = self._tf_listener.transformPointCloud (\n self._image_info.tf_frame, \n pc1)\n min_x, max_x, min_y, max_y = 640, 0, 480, 0 # TODO: remove hard coded image size!\n for pt in image_frame_cloud.points:\n u, v = self._image_info.project3dToPixel((pt.x, pt.y, pt.z))\n if v < min_y:\n min_y = int(v)\n if v > max_y:\n max_y = int(v)\n if u < min_x:\n min_x = int(u)\n if u > max_x:\n max_x = int(u)\n location = (((min_x, min_y), (max_x, max_y)))\n rospy.loginfo(\"Transformed cloud into image plane\")\n return location", "def recognizing_clouds(cat):\n print(\"length in helpers\", len(cat))\n # finding clouds in catalogs\n base_table = pd.read_excel(r'/Users/shlomo/Desktop/Thesis/pythonProject/Combined plots/Other '\n r'catalogs/Table_Zucker.xlsx')\n\n names_col = []\n for cloud_number in range(len(base_table)):\n cloud_name = base_table[\"cloud\"][cloud_number]\n # getting locations from catalog\n locations_xyz = [base_table[\"x_pc\"][cloud_number], base_table[\"y_pc\"][cloud_number],\n base_table[\"z_pc\"][cloud_number]]\n # locations_lb = [base_table['l'][cloud_number], base_table['b'][cloud_number]]\n result = find_apt_line(locations_xyz, cat)[0]\n names_col.append([result, cloud_name])\n print([result, cloud_name])\n\n add_names(cat, names_col)\n return cat", "def test_update_cloud(self):\n pass", "def __call__(cls, *args, **kwargs):\n layer = super(LayerAspect, cls).__call__(*args, **kwargs)\n\n if Job.Current:\n Job.Current.addLayer(layer)\n \n layer.afterInit()\n return layer", "def __init__(self):\n super(GatherLastLayer, self).__init__()", "def on_add(self, project, name, **kwargs):\n pass", "def add_to_dataset(self, dataset: Dataset):\n pass", "def add(self, name, content):\n raise NotImplementedError", "def webAdd( self, web ):\n web.add( self )", "def fusion_api_add_datacenter(self, body, api=None, headers=None):\n return self.dc.create(body, api, headers)", "def _create_fleet(self):\n # make an alien\n alien = Alien(self)\n self.aliens.add(alien)", "def setup(self):\n base = automap_base()\n engine = create_engine(\"mysql+pymysql://\" + csconfig.config.db_user + \":\" +\n csconfig.config.db_password + \"@\" +\n csconfig.config.db_host + \":\" +\n str(csconfig.config.db_port) +\n \"/\" + csconfig.config.db_name)\n base.prepare(engine, reflect=True)\n session = Session(engine)\n cloud_yaml = base.classes.csv2_group_resource_yaml\n\n for cloud in self.group_resources:\n cloud_yamls = session.query(cloud_yaml).\\\n filter(cloud_yaml.group_name == self.name,\n cloud_yaml.cloud_name == cloud.cloud_name)\n cloud_yaml_list = []\n for yam in cloud_yamls:\n cloud_yaml_list.append([yam.yaml_name, yam.yaml, 
yam.mime_type])\n if cloud.cloud_type == 'localhost':\n newcloud = cloudscheduler.localhostcloud.LocalHostCloud(extrayaml=cloud_yaml_list, resource=cloud)\n else:\n newcloud = cloudscheduler.openstackcloud.\\\n OpenStackCloud(extrayaml=cloud_yaml_list, resource=cloud)\n self.clouds[newcloud.name] = newcloud\n self.log.debug(\"Added all clouds for group: %s\", self.name)", "def append(self, *args):\n self.add(*args)", "def maskClouds(self,img):\n\t\t\n\t\tscore = ee.Image(1.0);\n\t\t# Clouds are reasonably bright in the blue band.\n\t\tblue_rescale = img.select('blue').subtract(ee.Number(0.1)).divide(ee.Number(0.3).subtract(ee.Number(0.1)))\n\t\tscore = score.min(blue_rescale);\n\n\t\t# Clouds are reasonably bright in all visible bands.\n\t\tvisible = img.select('red').add(img.select('green')).add(img.select('blue'))\n\t\tvisible_rescale = visible.subtract(ee.Number(0.2)).divide(ee.Number(0.8).subtract(ee.Number(0.2)))\n\t\tscore = score.min(visible_rescale);\n\n\t\t# Clouds are reasonably bright in all infrared bands.\n\t\tinfrared = img.select('nir').add(img.select('swir1')).add(img.select('swir2'))\n\t\tinfrared_rescale = infrared.subtract(ee.Number(0.3)).divide(ee.Number(0.8).subtract(ee.Number(0.3)))\n\t\tscore = score.min(infrared_rescale);\n\n\t\t# Clouds are reasonably cool in temperature.\n\t\ttemp_rescale = img.select('thermal').subtract(ee.Number(300)).divide(ee.Number(290).subtract(ee.Number(300)))\n\t\tscore = score.min(temp_rescale);\n\n\t\t# However, clouds are not snow.\n\t\tndsi = img.normalizedDifference(['green', 'swir1']);\n\t\tndsi_rescale = ndsi.subtract(ee.Number(0.8)).divide(ee.Number(0.6).subtract(ee.Number(0.8)))\n\t\tscore = score.min(ndsi_rescale).multiply(100).byte();\n\t\tmask = score.lt(self.env.cloudThreshold).rename(['cloudMask']);\n\t\timg = img.updateMask(mask);\n \n\t\treturn img;", "def addLayer(self, layer):\n self.layers.append(layer)", "def append(self, *args, **kwargs): # real signature unknown\n pass", "def __add__(self, obj):\n if isinstance(obj, vtk.vtkProp3D):\n self.AddPart(obj)\n\n self.actors.append(obj)\n\n if hasattr(obj, \"scalarbar\") and obj.scalarbar is not None:\n if self.scalarbar is None:\n self.scalarbar = obj.scalarbar\n return self\n\n def unpack_group(scalarbar):\n if isinstance(scalarbar, Group):\n return scalarbar.unpack()\n else:\n return scalarbar\n\n if isinstance(self.scalarbar, Group):\n self.scalarbar += unpack_group(obj.scalarbar)\n else:\n self.scalarbar = Group([unpack_group(self.scalarbar), unpack_group(obj.scalarbar)])\n self.pipeline = vedo.utils.OperationNode(\"add mesh\", parents=[self, obj], c=\"#f08080\")\n return self", "def cloud_platform(self, cloud_platform):\n\n self._cloud_platform = cloud_platform", "def add(self, service: AbstractService):\n self.services.append(service)", "def append(self, layer):\n self.layers.append(layer)", "def append_layer(self, *args, **kwargs) :\n \n self.insert_layer(len(self._layers), *args, **kwargs)", "def AddLayer(self, *args):\n return _XCAFDoc.XCAFDoc_LayerTool_AddLayer(self, *args)", "def add(self, obj):\n self.objects.append(obj)\n if obj.gravity == 0:\n obj.gravity = self.gravity\n if obj.gravity_z == 0:\n obj.gravity_z = self.gravity_z", "def register(self, system_class):\n temp = system_class(self.evt, self.list, self)\n self.systems.append(temp)", "def add_cloudyvent_vm(self, runname, iaasid, nodeid, hostname, service_type, parent, runlogdir, vmlogdir):\n cyvm = self.get_by_iaasid(iaasid)\n if not cyvm:\n cyvm = _CYVM(runname, iaasid, nodeid, hostname, service_type, 
parent, runlogdir, vmlogdir)\n self.session.add(cyvm)\n return True\n else:\n cyvm.hostname = hostname\n cyvm.service_type = service_type\n cyvm.nodeid = nodeid\n cyvm.parent = parent\n return False", "def _has_cloud_band(self, band: BandNames) -> bool:\n return False", "def __init__(self,grib,config):\r\n self.grib_file_path = grib\r\n self.member_name = ModelData.get_member_name_from_path(grib)\r\n if self.member_name not in ModelData.member_names:\r\n ModelData.member_names.append(self.member_name)\r\n self.config = config\r\n ModelData.instances.append(self)\r\n return", "def add_class(self, new_class):\n index = self._counter\n self._counter += 1\n for element in new_class:\n self._class_names[element] = index\n node = self.part[index].append(element)\n self._place[element] = node", "def draw_clouds(a):\n small_cloud(0 + a, 0, 0)\n small_cloud(200 + a, -150, -10)\n big_cloud(350 + a, 0, -3)\n small_cloud(600 + a, -90, 3)\n small_cloud(800 + a, 0, 0)\n small_cloud(1000 + a, -150, -10)\n big_cloud(1150 + a, 0, -3)\n small_cloud(1400 + a, -90, 3)\n small_cloud(-800 + a, 0, 0)\n small_cloud(-600 + a, -150, -10)\n big_cloud(-450 + a, 0, -3)\n small_cloud(-200 + a, -90, 3)", "def get_cloud(self):\n self.has_cloud = False\n while not self.has_cloud:\n rospy.sleep(0.01)\n\n # cloud_time = self.active_cloud_msg.header.stamp\n # cloud_frame = self.active_cloud_msg.header.frame_id\n cloud = np.array(list(point_cloud2.read_points(self.active_cloud_msg)))[:, 0:3]\n mask = np.logical_not(np.isnan(cloud).any(axis=1))\n cloud = cloud[mask]\n\n print 'received cloud with {} points.'.format(cloud.shape[0])\n return cloud", "def NewLayer(self, event):\n pass", "def add_ga_classifier(self,\n child: Classifier,\n match_set: ClassifiersList,\n population: ClassifiersList):\n old_cl = self.find_old_classifier(child)\n\n if old_cl is None:\n self.append(child)\n population.append(child)\n if match_set is not None:\n match_set.append(child)\n else:\n if not old_cl.is_marked():\n old_cl.num += 1", "def cloud_storage_files(self, cloud_storage_files):\n\n self._cloud_storage_files = cloud_storage_files", "def addService(self, service):\n\t\tself.services.append(service)\n\t\treturn self", "def add_instance(self, context):\r\n self.instance_contexts.append(context)\r\n self.total_count += 1", "def create(self):\n logging.debug(\"%s create called\" % self)\n # networks = self.infra.get(\"networks\")\n notify(\"Creating network %s\" % self.name)\n self.cloudnet = cn.create(self.name, cidr=self.cidr)\n return True", "def big_cloud(a, b, c):\n arcade.draw_circle_filled(100 + a, 700 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(70 + a, 740 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(120 + a, 745 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(171 + a, 750 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(170 + a, 700 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(200 + a, 730 + b, 40 + c, arcade.color.WHITE)", "def big_cloud(a, b, c):\n arcade.draw_circle_filled(100 + a, 700 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(70 + a, 740 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(120 + a, 745 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(171 + a, 750 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(170 + a, 700 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(200 + a, 730 + b, 40 + c, arcade.color.WHITE)", "def plotrgcloud(self):\n print self.kpunten\n for i in 
range(len(self.kpunten[0])):\n self.writetext('sen ='+ self.kpunten[0][i][0], (0.65,0.85), axnum = 0, hor = None ,ver = None , rot = None ,fs =14 , transform = self.fig.axes[0].transAxes)\n if i == len(self.kpunten[0]) -1 :\n end = None\n else:\n end = self.kpunten[0][i+1][1] + 1\n print end\n self.plotrgwrap( self.rgindex,2*self.reader.npair+self.rgindex,'real part of rgvars (a.u)' , 'imaginary part of rgvars (a.u.)', tit ='RG vars g = %f all states'%(self.chardata) , begin = self.kpunten[0][i][1] , stop = end , name = 'cpcloud'+ self.kpunten[0][i][0] , filenum = 0)", "def create_wordcloud(self, text):\n text = ' '.join(f\"{word}\" for word in text)\n mask = np.array(Image.open(os.path.join(CURRDIR, \"cloud.png\")))\n wc = WordCloud(background_color=\"white\",\n max_words=200,\n mask=mask)\n wc.generate(text)\n wc.to_file(PATH_TO_SAVE_IMG, \"wordle.png\")", "def _populate_clouds():\n for key, value in list(globals().items()):\n if not key.startswith('_') and key.endswith(\n 'Cloud') and key != 'Cloud':\n if not value._controller_cls:\n continue\n if issubclass(value, Cloud) and value is not Cloud:\n CLOUDS[value._controller_cls.provider] = value\n\n # Add aliases to CLOUDS dictionary\n for key, value in config.PROVIDERS.items():\n driver_name = value['driver']\n cloud_aliases = [key] + value['aliases']\n if CLOUDS.get(driver_name):\n for alias in cloud_aliases:\n CLOUDS[alias] = CLOUDS[driver_name]\n else:\n value = next((CLOUDS.get(alias) for alias in cloud_aliases\n if CLOUDS.get(alias)), None)\n if value:\n for alias in cloud_aliases:\n CLOUDS[alias] = value", "async def async_added_to_opp(self):\n self.opp.data[DOMAIN][\"entities\"][\"scene\"].append(self)", "def __iadd__(self, point):\n self.points.append(point)\n return self", "def insert(self, *args):\n return _itkSurfaceSpatialObjectPointPython.vectoritkSurfaceSpatialObjectPoint2_insert(self, *args)", "def __init__(self, name=None):\n\n conf = Config()[\"cloudmesh\"]\n super().__init__(name)\n\n self.user = Config()[\"cloudmesh\"][\"profile\"][\"user\"]\n self.spec = conf[\"cloud\"][name]\n self.cloud = name\n\n self.default = self.spec[\"default\"]\n self.cloudtype = self.spec[\"cm\"][\"kind\"]\n\n self.cred = self.spec[\"credentials\"]\n self.default = self.spec[\"default\"]\n self.project_id = self.cred[\"auth\"][\"project_id\"]\n\n # pprint(self.cred)\n\n self.cloudman = openstack.connection.Connection(**self.cred)\n\n # self.default_image = deft[\"image\"]\n # self.default_size = deft[\"size\"]\n # self.default.location = cred[\"datacenter\"]\n\n try:\n self.public_key_path = conf[\"profile\"][\"publickey\"]\n self.key_path = path_expand(\n Config()[\"cloudmesh\"][\"profile\"][\"publickey\"])\n f = open(self.key_path, 'r')\n self.key_val = f.read()\n except:\n raise ValueError(\"the public key location is not set in the \"\n \"profile of the yaml file.\")", "def clouds(self):\n key = '/'\n dataray = np.zeros([113, 2, self.config['temporal_range_size']])\n\n time_meas = self.config['time_measured']\n\n j = 0 # index measured data\n k = 0 # index cloud data\n while j < self.config['temporal_range_size']:\n values, attrb = loadhdf5file(self.files_h5[k], key=key)\n time = datetime.datetime.strptime(attrb['Time'],\n '%Y-%m-%d %H:%M:%S') #%Y%m%d_%H%M%S') # string\n time_m = datetime.datetime.strptime(time_meas[j],\n '%Y%m%d_%H%M%S') # string\n delta_time = datetime.timedelta(seconds=10)\n\n if j == 0:\n if time_m + delta_time < time:\n print(\n 'Time difference between initial measurement and cloud properties is too big')\n break\n 
else:\n pass\n\n if time_m <= time:\n dataray[:, 0, j] = values['cloud_cover'][:, 1]\n dataray[:, 1, j] = values['brightness'][:, 1]\n j += 1\n else:\n k += 1\n\n channels = np.arange(113)\n columns = ['cloud_cover', 'brightness']\n attrb['Columns'] = columns\n print(len(dataray), len(time_meas))\n # delete non global attributes\n del attrb['Brightness_mean']\n del attrb['Cloud_cover_mean']\n del attrb['Time']\n del attrb['UTC']\n\n # Create a file in the disk and add attributes to dataArray\n da = xr.DataArray(dataray, coords=[channels, columns, time_meas],\n dims=['channel', 'columns', 'time'],\n name='Cloud conditions',\n attrs=attrb)\n return da", "def process(self, car):\n super(self.__class__, self).process(car)\n car.add_new_car(self)", "def add(self):\n self.create(self.fs.name)\n # Mark a volume as 'static' if created from a snapshot\n # Note that if a volume is marked as 'static', it is assumed it\n # can be deleted upon cluster termination!\n if (ServiceRole.GALAXY_DATA not in self.fs.svc_roles and\n (self.from_snapshot_id is not None or self.from_archive is not\n None)):\n log.debug(\"Marked volume '%s' from file system '%s' as 'static'\" %\n (self.volume_id, self.fs.name))\n # FIXME: This is a major problem - any new volumes added from a snapshot\n # will be assumed 'static'. This is OK before being able to add an\n # arbitrary volume as a file system but is no good any more. The\n # problem is in automatically detecting volumes that are supposed\n # to be static and are being added automatically at startup\n if self.from_archive:\n self.fs.kind = 'volume' # Treated as a regular volume after initial extraction\n else:\n self.static = True\n self.fs.kind = 'snapshot'\n else:\n self.fs.kind = 'volume'\n if self.attach():\n us = os.path.join(self.app.path_resolver.galaxy_data, 'upload_store')\n misc.remove(us)\n log.debug(\"Volume attached, mounting {0}\".format(self.fs.mount_point))\n self.mount(self.fs.mount_point)", "def add(self, *args, **kwargs):\n return self.load(*args, **kwargs)", "def insert(self, *args, **kwargs):\n return _image.image_insert(self, *args, **kwargs)", "def cloud(xc, yc, c):\n white = (255, 255, 255) # cloud's color\n\n circle(screen, white, (xc, yc), 30 * c) # white circle that comprises part of the cloud\n circle(screen, white, (xc + 30 * c, yc + 30 * c), 30 * c) # white circle that comprises part of the cloud\n circle(screen, white, (xc + 30 * c, yc - 30 * c), 30 * c) # white circle that comprises part of the cloud\n circle(screen, white, (xc + 60 * c, yc), 30 * c) # white circle that comprises part of the cloud\n circle(screen, white, (xc + 90 * c, yc + 30 * c), 30 * c) # white circle that comprises part of the cloud\n circle(screen, white, (xc + 90 * c, yc - 30 * c), 30 * c) # white circle that comprises part of the cloud\n circle(screen, white, (xc + 120 * c, yc), 30 * c) # white circle that comprises part of the cloud", "def addEllipse(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads\r\n pass", "def convertcloud(points):\n pcd = open3d.geometry.PointCloud()\n pcd.points = open3d.utility.Vector3dVector(points)\n return pcd", "def __init__(self, *args):\n this = _libsbml.new_KineticLaw(*args)\n try: self.this.append(this)\n except: self.this = this", "def small_cloud(a, b, c):\n arcade.draw_circle_filled(100 + a, 700 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(70 + a, 740 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(120 + a, 745 + b, 40 + c, arcade.color.WHITE)\n 
arcade.draw_circle_filled(150 + a, 720 + b, 40 + c, arcade.color.WHITE)", "def small_cloud(a, b, c):\n arcade.draw_circle_filled(100 + a, 700 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(70 + a, 740 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(120 + a, 745 + b, 40 + c, arcade.color.WHITE)\n arcade.draw_circle_filled(150 + a, 720 + b, 40 + c, arcade.color.WHITE)", "def augment(self, *args, **kwargs):\n pass" ]
[ "0.6437736", "0.62252134", "0.6021022", "0.5750617", "0.5715046", "0.5662575", "0.55289835", "0.54481226", "0.5443279", "0.5443279", "0.54299885", "0.5409173", "0.53697765", "0.53604543", "0.5280927", "0.5277254", "0.5239339", "0.52346087", "0.5227393", "0.5209213", "0.5187303", "0.51605374", "0.51576996", "0.51359576", "0.51243716", "0.5064991", "0.5046525", "0.5037917", "0.5032406", "0.50057065", "0.49693248", "0.49687058", "0.49625686", "0.49575427", "0.49527222", "0.49496353", "0.49438125", "0.49402624", "0.49376133", "0.49200925", "0.49077567", "0.49029097", "0.49014983", "0.48969114", "0.48960236", "0.48896396", "0.4873304", "0.4862431", "0.4856604", "0.48471522", "0.48425844", "0.483617", "0.48349613", "0.48337525", "0.48330122", "0.4822331", "0.48220104", "0.48199838", "0.48179358", "0.48114043", "0.4808689", "0.47974256", "0.47868377", "0.4784896", "0.47848886", "0.47835466", "0.4781475", "0.4778986", "0.47774452", "0.477093", "0.47644877", "0.47633517", "0.47561428", "0.47518793", "0.47504124", "0.47476044", "0.4744836", "0.47413522", "0.47413442", "0.4736881", "0.4736881", "0.47315553", "0.47088385", "0.47073922", "0.47053182", "0.4704103", "0.47013274", "0.47010696", "0.47010508", "0.46989983", "0.46967784", "0.4689279", "0.468811", "0.46808007", "0.467803", "0.46767548", "0.46745482", "0.46744382", "0.46744382", "0.46743396" ]
0.5768774
3
Decide whether the minitaur has fallen. If the angle between the base's up direction and the world's up direction is too large (their dot product is smaller than 0.5), the minitaur is considered fallen.
def is_fallen(self):
    orientation = self.minitaur_env.minitaur.GetBaseOrientation()
    rot_mat = self.minitaur_env._pybullet_client.getMatrixFromQuaternion(orientation)
    local_up = rot_mat[6:]
    return (np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.3)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def at_goal(self):\n return self.distance_from_goal < self.robot.wheels.base_length/2", "def at_goal(self):\n return self.distance_from_goal < self.robot.wheels.base_length/2", "def is_upper(self):\n return self.z < 0", "def is_at_wall(self):\n return self.distmin < self.distmax*0.8", "def binary_blow_wind():\n s = random.random()\n return s < 0.05", "def impact(self, ground):\n return self.position[1] > ground", "def feller(self):\n return 2 * self.kappa_y * self.mean_v - self.eta_y**2 > 0", "def safe(self): \n wall_far = self.distmin > self.distmax*0.6\n # Check which way to go\n if wall_far:\n self.at_wall()\n return wall_far", "def isgood(self):\n\t\tanswer = True\n\t\t\n\t\tif self.mes_flux <= 0.0:\n\t\t\tanswer = False\n\n\t\treturn answer", "def isGoalState(self, state):\n diff = state.get_pos() - self._player_loc\n return diff.y == 0 and (diff.x == 0 or diff.z == 0) and \\\n abs(diff.x) + abs(diff.z) == 2 and \\\n state.get_block(self._player_loc + diff/2 + _Vec3(0, -1, 0)) not in \\\n (_AIR, _LAVA, _WATER)", "def inCamp(self):\n return (((self.myTeam==1) and (self.ballPos.x <= self.width/2))\n | ((self.myTeam==2) and (self.ballPos.x >= self.width/2)))", "def iswalking(self):\r\n return self.model.coord!=self.model.targetcoord", "def goalReached(self, rewards):\n return len(rewards) >= 100 and np.mean(rewards[-100:]) >= 18", "def check_gravity(self, screen_height):\n if self.loc[1] + self.rect[1] > screen_height - self.rect[1]/3-10: # TODO - Add constant variable here\n self.loc[1] = screen_height - self.rect[1] + 1 - self.rect[1]/3-10\n if Action.falling in self.actions:\n self.remove_action(Action.falling)\n else:\n self.add_action(Action.falling)", "def checkFuel(self):\n return self.maze.checkFuelCost(self.checkpoint,currentLap = self.laps) - self.timeDriving", "def hit(self):\n if self._power == 0 :\n return False\n self._power -= 1\n return True", "def checkGoal(self):\n # -- It is not included for simplifity --#\n if self.reward_cumulative != None:\n x = round((abs(self.reward_cumulative) - abs(round(self.reward_cumulative))) * 100);\n rem_goal = x % 25\n rem_timeout = x % 20\n if rem_goal == 0 and x != 0:\n self.is_goal = True\n else:\n self.is_goal = False\n\n if rem_timeout == 0 and x != 0:\n self.is_timeout = True\n else:\n self.is_timeout = False", "def is_contagious(self):\n if self.health >= 0 and self.health <= 49:\n return True\n elif self.health >= 50 and self.health <= 100:\n return False", "def is_almost_active(self,\n env\n ):\n if not hasattr(self, \"tolerance\") or self.tolerance is None:\n return False\n c_value = self.get_value(env)\n flag = np.any(np.greater(c_value + self.tolerance, 0.))\n return bool(flag)", "def isInGoal(self):\n coordx= self.playerPos.x\n coordy= self.playerPos.y\n target = 0 if self.id_team == 1 else 1\n\n if((((target == 0)and (coordx<=5))|\n ((target == 1) and(coordx>145))) \n and (coordy<=50 and coordy>=40)):\n return True\n else:\n return False", "def termialTest(state):\n if state.isWin() or state.isLose():\n return True\n return False", "def reached_goal(self):\n for i in range(self.simulator_.num_agents):\n if rvo_math.abs_sq(self.simulator_.agents_[i].position_ - self.goals_[i]) > self.simulator_.agents_[i].radius_ * self.simulator_.agents_[i].radius_:\n return False\n\n return True", "def constrain_sun(self, position: str) -> bool:\n if (position == \"up\" and self.sun_up()) or (\n position == \"down\" and self.sun_down()\n ):\n return True\n return False", "def checkBottom(self):\n exposed = True\n for sprite in 
self.overlapping_sprites:\n if sprite not in self.game.neutrinos:\n a = abs(self.bottom - sprite.top)\n b = abs(self.top - sprite.bottom)\n c = abs(self.left - sprite.right)\n d = abs(self.right - sprite.left)\n if a < b and a < c and a < d:\n exposed = False\n break\n return exposed", "def falling(self):\r\n if self.y_vel < 10:\r\n self.y_vel += self.gravity", "def has_uav_reached_current_waypoint(self):\n return self.drone.has_reached_waypoint()", "def check_boundaries(self):\n # Checks if the enemy bar has gone of the net\n if self.rect.left <= self.settings.WINDOW_WIDTH / 2:\n self.rect.left = self.settings.WINDOW_WIDTH / 2\n self.isMovingUp = False\n\n # Checks if the enemy bar has gone out of bound to the right\n if self.rect.right >= self.settings.WINDOW_WIDTH:\n self.rect.right = self.settings.WINDOW_WIDTH\n self.isMovingUp = True", "def in_fire(self):\n Fire=False\n if self.state>0 and self.state<=5:\n Fire=True\n return Fire", "def check_falling(self, obstacles):\n self.rect.move_ip((0, 1))\n if not pygame.sprite.spritecollideany(self, obstacles):\n if not self.climb:\n\t self.fall = True\n\n self.rect.move_ip((0, -1))", "def hits_top_or_bottom(self):\n if self.y >= self.scene.screen.get_height() - self.image.get_height() or self.y <= 0:\n return True\n else:\n return False", "def goal_test(self, state):\n for x, y in state.alvos:\n if state.tabuleiro[x][y] is not BOX_ON_TARGET:\n return False\n return True", "def is_goal(self):\n if 0 in self.final_values: # Check if any zeroes are in the final states\n return False\n return True", "def is_jumping(self):\n if(self.going_down or self.going_up or self.mid_air):\n return True\n else:\n return False", "def isinvertible(self):\n if np.all(np.abs(self.maroots) > 1):\n return True\n else:\n return False", "def is_hungry(self) -> bool:\n if self.eat_count <= 3:\n return True\n else:\n return False", "def is_wall_collided(self)-> bool:\n # print('{} >= {} or {} <= 0'.format(self.x + self.width, self.windows_size))\n if self.x <= 0:\n self.velocity = -self.velocity\n return True\n return False", "def check_offset(self):\n\n for d in range(self.n_dmps):\n if abs(self.y0[d] - self.goal[d]) < 1e-4:\n self.goal[d] += 1e-4", "def goal_test(self):\n if -1 in self.state:\n return False\n else:\n return True", "def is_sealed(self):\n return self.walls == Direction.All", "def check_overflow(self):\n self.stateC = self.toConceptual(self.state)\n\n check_inf = torch.any(torch.isinf(self.stateC)) or torch.any(\n torch.isinf(self.state))\n check_nan = torch.any(torch.isnan(self.stateC)) or torch.any(\n torch.isnan(self.state))\n\n if check_inf or check_nan:\n return True\n else:\n return False", "def can_move(self, next_x, next_y):\n\t\tif self.battery == 0:\n\t\t\tif self.planet.tiles[next_y][next_x].is_shaded():\n\t\t\t\treturn False\n\t\tif self.planet.tiles[next_y][next_x].elevation(self) == \"+\":\n\t\t\treturn False\n\t\tif self.planet.tiles[next_y][next_x].elevation(self) == \"-\":\n\t\t\treturn False\n\t\treturn True", "def normal_defense(self):\n if self.game.get_my_mana() > DEFENSE_MANA_CAP:\n self.portals.dumb_castle_defense(DEFENSE_MANA_CAP)\n self.portals.dumb_portal_defense(PORTAL_SELF_DEFENSE_MANA_CAP)", "def isOutsideBorder(self):\n if (self.posX < -self.myGalaxy.worldWidth or self.posX > self.myGalaxy.worldWidth or\n self.posY < -self.myGalaxy.worldHeight or self.posY > self.myGalaxy.worldHeight):\n return 1\n return 0", "def telofase(self):\n\n\n var = self.varianza()\n if var >= 6.5:\n self.guardar_imagen('telofase')\n return True\n 
return False", "def is_water(self):\n return False", "def is_water(self):\n return False", "def is_unhappy(self):\n #checked!#\n ###your code here###\n same=0\n for i in self.home.neighbors:\n if i.occupant!=None:\n if i.occupant.group==self.group:\n same+=1\n happniess=float(same)/len(self.home.neighbors)\n if happniess<self.happiness_threshold:\n return True\n else:\n return False", "def goal_reached(self, position):\n return position >= self.goal", "def close_to_exceeding(self) -> bool:\n mean = self.current / self.num_cuts\n if self.max_frames is not None:\n return self.current + mean > self.max_frames\n if self.max_samples is not None:\n return self.current + mean > self.max_samples\n if self.max_duration is not None:\n return self.current + mean > self.max_duration\n return False", "def check_reached(self):\n m_x, m_y = self.destination.get_pos()\n m_radius = self.destination.radius\n distance_centre = math.sqrt((m_x - self.x)**2 + (m_y - self.y)**2)\n sum_radii = m_radius + self.radius\n if distance_centre < sum_radii:\n self.color = pygame.colordict.THECOLORS['green']\n self.has_reached = True", "def check_floor(self):\r\n if self.current_floor > self.destination_floor:\r\n self.down = True\r\n elif self.current_floor < self.destination_floor:\r\n self.up = True", "def spendFuelToSurvive(self):\n fuelNeeded = self.getLightUpkeep()\n woodNeeded = math.ceil(\n fuelNeeded / self.configs[\"parameters\"][\"RESOURCE_TO_FUEL_RATE\"][\"WOOD\"]\n )\n woodUsed = min(self.cargo[\"wood\"], woodNeeded)\n fuelNeeded -= woodUsed * self.configs[\"parameters\"][\"RESOURCE_TO_FUEL_RATE\"][\"WOOD\"]\n self.cargo[\"wood\"] -= woodUsed\n if fuelNeeded <= 0:\n return True\n\n coalNeeded = math.ceil(\n fuelNeeded / self.configs[\"parameters\"][\"RESOURCE_TO_FUEL_RATE\"][\"COAL\"]\n )\n coalUsed = min(self.cargo[\"coal\"], coalNeeded)\n fuelNeeded -= coalUsed * self.configs[\"parameters\"][\"RESOURCE_TO_FUEL_RATE\"][\"COAL\"]\n self.cargo[\"coal\"] -= coalUsed\n\n if fuelNeeded <= 0:\n return True\n\n uraniumNeeded = math.ceil(\n fuelNeeded / self.configs[\"parameters\"][\"RESOURCE_TO_FUEL_RATE\"][\"URANIUM\"]\n )\n uraniumUsed = min(self.cargo[\"uranium\"], uraniumNeeded)\n fuelNeeded -= uraniumUsed * self.configs[\"parameters\"][\"RESOURCE_TO_FUEL_RATE\"][\"URANIUM\"]\n self.cargo[\"uranium\"] -= uraniumUsed\n\n if fuelNeeded <= 0:\n return True\n\n return fuelNeeded <= 0", "def if_goal_reached(self, pose):\n dx = self.pos.x - pose.x\n dy = self.pos.y - pose.y\n dist = math.sqrt(dx ** 2 + dy ** 2)\n return dist < self.radiu", "def detect_in_bounds(self):\n creature_x, creature_y = self.creature.current_location\n if creature_x < 0 or creature_x >= self.world_width\\\n or creature_y < 0 or creature_y >= self.world_height:\n print('The creature is out of bounds!')\n return False\n return True", "def is_goal(self):\n if self.team1.get_cur_hp() == 0:\n return 1\n elif self.team2.get_cur_hp() == 0:\n return -1\n else:\n return 0", "def attack_friendly(self):\n if self.friendly_pos == self.friendly_fight_pos and self.x_speed != -10:\n self.x_speed = 10\n self.y_speed = -10\n elif self.friendly_pos[0] >=150: #150 being when diriction switches\n self.x_speed = -10\n self.y_speed = 10\n elif self.friendly_pos == self.friendly_fight_pos and self.x_speed == -10:\n self.x_speed = 0\n self.y_speed = 0\n return True", "def goal_occupied(self, view):\n for line in view.obstacles:\n if linesegdist2(line.p1, line.p2, self.goal) < self.radius ** 2:\n return True\n\n for p in view.pedestrians:\n if p.velocity.length2() == 
0.0:\n if p.position.distance_to2(self.goal) < p.radius:\n return True\n\n return False", "def check(self):\n self.lower_bound(5e-4)\n self.upper_bound(5e2)", "def has_undercoordinated_metal(self):\n return self._has_low_metal_coordination()", "def is_elevation(self):\n return not self._is_depth", "def is_hom_alt(self) -> bool:\n return self.is_hom() and (self.allele1 > 0 or self.allele2 > 0)", "def heal(self):\n self.infected = False", "def is_boring(state):\n return state.boring_moves >= state.just_stop", "def working(self, location):\n # say we can detect a thermal if the updraft is 5% of it's peak...\n R = numpy.linalg.norm(\n (self._x - location) * numpy.array([1.0, 1.0, 0.0]))\n min_distance = -numpy.log(0.05) * self._r\n if R < min_distance:\n return self._w > 0\n else:\n return None", "def is_heating(self) -> bool:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"is_heating\"))\r\n return (self._ambient_temperature < self._target_temperature) \\\r\n and self.can_heat \\\r\n and (self._hvac_mode == \"heat\" or self._hvac_mode == \"heat-cool\")", "def check_mario_y_collisions(self):\n ground_step_or_pipe = pygame.sprite.spritecollideany(self.mario, self.ground_step_pipe_group)\n if ground_step_or_pipe:\n self.adjust_mario_for_y_ground_pipe_collisions(ground_step_or_pipe)\n self.test_if_mario_is_falling()", "def testUpperBound(self,time,accel):\n\t\tif (time - self.timestamp) > ParserSettings.TIME_DELTA:#tests lockout threshold of a flick event\n\t\t\tif accel < self.upper:#tests if flick maximum is found, relative to previous magnitude\n\t\t\t\tself.timestamp\t= time#once peak found, set appropriate data and return a flick\n\t\t\t\ttoReturn\t\t= self.upper\n\t\t\t\tself.upper\t\t= 0\n\t\t\t\treturn toReturn\n\t\t\telse:\n\t\t\t\tself.upper = accel#if no flick yet, update most recent flick to test\n\t\t\t\treturn 0\n\t\telse:\n\t\t\treturn 0", "def isNearTo(self, point):\n # BBB: I'm using a majored version of the collide rect to fix a problem with a charas-bouncing-effect on movement... :-|\n x, y = self.currentLevel.transformToScreenCoordinate(point)\n collide_rect = self.collide_rect\n collide_rect.height+=3\n return collide_rect.collidepoint(x, y)", "def is_hungry(self):\r\n if self._hunger > 0:\r\n return True\r\n else:\r\n return False", "def reached_angle(self, angle):\n if self.ros_node.get_data(\"/auto/hood/current/angle\") == angle:\n return True\n return False", "def is_won(self):\n for tile in self:\n if not tile.is_mine and tile.visibility != 1:\n return False\n return True", "def ground_range(self) -> Union[int, float]:\n return self.ground_weapon and self.ground_weapon.range", "def is_unit(self):\n return math.isclose(self.magnitude(), 1)", "def _is_safe_to_back_up(self):\n # Only back up if we're far enough away from home for it\n # to be safe. 
Don't want to back up into the nest!\n home_loc = self.swarmie.get_home_odom_location()\n current_loc = self.swarmie.get_odom_location().get_pose()\n dist = math.sqrt((home_loc.x - current_loc.x) ** 2\n + (home_loc.y - current_loc.y) ** 2)\n if dist > 1.5:\n return True\n\n angle_to_home = self.get_angle_to_face_point(home_loc)\n if abs(angle_to_home) < math.pi / 2:\n return True\n\n return False", "def is_right(self):\n if self.pupils_located:\n return self.horizontal_ratio() <= 0.35", "def goal_test(self, state):\n return state.board==range(self.N*self.N)", "def is_seventyfive_moves(self) -> bool:\n if self.halfmove_clock >= 100:\n if any(self.generate_legal_moves()):\n return True\n return False", "def is_upper_limit(self):\n is_upper = self.get_raw_status() & self.STATUS_ULIM\n return bool(is_upper)", "def check_boundary(self):\n turtle_position = self.turtle.position()\n if turtle_position[0] > self.screen_width/2 - 40 and int(self.turtle.heading()) == 0:\n return False\n if turtle_position[0] < -self.screen_width/2 + 40 and int(self.turtle.heading()) == 180:\n return False\n if turtle_position[1] > self.screen_height/2 - 40 and int(self.turtle.heading()) == 90:\n return False\n if turtle_position[1] < -self.screen_height/2 + 40 and int(self.turtle.heading()) == 270:\n return False\n return True", "def check_enemy_fleet(self):\n if len(self.enemyShips) > 0:\n response = False\n for ship in self.enemyShips:\n if ship.afloat == True:\n response = True\n return response", "def check_goal(self):\n hero = self.objects[0]\n others = self.objects[1:]\n\n for other in others:\n if other.x == hero.x and other.y == hero.y:\n self.objects.remove(other)\n if other.reward == 1:\n self.objects.append(GameObject(self.__new_position(), 1,\n 1, 1, 1, \"goal\"))\n elif other.reward == -1:\n self.objects.append(GameObject(self.__new_position(), 1,\n 1, 0, -1, \"fire\"))\n return other.reward, False\n return 0.0, False", "def IsGameOver(self):\n return any(c.cX + c.width >= self.end_location for c in self.enemies)", "def is_shooting(self):\n if self.gun_interface:\n return self.gun_interface.is_preparing()\n return False", "def isGoal(self):\n for index in range(self.DIM):\n if not self.values('r',index).count(0) is 0:\n return False\n if not self.isValid():\n return False\n return True", "def is_bias(self):\n if self.is_power_onoff():\n return False\n btest = re.compile('\\Wbias\\W|^bias\\W|^bias$|\\Wbias$', re.IGNORECASE)\n return btest.search(self['target']) != None", "def reached_final_point():\n return all(point.constraints[b.atom_indexes] == b.final_dist\n for b in self.bonds)", "def will_infect(population, x, y, strength):\n return strength >= population[y][x] and population[y][x] != -1", "def is_valid(self):\n posit1 = (self.mean_v > 0) & (self.kappa_y > 0) & (self.eta_y > 0)\n posit2 = (self.kappa_s > 0) & (self.eta_s > 0)\n return posit1 & posit2 & self.feller()", "def flap(self):\n\n if self.pos_y > -2 * IMAGES['player'][0].get_height():\n self.vel_y = self.acc_flap\n self.flapped = True\n self.last_flapped = time()", "def isFarad(self):\n return _libsbml.Unit_isFarad(self)", "def is_game_over(self):\n if self.just_cheated_a or self.just_cheated_b:\n return False\n if self.game_stage == 3:\n return (self.die_a.current_value == \"5\" and self.die_b.current_value == \"6\" or\n self.die_a.current_value == \"6\" and self.die_b.current_value == \"5\")\n else:\n return False", "def fight(you, boss):\n you_attack = you['damage'] - boss['armor']\n if you_attack < 1:\n you_attack = 1\n boss_attack = 
boss['damage'] - you['armor']\n if boss_attack < 1:\n boss_attack = 1\n boss_turns = np.ceil(you['hit']/boss_attack)\n you_turns = np.ceil(boss['hit']/you_attack)\n return you_turns <= boss_turns", "def test_is_mountain_in_range(self):\n self.assertTrue(self.user_location.is_mountain_in_range(self.mountain_one))\n self.assertFalse(self.user_location.is_mountain_in_range(self.mountain_two))", "def __isFarFromLevel(self, l):\n\n s = np.mean(self.df['high'] - self.df['low'])\n return np.sum([abs(l-x) < s for x in self.levels]) == 0", "def _will_land(self, platform_contact):\n uprightness = self._uprightness(b2Vec2(0,1))\n\n platform_dot = b2Dot(platform_contact.normal, b2Vec2(0,1))\n\n# print self, platform_contact.shape2.GetBody().userData, platform_dot\n# print platform_contact\n # Reject any platform at an angle greater than 60 degrees\n if platform_dot < 0.5:\n return False\n\n\n score = uprightness * self.body.linearVelocity.Length()\n\n # Only consider score component perpendictular to the platform\n score *= platform_dot\n\n return score < self.stats.max_landing_speed", "def game_over(self):\n return self.lives() < 0", "def can_reach(self, position: tuple) -> bool:\n x = position[0]\n y = position[1]\n dist_to_shoulder = math.sqrt(x**2 + y**2)\n max_dist = self.bicep_length + self.forearm_length\n if dist_to_shoulder > max_dist:\n return False\n return True", "def decide(self) :\n (self.futurX,self.futurY) = self.randomNextPos()\n if self.fishBreedTimeCPT == 0 :\n self.naissance = True\n self.fishBreedTimeCPT = self.fishBreedTime\n else :\n self.fishBreedTimeCPT = self.fishBreedTimeCPT - 1\n\n if self.env.grille[self.futurY][self.futurX] == None :\n self.bougera = True\n else :\n self.bougera = False\n\n self.update()", "def isHittingHigh(self):\n # The switch on the practice bot has opposite configuration (NC vs NO)\n return (self.limHigh.get()) if config.isPracticeBot else (not self.limHigh.get())", "def exposed(self, position):\r\n x, y, z = position\r\n for dx, dy, dz in FACES:\r\n if (x + dx, y + dy, z + dz) not in self.world:\r\n return True\r\n return False" ]
[ "0.63688046", "0.63688046", "0.58916235", "0.58059263", "0.5784403", "0.5761813", "0.57414496", "0.5711356", "0.5707844", "0.5697768", "0.5675407", "0.56628335", "0.56495374", "0.56233096", "0.55607885", "0.555384", "0.55337554", "0.5527628", "0.5523818", "0.55205244", "0.5502741", "0.5499305", "0.5473292", "0.5470162", "0.54622036", "0.5449335", "0.544772", "0.5410455", "0.5399193", "0.53937596", "0.53899014", "0.5388183", "0.5368931", "0.53676575", "0.53520143", "0.53422093", "0.53294855", "0.5323589", "0.53210163", "0.53162277", "0.5314975", "0.531201", "0.530783", "0.52929246", "0.52820766", "0.52820766", "0.5280792", "0.5272965", "0.5263161", "0.52598214", "0.52596223", "0.5255457", "0.525262", "0.5248661", "0.5243043", "0.52320755", "0.5224237", "0.52171636", "0.5206465", "0.52039766", "0.5194789", "0.5190278", "0.51832926", "0.517894", "0.5166597", "0.51630825", "0.51583976", "0.5140441", "0.51370496", "0.51334786", "0.51267004", "0.51259655", "0.51073605", "0.51036143", "0.50935704", "0.5090845", "0.5087048", "0.50868505", "0.5084879", "0.50843954", "0.50839007", "0.5082957", "0.5078785", "0.5077521", "0.50711936", "0.50657153", "0.506569", "0.506363", "0.50585496", "0.50579274", "0.50576574", "0.50568014", "0.5052334", "0.50509864", "0.50452465", "0.5044437", "0.50439984", "0.5036659", "0.5032398", "0.5030512" ]
0.72476137
0
Write a FreeSurfer label.
def write_label(filename, label, verbose=None):
    with open(filename, 'wb') as fid:
        n_vertices = len(label.vertices)
        data = np.zeros((n_vertices, 5), dtype=np.float)
        data[:, 0] = label.vertices
        data[:, 1:4] = label.coords  # self.pos #1e3 *
        data[:, 4] = label.values
        fid.write(b("#%s\n" % label.comment))
        fid.write(b("%d\n" % n_vertices))
        for d in data:
            fid.write(b("%d %f %f %f %f\n" % tuple(d)))
    return label
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def writeLabel(self, label):\r\n\r\n # Label declaration.\r\n self.filename.write(\"({}:{})\\n\".format(self.actualFile.upper(), label.upper()))", "def write_label(self, label):\n self._write_line('label ' + label) # TODO generate unique labels?", "def write_label(self, label: str) -> None:\n self._write(f'({self._file_name}${label})')", "def label(self, s):\n if self._label_width < 0:\n self.stream.write(s)\n self._label_width = len(s)\n else:\n self.flush(_width=self._label_width)\n self.stream.write(s)\n self._label_width = -1", "def write_label(output_file, label, curr_function):\n output_file.write(\"(\" + curr_function[0] + \"$\" + label + \")\" + \"\\n\")", "def write_label(self, label):\n\n symbol = LabelIfGoto.get_label_symbol(self.func_name, label)\n asm_code = \"({})\".format(symbol)\n self.write_line(asm_code)", "def write_if(self, label: str) -> None:\n self._pop_stack_to_d()\n self._write(f'@{self._file_name}${label}')\n self._write('D;JNE')", "def make_unscoped_label(self, label):\n self.write(\"(\" + label + \")\\n\")\n # write (label_name)", "def write_fasta(sequence, label, HANDLE):\n HANDLE.write(\">\"+label+\"\\n\")\n HANDLE.write(sequence + \"\\n\")", "def write_goto(self, label: str) -> None:\n self._write(f'@{self._file_name}${label}')\n self._write('0;JMP')", "def write_labels():\n with open('../data/labels.txt', 'w') as labels_file:\n labels = generate_labels()\n labels_file.write('\\n'.join(labels))", "def write_if(self, label):\n label = self.label_by_scope(label)\n self.pop_stack_to_d()\n self.write(\"@\" + label + \"\\nD;JNE\\n\")", "def make_scoped_label(self, label):\n label = self.label_by_scope(label)\n self.write(\"(\" + label + \")\\n\")\n # write (f$label_name)", "def free_shipping(self, free_shipping):\n\n self._free_shipping = free_shipping", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def label(self, label):\n\n self._label = label", "def touch(self, dst, label=None):\r\n self.write('', dst, label, mode='a')", "def touch(self, dst, label=None):\r\n self.write('', dst, label, mode='a')", "def label(self, message, fg = None, bg = None, bold = None, blink = None):\n self.savepos()\n self.out.write(self._colorize(message, fg, bg, bold, blink))\n self.restorepos()", "def _assign_label(self, format):\n cht_tmpl = self.out_label_tmpl\n return cht_tmpl.substitute(format)", "def write(self, fout: BinaryIO, dflevel: Level, _: Any) -> None:\n with DFWriter(fout, noclose=True) as writer:\n writer.write_level(dflevel)", "def label_users(self):\n record_unit = 1000\n print self.friendship_graph.number_of_nodes()\n print self.friendship_graph.number_of_edges()\n\n for num, node in enumerate(self.friendship_graph.nodes()):\n fake_flag = self.determine_spammer_by_percentage(node)\n self.friendship_graph.node[node]['fake'] = fake_flag\n # print self.friendship_graph.node[node]\n if num % record_unit == 0:\n print num\n print time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))\n nx.write_gpickle(self.friendship_graph, \"graph/firendship_new_label%d.pickle\" % num)\n if num != 0:\n 
os.remove(\"graph/firendship_new_label%d.pickle\" % (num - record_unit))\n\n nx.write_gpickle(self.friendship_graph, \"graph/firendship_0.8fake_%d.pickle\" % num)", "def touch(self, dst, label=None):\n self.write('', dst, label, mode='a')", "def fl_set_object_label(ptr_flobject, label):\n _fl_set_object_label = library.cfuncproto(\n library.load_so_libforms(), \"fl_set_object_label\", \\\n None, [cty.POINTER(xfdata.FL_OBJECT), xfdata.STRING], \\\n \"\"\"void fl_set_object_label(FL_OBJECT * ob, const char * label) \"\"\")\n library.check_if_flinitialized()\n library.verify_flobjectptr_type(ptr_flobject)\n s_label = library.convert_to_bytestrc(label)\n library.keep_elem_refs(ptr_flobject, label, s_label)\n _fl_set_object_label(ptr_flobject, s_label)", "def write_note(self, note):\n self.output.write_frames(note)\n self.output.sync()", "def write_bgf(self, filename):\n body = [\"BIOGRF{0:>5s}\\n\".format(self.biogrf)]\n if self.descrp:\n body.append(\"DESCRP {0}\\n\".format(self.descrp))\n else:\n body.append(\"DESCRP {0}\\n\".format(filename))\n body.append(\"FORCEFIELD {0}\\n\".format(self.ff))\n body.append(\"FORMAT ATOM (a6,1x,i5,1x,a5,1x,a3,1x,a1,1x,a5,3f10.5\"\n \",1x,a5,i3,i2,1x,f8.5,i2,i4,f10.5)\\n\")\n atoms = []\n hetatms = []\n conect = []\n for atom in self.atoms:\n a, c = atom.writeline()\n if atom.record == 'ATOM':\n atoms.append(a)\n elif atom.record == 'HETATM':\n hetatms.append(a)\n conect.append(c)\n body.extend(atoms)\n body.extend(hetatms)\n body.append(\"FORMAT CONECT (a6,14i6)\\nFORMAT ORDER (a6,i6,13f6.3)\\n\")\n body.extend(conect)\n body.append(\"END\\n\")\n with open(filename, 'w') as f:\n f.writelines(body)", "def slot_debug(self, dummy_gox, (txt)):\r\n self.write(txt)", "def _set_lsp_frr_out_label(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"lsp-frr-out-label\", rest_name=\"lsp-frr-out-label\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_frr_out_label must be of a type compatible with uint32\"\"\",\n 'defined-type': \"uint32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"lsp-frr-out-label\", rest_name=\"lsp-frr-out-label\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)\"\"\",\n })\n\n self.__lsp_frr_out_label = t\n if hasattr(self, '_set'):\n self._set()", "def label(self, label: str):\n\n self._label = label", "def create_metering_label(self, body=None):\r\n return self.post(self.metering_labels_path, body=body)", "def set_label(self):\n try:\n self.get_mask()\n except ex.excError:\n pass\n self.label = \"netns %s %s/%s %s@%s\" % (self.mode, self.ipname, to_cidr(self.mask), self.ipdev, self.container_rid)", "def create_label(self, org, name):\n pass", "def printlabel(self, label, plugin=None, destination=None, *args, **kwargs):\n\n if isinstance(label, (LabelPart, LabelStock, LabelLocation)):\n label_id = label.pk\n else:\n label_id = label\n\n # Set URL to 
use\n URL = f'/label/{self.LABELNAME}/{label_id}/print/'\n\n params = {\n f'{self.LABELITEM}[]': self.pk\n }\n\n if plugin is not None:\n # Append profile\n params['plugin'] = plugin\n\n # If API version less than 130, file download is provided directly\n if self._api.api_version < 130 and plugin is None:\n download_url = URL\n else:\n # Perform API request, get response\n response = self._api.get(URL, params=params)\n download_url = response.get('file', None)\n\n # Label file is availble for download\n if download_url and destination is not None:\n if os.path.exists(destination) and os.path.isdir(destination):\n # No file name given, construct one\n # Otherwise, filename will be something like '?parts[]=37'\n destination = os.path.join(\n destination,\n f'Label_{self.LABELNAME}{label}_{self.pk}.pdf'\n )\n\n # Use downloadFile method to get the file\n return self._api.downloadFile(url=download_url, destination=destination, params=params, *args, **kwargs)\n\n else:\n return response", "def writetif(self,outputname,):\n pass", "def writer(queue):\n led_disp = led.LED()\n while True:\n line = queue.get()\n if line.startswith('$GPVTG'):\n kph = line.split(',')[7]\n if kph == '':\n led_disp.set('NO GPS')\n else:\n led_disp.set('{} kph'.format(kph))", "def write(self, frame):\n super(SerialKISS, self).write(frame.encode_kiss())", "def write_go_to(self, label):\n self._write_line('goto ' + label)", "def _write_labels(self, labels: List[str], labels_path: Path):\n labels_path.write_text(escape_line_delimited_texts(labels))", "def _write_labels(self, labels: List[str], labels_path: Path):\n labels_path.write_text(escape_line_delimited_texts(labels))", "def write_oif(self, oifn):\n print(\"I don't know how to write \" + oifn + \" at this moment.\")", "def dump_label(self, node: Node) -> str:\n\n labelStr = f\"\"\"{{ {{<Inputs>Inputs}}|\n {{ {node.get_kind_name()}\\lname: {node.get_name()} }}|\n {{<Outputs>Outputs}} }}\"\"\"\n return labelStr", "def register_collection_label(self, name, weight):\n save_path = os.path.join(self.session_dir, 'collection_label.txt')\n with open(save_path, 'w') as in_file:\n in_file.write('{}: {}\\n{}: {}\\n'.format('name', name, 'weight', weight))", "def ledgerWrite(self):\n if(self.back == 'y'):\n open(\"ledger.txt\",'a').write(str(num_files) + ' ' + str(self.file_count) + ' ' + self.file_path + ' ' + \"y\" + '\\n')\n else:\n open(\"ledger.txt\",'a').write(str(num_files) + ' ' + str(self.file_count) + ' ' + self.file_path + ' ' + \"n\" + '\\n')\n dbUp.upload_file(\"ledger.txt\", '/ledger.txt')", "def write_label_file(labels_to_class_names, labels_filename):\n with tf.gfile.Open(labels_filename, \"w\") as f:\n for label in labels_to_class_names:\n class_name = labels_to_class_names[label]\n f.write('%d:%s\\n'%(label, class_name))", "def set_label(self):\n try:\n self.get_mask()\n except ex.excError:\n pass\n try:\n self.getaddr()\n addr = self.addr\n except ex.excError:\n addr = self.ipname\n self.label = \"%s/%s %s/%s\" % (addr, to_cidr(self.mask), self.ipdev, self.ipdevExt)\n if self.ipname != addr:\n self.label += \" \" + self.ipname", "def label(self, value):\n\t\tself._label = value", "def write(self, notification):", "def _Name(self, t):\n self.write(t.id)", "def write_if(self, label):\n self._write_line('if-goto ' + label)", "def output_classLabel_to_txt(save_path):\n file_obj = open(save_path,'w')\n length = len(class_label)\n for i in range(0,length):\n line = '%d:%s'%(i,class_label[i])\n file_obj.writelines(line+'\\n')\n return True", "def Print(self,text = 
\"\"):\n self.Bus.Write_String(self.Address,0x00, text)", "def gracedb_add_label(gracedb_id, label):\n\n # begin GraceDB API\n client = gracedb_rest.GraceDb()\n\n # append comment to GraceDB entry\n out = client.writeLabel(gracedb_id, label)", "def _get_lsp_frr_out_label(self):\n return self.__lsp_frr_out_label", "def writeIf(self, label):\r\n\r\n # Create a list to store all the assembly commands and write them later\r\n translated_commands = []\r\n\r\n # Pops the topmost stack element.\r\n # If it's not zero, jumps to the VM command following the label\r\n translated_commands.append(\"@SP\")\r\n translated_commands.append(\"M=M-1\")\r\n translated_commands.append(\"A=M\")\r\n translated_commands.append(\"D=M\")\r\n translated_commands.append(\"@{}:{}\".format(self.actualFile.upper(), label.upper()))\r\n translated_commands.append(\"D;JNE\")\r\n\r\n # Write all the commands stored in the list and write them on the file.\r\n for line in translated_commands:\r\n self.filename.write(line + \"\\n\")", "def createLabel(self, address: ghidra.program.model.address.Address, name: unicode, namespace: ghidra.program.model.symbol.Namespace, makePrimary: bool, sourceType: ghidra.program.model.symbol.SourceType) -> ghidra.program.model.symbol.Symbol:\n ...", "def set_label(self, value: str = \"nowhere\"):\r\n self._logger.info(log_message_formatter(\r\n \"set\", f\"{self}\", \"label\"))\r\n self._name = value", "def writeGoto(self, label):\r\n\r\n # Create a list to store all the assembly commands and write them later\r\n translated_commands = []\r\n\r\n # Unconditional jump to the VM command following the label.\r\n translated_commands.append(\"@{}:{}\".format(self.actualFile.upper(), label.upper()))\r\n translated_commands.append(\"0;JMP\")\r\n\r\n # Write all the commands stored in the list and write them on the file.\r\n for line in translated_commands:\r\n self.filename.write(line + \"\\n\")", "def write_to(self, fp):\n fp.write(self.text)", "def write_display(self):\n for i, value in enumerate(self.buffer):\n self.bus.write_byte_data(self.address, i, value)", "def fl_draw_object_label(ptr_flobject):\n _fl_draw_object_label = library.cfuncproto(\n library.load_so_libforms(), \"fl_draw_object_label\", \\\n None, [cty.POINTER(xfdata.FL_OBJECT)], \\\n \"\"\"void fl_draw_object_label(FL_OBJECT * ob)\"\"\")\n library.check_if_flinitialized()\n library.verify_flobjectptr_type(ptr_flobject)\n library.keep_elem_refs(ptr_flobject)\n _fl_draw_object_label(ptr_flobject)", "def write(self, frame):\n super(TCPKISS, self).write(frame.encode_kiss())", "def frequency(self, freq):\n self.load.write(f'TRAN:FREQ {freq}')", "def DrawLabel(self, screen):\r\n screen.blit(self.label, self.pos)", "def write_fasta_line(demultiplexed_seqs_f,\r\n fasta_seq,\r\n label_line,\r\n keep_barcode,\r\n bc_len):\r\n\r\n if keep_barcode:\r\n final_seq = fasta_seq\r\n else:\r\n final_seq = fasta_seq[bc_len:]\r\n\r\n demultiplexed_seqs_f.write(\">%s\\n\" % label_line)\r\n demultiplexed_seqs_f.write(\"%s\\n\" % final_seq)", "def createLabel(self, address: ghidra.program.model.address.Address, name: unicode, makePrimary: bool, sourceType: ghidra.program.model.symbol.SourceType) -> ghidra.program.model.symbol.Symbol:\n ...", "def release(self, req, ino, fi):\r\n self.reply_err(req, 0)", "def on_label(self, payload):\n pass", "def write(self, trollpacket):\n\t\tpacket = self.parse_to_dref(trollpacket.name, trollpacket.value)\n\t\tself.xp_write.send(packet)", "def write_goto(self, label):\n\n symbol = 
LabelIfGoto.get_label_symbol(self.func_name, label)\n for line in LabelIfGoto.GOTO:\n line = line.format(symbol=symbol)\n self.write_line(line)", "def write(self, fname):\n pass", "def _linked_feature_label(linked_feature):\n\treturn \"\"\"<\n <B>{name}</B><BR />\n F={num_features} D={projected_dim}<BR />\n {fml}<BR />\n <U>{source_translator}</U><BR />\n <I>{source_layer}</I>\n >\"\"\".format(\n\t\tname=linked_feature.name, num_features=linked_feature.size, projected_dim=linked_feature.embedding_dim, fml=linked_feature.fml, source_translator=linked_feature.source_translator, source_layer=linked_feature.source_layer\n\t)", "def write_label_file(labels_to_class_names, dataset_dir, filename='labels.txt'):\n labels_filename = os.path.join(dataset_dir, filename)\n with tf.gfile.Open(labels_filename, 'w') as f:\n for label in labels_to_class_names:\n class_name = labels_to_class_names[label]\n f.write('%d:%s\\n' % (label, class_name))", "def _writeWaveform(self, fo, header='', binary=False):\n # TODO: Write channel data to file\n pass", "def footprint(self):\n return self._label_footprint", "def dumpLabel(g):\n for v in g.nodes():\n g.node[v]['label'] = getLabel(g.node[v])\n if 'attribute' in g.node[v]:\n del g.node[v]['attribute']\n\n for e in g.edges():\n g.edge[e[0]][e[1]]['label'] = getLabel(g.edge[e[0]][e[1]])\n if 'attribute' in g.edge[e[0]][e[1]]:\n del g.edge[e[0]][e[1]]['attribute']", "def createLabel(self, address: ghidra.program.model.address.Address, name: unicode, makePrimary: bool) -> ghidra.program.model.symbol.Symbol:\n ...", "def write_tdm_to_file(pkt):\n\n global g_tdm_cnt\n global g_binfile\n\n if UDP in pkt:\n if pkt[UDP].dport == TDM_PORT:\n f = open(g_binfile, 'a+b')\n f.write(bytes(pkt[UDP].payload))\n f.close()\n g_tdm_cnt += 1\n print(\"\\rTDM Count: {0}. CTRL-C to quit\".format(g_tdm_cnt), end=\" \")", "def SetLabel(self, label):\r\n\r\n self.label = label", "def draw_shape_label(self, label, xform, colour):\n #TODO deal with alignment, rotation\n pos = xform.chain(Point(label.x, label.y))\n self.canvas.text((pos.x, pos.y), label.text, fill=colour)", "def SetLabel(self, label):\n \n self.Freeze()\n self.label_str = label\n self.label.SetLabel(label)\n self.Layout()\n self.Thaw()", "def make_footnote(doc: nodes.document, label: str, uri: str) -> nodes.footnote:\n footnote = nodes.footnote(uri)\n para = nodes.paragraph()\n para.append(nodes.Text(uri))\n footnote.append(para)\n footnote.insert(0, nodes.label('', label))\n doc.note_autofootnote(footnote)\n return footnote", "def write_label(self, contig_name, width, height, font, title_width, upper_left, vertical_label,\n strand, canvas, horizontal_centering=False, center_vertical=False, chop_text=True,\n label_color=(50, 50, 50, 255)):\n upper_left = list(upper_left) # to make it mutable\n shortened = contig_name[-title_width:] # max length 18. 
Last characters are most unique\n txt = Image.new('RGBA', (width, height))#, color=(0,0,0,50))\n txt_canvas = ImageDraw.Draw(txt)\n text_width = txt_canvas.textsize(shortened, font)[0]\n if not chop_text and text_width > width:\n txt = Image.new('RGBA', (text_width, height)) # TODO performance around txt_canvas\n txt_canvas = ImageDraw.Draw(txt)\n if center_vertical or vertical_label: # Large labels are centered in the column to look nice,\n # rotation indicates strand in big text\n vertically_centered = (height // 2) - multi_line_height(font, shortened, txt)//2\n else: # Place label at the beginning of gene based on strand\n vertically_centered = height - multi_line_height(font, shortened, txt) # bottom\n if strand == \"+\":\n vertically_centered = 0 # top of the box\n txt_canvas.multiline_text((0, max(0, vertically_centered)), shortened, font=font,\n fill=label_color)\n if vertical_label:\n rotation_direction = 90 if strand == '-' else -90\n txt = txt.rotate(rotation_direction, expand=True)\n upper_left[1] += -4 if strand == '-' else 4\n if horizontal_centering:\n margin = width - text_width\n upper_left[0] += margin // 2\n canvas.paste(txt, (upper_left[0], upper_left[1]), txt)", "def write_bytes_fixed(self, b: bytes) -> None:\n self.write(struct.pack(f\"{len(b)}s\", b))", "def format_label(feature):\n label = feature[\"label\"]\n level = ZoningLayer.check_zone(feature, \"M\")\n pattern = ZoningLayer.upattern if level else ZoningLayer.rpattern\n try:\n label = pattern.format(int(feature[\"label\"]))\n except Exception:\n pass\n return label", "def writeKitty(b, of):\n with open(of, \"w\") as fh:\n for r in b:\n fh.write(\"%s 0 0 0 %d %d %d %d 0 0 0 0 0 0 0\\n\" % ( r[0], int(r[1]*dw), int(r[2]*dh), int(r[3]*dw), int(r[4]*dh) ) )", "def setLabel(self, label):\r\n\t\tself.label = label", "def setLabel(self, label):\r\n\t\tself.label = label", "def setLabel(self, label):\r\n\t\tself.label = label", "def print_label(self, package_num=None):\r\n if package_num:\r\n packages = [self.shipment.response.CompletedShipmentDetail.CompletedPackageDetails[package_num]]\r\n else:\r\n packages = self.shipment.response.CompletedShipmentDetail.CompletedPackageDetails\r\n\r\n for package in packages:\r\n label_binary = binascii.a2b_base64(package.Label.Parts[0].Image)\r\n self._print_base64(label_binary)", "def write_if(self, label):\n\n symbol = LabelIfGoto.get_label_symbol(self.func_name, label)\n for line in LabelIfGoto.IF_GOTO:\n line = line.format(symbol=symbol)\n self.write_line(line)", "def write():\n pass" ]
[ "0.6604549", "0.6554617", "0.6488683", "0.59178406", "0.58213866", "0.58023846", "0.5709398", "0.5536997", "0.54463476", "0.5442079", "0.5411397", "0.5353073", "0.5349966", "0.50120467", "0.49916384", "0.49916384", "0.49916384", "0.49916384", "0.49916384", "0.49916384", "0.49916384", "0.49916384", "0.49916384", "0.49916384", "0.49916384", "0.4970165", "0.4970165", "0.49571764", "0.49430633", "0.49369434", "0.49233925", "0.49231318", "0.49194565", "0.49157882", "0.49025017", "0.4894943", "0.48820072", "0.48640564", "0.4859321", "0.48561972", "0.48478323", "0.48438838", "0.48335424", "0.482632", "0.48174655", "0.48163497", "0.48162222", "0.48162222", "0.48161379", "0.48160258", "0.4802616", "0.47877336", "0.47799262", "0.4773507", "0.4771832", "0.47675925", "0.47518376", "0.4751545", "0.47471526", "0.4746832", "0.47403586", "0.47385263", "0.4736683", "0.47007102", "0.4698575", "0.46985403", "0.4687881", "0.46847886", "0.4681326", "0.46772987", "0.46690917", "0.4655518", "0.4650936", "0.46493065", "0.4648994", "0.46479008", "0.46422932", "0.46378103", "0.46374074", "0.4631037", "0.4625118", "0.4623868", "0.46117464", "0.4606541", "0.46044922", "0.45963115", "0.45958775", "0.45923048", "0.45881665", "0.4586649", "0.45855734", "0.4584325", "0.45804337", "0.45791137", "0.45752233", "0.45752233", "0.45752233", "0.4574003", "0.4572253", "0.45715803" ]
0.5579701
7
Return sparse matrix with edges as an adjacency matrix.
def mesh_edges(tris):
    if np.max(tris) > len(np.unique(tris)):
        raise ValueError('Cannot compute connectivity on a selection of '
                         'triangles.')
    npoints = np.max(tris) + 1
    ones_ntris = np.ones(3 * len(tris))
    a, b, c = tris.T
    x = np.concatenate((a, b, c))
    y = np.concatenate((b, c, a))
    edges = coo_matrix((ones_ntris, (x, y)), shape=(npoints, npoints))
    edges = edges.tocsr()
    edges = edges + edges.T
    return edges
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_adjacency_matrix(graph):\n index_1 = [edge[0] for edge in graph.edges()]\n index_2 = [edge[1] for edge in graph.edges()]\n values = [1 for edge in graph.edges()]\n node_count = max(max(index_1)+1,max(index_2)+1)\n A = sparse.coo_matrix((values, (index_1,index_2)),shape=(node_count,node_count),dtype=np.float32)\n return A", "def getSparseAdjacencyMatrix( graph, attribute=None, transposed=False ):\n if (attribute is not None) and (attribute not in graph.es.attribute_names()):\n raise ValueError( \"Attribute does not exists.\" )\n \n row = []\n col = []\n data = []\n \n if attribute is None:\n if transposed:\n for edge in graph.es():\n s,t = edge.tuple\n row.append(t)\n col.append(s)\n else:\n for edge in graph.es():\n s,t = edge.tuple\n row.append(s)\n col.append(t)\n data = np.ones(len(graph.es()))\n else:\n if transposed:\n for edge in graph.es():\n s,t = edge.tuple\n row.append(t)\n col.append(s)\n else:\n for edge in graph.es():\n s,t = edge.tuple\n row.append(s)\n col.append(t)\n data = np.array(graph.es()[attribute])\n\n return sparse.coo_matrix((data, (row, col)) , shape=(len(graph.vs), len(graph.vs))).tocsr()", "def convert_sparse_to_igraph(indices, matrix):\n # sources, targets = matrix.nonzero()\n # weights = matrix[sources, targets]\n # weights = np.array(weights)[0]\n # print(dir(louvain))\n # ig = igraph.Graph(zip(sources, targets), directed=True,\n # edge_attrs={'weight': weights})\n # return ig\n g = igraph.Graph.Adjacency((matrix > 0).tolist())\n g.es['weight'] = matrix[matrix.nonzero()]\n # g.vs['label'] = node_names # or a.index/a.columns\n return g", "def formAdjacencyMatrix(self):\n self.adjacencyMatrix = dict()\n for i in self.node:\n self.adjacencyMatrix[i] = dict()\n for j in self.node:\n self.adjacencyMatrix[i][j] = 0\n \n for ij in self.link:\n self.adjacencyMatrix[self.link[ij].tail][self.link[ij].head] = 1", "def get_adjacency_matrix(self):\n\n # Get dimension of future matrix\n dim = max([node.value for node in self.nodes])\n\n # Initialize square matrix of zeros\n # Matrix is square and indexes by from, to node values\n adjacency_matrix = [[0 for _ in range(dim+1)] for _ in range(dim+1)]\n\n # Insert edge value at the from, to coordinates\n # That is, fully identify each \"from, edge, to\" triplet\n for edge in self.edges:\n row = edge.node_from.value\n col = edge.node_to.value\n val = edge.value\n\n adjacency_matrix[row][col] = val\n\n # Return matrix of edge values indexed by from, to node values\n return adjacency_matrix", "def adjacency(G, nodelist=None, weight=\"weight\"):\n\n if nodelist is None:\n nodelist = G.nodes()\n\n A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, format=\"csr\")\n\n return A", "def adjacency_matrix(g):\n nodes = sorted(g.keys())\n adj = []\n for row_node in nodes:\n row = []\n for column_node in nodes:\n if column_node in g[row_node]:\n row.append(1)\n else:\n row.append(0)\n adj.append(row)\n \n return adj", "def get_adjacency_matrix(self):\n m = zeros(self.size)\n perm = self.array_form\n for i in xrange(self.size - 1):\n m[perm[i], perm[i + 1]] = 1\n return m", "def edges_to_adjacency_matrix(mesh):\n adja = graph.edges_to_coo(mesh.edges,\n data=np.ones(len(mesh.edges),\n dtype=np.int8))\n\n return sparse.triu(adja) + sparse.tril(adja).transpose()", "def create_adjacency_matrix(self, edges):\n matrix = np.zeros([self.max_words, self.max_words * self.edge_types * 2])\n for edge in edges:\n src = edge[0]\n e_type = edge[1]\n dest = edge[2]\n self.set_matrix(matrix, src, dest, e_type, 1)\n return 
matrix", "def compute_adjacency_matrix(G):\n\n iG = nx.convert_node_labels_to_integers(G)\n adj_list = iG.adjacency_list()\n n_nodes = len(iG.nodes())\n\n adj_mat = np.zeros((n_nodes, n_nodes))\n for x in xrange(n_nodes):\n adj_mat[x, adj_list[x]] = 1\n\n return adj_mat", "def adjaceny_matrix(self):\n \n try:\n return self._adj_matrix\n except AttributeError:\n am = np.zeros((self.n, self.n))\n for edge, weight in self.weights.items():\n am[edge[0], edge[1]] = weight\n self._adj_matrix = am\n return self._adj_matrix", "def adj_matrix(self):\n return nx.adj_matrix(self.network)", "def adjacency( graph : SpatialGraph, \n normalize : bool = True,\n sparse : bool = False\n ) -> np.ndarray :\n if graph.directed:\n raise NotImplementedError(\"Directed graphs are currently not supported.\")\n dtype = np.float if normalize else np.int\n\n adj = np.zeros((graph.num_nodes, graph.num_nodes), dtype=dtype)\n if sparse:\n adj = sp.coo_matrix(adj)\n for node in graph.nodes.values():\n for adj_node in node.neighbours.values():\n adj[node.id, adj_node.id] = 1\n return normalize_adj(adj, sparse) if normalize else adj", "def to_sparse(self):\n from divisi2.sparse import SparseMatrix\n return SparseMatrix(self, self.row_labels, self.col_labels)", "def dense_to_sparse(adj):\n assert adj.dim() >= 2 and adj.dim() <= 3\n assert adj.size(-1) == adj.size(-2)\n\n index = adj.nonzero(as_tuple=True)\n #print(index)\n edge_attr = adj[index]\n\n if len(index) == 3:\n batch = index[0] * adj.size(-1)\n index = (batch + index[1], batch + index[2])\n\n return torch.stack(index, dim=0), edge_attr", "def get_adjacency_matrix(self, rearranged_data):\n data = np.ones(self.num_edges)\n matrix = csr_matrix((data, (rearranged_data['FromNodeId'], rearranged_data['ToNodeId'])),\n shape=(self.num_nodes, self.num_nodes))\n return matrix", "def _compute_adjacency_matrix(self):\n\n # Set up a quick-reference index to map cells to indexes\n for i, cell in enumerate(self.sim.cells):\n self._cell_indexes[cell] = i\n\n if all([self.sim.hub.cells == [self.sim.damaged],\n self.sim.damaged not in self.sim.cells]):\n # Add the \"damaged\" virtual cell to the index if we need it\n self._cell_indexes[self.sim.damaged] = len(self.sim.cells)\n\n node_count = len(list(self._cell_indexes.keys()))\n g_sparse = np.zeros((node_count, node_count), dtype=float)\n g_sparse[:] = np.inf\n\n for cluster in self.sim.clusters + [self.sim.hub]:\n cluster_tour = cluster.tour\n i = len(cluster_tour.vertices) - 1\n j = 0\n while j < len(cluster_tour.vertices):\n start_vertex = cluster_tour.vertices[i]\n stop_vertex = cluster_tour.vertices[j]\n\n start_pt = cluster_tour.points[start_vertex]\n stop_pt = cluster_tour.points[stop_vertex]\n distance = np.linalg.norm(stop_pt - start_pt)\n\n start_seg = cluster_tour.objects[start_vertex]\n stop_seg = cluster_tour.objects[stop_vertex]\n\n start_index = self._cell_indexes[start_seg]\n stop_index = self._cell_indexes[stop_seg]\n\n g_sparse[start_index, stop_index] = distance\n\n i = j\n j += 1\n\n g_sparse = sp.csgraph_from_dense(g_sparse, null_value=np.inf)\n return g_sparse", "def adjacency(self):\n if self.E > 0:\n i = self.edges[:, 0]\n j = self.edges[:, 1]\n adj = coo_matrix((np.ones(self.E), (i, j)),\n shape=(self.V, self.V))\n else:\n adj = coo_matrix((self.V, self.V))\n return adj", "def adjacency(F):\n\n n = F.max()+1\n\n rows = np.concatenate([F[:, 0], F[:, 0],\n F[:, 1], F[:, 1], \n F[:, 2], F[:, 2]])\n\n cols = np.concatenate([F[:, 1], F[:, 2], \n F[:, 0], F[:, 2], \n F[:, 0], F[:, 1]])\n\n combos = 
np.column_stack([rows, cols])\n\n [_, idx] = np.unique(combos, axis=0, return_index=True)\n A = sparse.csr_matrix((np.ones(len(idx)), (combos[idx, 0], combos[idx, 1])), shape=(n, n))\n\n return A", "def get_adj_matrix(self):\n # This is currently implemented for the case when there are only two edge types (edge and no-edge)\n assert self.Z_edges_logits.shape[1] == 2\n Z_edge_logits = self.Z_edges_logits.detach().cpu().numpy() # [num_edges, 2]\n prob = np.exp(Z_edge_logits) / np.sum(np.exp(Z_edge_logits), axis=-1, keepdims=True) # [num_edges, 2]\n adj_matrix = np.zeros((self.num_nodes, self.num_nodes))\n mask = np.ones((self.num_nodes, self.num_nodes), dtype=bool) & ~np.eye(self.num_nodes, dtype=bool)\n adj_matrix[mask] = prob[:, 1]\n return adj_matrix", "def return_adjacencyMatrix(self):\n return self.__mat", "def edges_to_matrix(edge_list: List[Tuple[int, int]], add_reverse_edges: bool,\n shape: Tuple[int, int], dtype: TypeVar=bool, sparse: bool=True):\n matrix = scipy.sparse.csc_matrix(\n (numpy.ones(len(edge_list)), zip(*edge_list)), dtype=dtype, shape=shape,\n )\n\n if add_reverse_edges:\n matrix = (matrix + matrix.T) > 0\n matrix = matrix.astype(dtype)\n\n if not sparse:\n matrix = matrix.toarray()\n\n return matrix", "def _create_adjacency_matrix(layer_edges):\n A = defaultdict(int)\n for l, edges in list(layer_edges.items()):\n for edge in edges:\n A[(edge[0], edge[1], l)] += 1\n A[(edge[1], edge[0], l)] += 1 \n return A", "def makesparse(matrix):\n n = matrix[0].size\n elements = []\n for i in range(n):\n for j in range(n):\n if matrix[i][j] != 0 :\n temp = MatrixElement(i, j, matrix[i][j])\n elements.append(temp)\n return SparseMatrix(n, elements)", "def adjacencyMatrix(R, edges):\n A = np.zeros((len(R),len(R)))\n for i in range(0, len(edges)):\n A[edges[i][0]][edges[i][1]] = 1\n return A", "def from_nxgraph(G):\n return nx.to_scipy_sparse_matrix(G).astype('float32')", "def adj_matrix(G,nodelist=None,weight='weight'):\n return nx.to_numpy_matrix(G,nodelist=nodelist,weight=weight)", "def to_amat(self, node_list=None, sparse=False) -> np.ndarray:\n if not node_list:\n node_list = sorted(self._nodes)\n node2ix = {node: i for i, node in enumerate(node_list)}\n\n if sparse:\n raise NotImplementedError\n # js, ks = [], []\n # for j, k in self._edges:\n # js.append(j)\n # ks.append(k)\n # js.append(k)\n # ks.append(j)\n # return spmatrix(1, js, ks)\n amat = np.zeros([self.num_nodes, self.num_nodes], dtype=int)\n\n for i, j in self._edges:\n amat[node2ix[i], node2ix[j]] = True\n amat[node2ix[j], node2ix[i]] = True\n return amat", "def adjacency_matrix(edge_index: nb.int64[:,:],\n n: nb.int64) -> nb.boolean[:,:]:\n adj_mat = np.eye(n, dtype=np.bool_)\n for e in edge_index:\n adj_mat[e[0],e[1]] = True\n return adj_mat", "def get_sparse_adj(ent_num, triples):\n adj, degree = get_mat(ent_num, triples)\n\n indices = []\n values = []\n\n for fir, sec in adj:\n indices.append((sec, fir))\n values.append(adj[(fir, sec)] / math.sqrt(degree[fir]) / math.sqrt(degree[sec]))\n\n indices = torch.tensor(indices).t()\n\n adj = torch.sparse_coo_tensor(indices=indices, values=values, size=[ent_num, ent_num])\n\n return adj, degree", "def vertex_adjacency_matrix(self):\n if '_V_adjacency_matrix' not in self.__dict__:\n self._init_vertex_adjacency_matrix()\n return self._V_adjacency_matrix;", "def _create_distance_matrix(mesh):\n l = len(mesh.faces)\n\n faces = polygons(mesh.faces, mesh.vertices, mesh.face_normals, mesh.area_faces)\n # map from edge-key to adjacent faces\n adj_faces_map = {}\n # find 
adjacent faces by iterating edges\n    for index, face in enumerate(faces):\n        for edge in face.edge_keys:\n            if (edge[0] > edge[1]):\n                new_edge = (edge[1], edge[0])\n            else:\n                new_edge = (edge[0], edge[1])\n            if new_edge in adj_faces_map:\n                adj_faces_map[new_edge].append(index) # one-to-many\n            else:\n                adj_faces_map[new_edge] = [index]\n\n    # helping vectors to create sparse matrix later on\n    row_indices = []\n    col_indices = []\n    Gval = []  # values for matrix of angular distances\n    Aval = []  # values for matrix of geodesic distances\n    # iterate adjacent faces and calculate distances\n    for edge, adj_faces in adj_faces_map.items():\n        if len(adj_faces) == 2:\n            i = adj_faces[0]\n            j = adj_faces[1]\n            # the two faces connected by one edge\n            Gtemp = _geodesic_distance(mesh, faces[i], faces[j], edge) # geodesic distance\n            Atemp = _angular_distance(mesh, faces[i], faces[j]) # angular distance # actually a cosine distance\n            Gval.append(Gtemp)\n            Aval.append(Atemp)\n            row_indices.append(i)\n            col_indices.append(j)\n            # add symmetric entry\n            Gval.append(Gtemp)\n            Aval.append(Atemp)\n            row_indices.append(j)\n            col_indices.append(i)\n\n        elif len(adj_faces) > 2:\n            print(\"Edge with more than 2 adjacent faces: \" + str(adj_faces) + \"!\")\n\n    Gval = numpy.array(Gval)\n    Aval = numpy.array(Aval)\n    # delta is a global variable, passed in externally\n    values = delta * Gval / numpy.mean(Gval) + \\\n             (1.0 - delta) * Aval / numpy.mean(Aval)\n\n    # create sparse matrix\n    distance_matrix = scipy.sparse.csr_matrix(\n        (values, (row_indices, col_indices)), shape=(l, l))\n    return distance_matrix", "def expand_csr_adj(adj, count:int):\n r,c = adj.shape\n \n adj = sp.vstack(\n [adj, sp.csr_matrix(np.zeros((count, c)))])\n adj = sp.hstack(\n [adj, sp.csr_matrix(np.zeros((r + count, count)))])\n \n return adj", "def adjacency_matrices_(self):\n return self._adjacency_matrices", "def adjacency_matrices_(self):\n return self._adjacency_matrices", "def make_sparse(data):\n assert data.train_pos_edge_index is not None\n\n (row, col), N = data.train_pos_edge_index, data.num_nodes\n perm = (col * N + row).argsort()\n row, col = row[perm], col[perm]\n\n value = [data.edge_id[(row[i] * N + col[i]).item()].item() for i in perm]\n\n data.adj_t = SparseTensor(\n row=col,\n col=row,\n value=torch.tensor(value, dtype=torch.float32),\n sparse_sizes=(N, N),\n is_sorted=True,\n )\n\n # Pre-process some important attributes.\n data.adj_t.storage.rowptr()\n data.adj_t.storage.csr2csc()\n\n return data", "def to_coo_matrix(self):\n if self.E > 0:\n i, j = self.edges.T\n sm = coo_matrix((self.weights, (i, j)), shape=(self.V, self.V))\n else:\n sm = coo_matrix((self.V, self.V))\n return sm", "def from_sparse_matrix(self, matrix, node_names=None, directed=False, *args, **kwargs):\n\t\tN = list()\n\t\tE = dict()\n\t\tneighbours = dict()\n\n\t\t# Assert Square Adjacency Matrix\n\t\tif matrix.shape[0] != matrix.shape[1]:\n\t\t\traise ValueError('Adjacency Matrix not square')\n\n\t\tN = list( np.arange(matrix.shape[0]) )\n\t\tneighbours = {i:[] for i in np.arange(matrix.shape[0])}\n\t\t#\n\t\trows,cols = matrix.nonzero()\n\t\tfor i,j in zip(rows,cols):\n\t\t\t# the diagonal is (must be) always zero (distance = 0)\n\t\t\tif i==j:\n\t\t\t\tcontinue\n\t\t\t# infinite distance doesn't have to be calculated\n\t\t\telif matrix[i,j] == np.inf:\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\tE[ (i,j) ] = float(matrix[i,j])\n\t\t\t\tneighbours[i].append(j)\n\n\t\treturn Dijkstra(N, E, neighbours, node_names, directed, *args, **kwargs)", "def adjacencyMatrixplot(nodes):\n adMat = np.zeros((len(nodes), len(nodes)), 
int)\n for node in nodes:\n if (node.id == 0):continue\n parent, child = node.parent, node.id # -1 -> tally with list indices\n adMat[parent, child] = 1\n return adMat", "def build_lhs_matrix(self):\n j=3\n diags1 = self.build_linear_diags()\n diags1 += self.build_dispersion_diags()\n\n # Ones down primary diagonal\n diags2 = np.zeros_like(diags1)\n diags2[j,:] = 1.\n\n cff = self.dt*(1+self.c_im)*0.5 \n diags = diags2 - cff*diags1\n \n # Build the sparse matrix\n cols = [ii for ii in range(-self._j, self._j+1)]\n M = sparse.spdiags(diags, cols, self.N, self.N)\n\n return M, diags", "def _from_dict_to_sparse(self, adj_dict):\n indices = list(adj_dict.keys())\n values = [1] * len(indices)\n\n edge_index = torch.LongTensor(indices).T.to(self.device)\n edge_attr = torch.FloatTensor(values).to(self.device)\n\n edge_index, edge_attr = utils.to_symmetric(edge_index, edge_attr, self.n)\n\n return SparseTensor.from_edge_index(edge_index=edge_index,\n edge_attr=edge_attr,\n sparse_sizes=torch.Size([self.n, self.n]))", "def to_coo_matrix(self):\n if self.E > 0:\n i, j = self.edges.T\n sm = coo_matrix((np.ones(self.E), (i, j)),\n shape=(self.V, self.V))\n else:\n sm = coo_matrix((self.V, self.V))\n return sm", "def adjacency_matrix():\n file_path = PROJECT_PATH + \"/geographycal_data/adjacency_matrix/Howgrp.txt\"\n router = Router(adjacency_metrix=file_path)\n # router.write2vtk(router.graph, \"adjacency_matrix\")\n # nx.draw(router.graph)\n # plt.show()\n # adjacency matrix\n A = nx.adjacency_matrix(router.graph, weight=None).toarray()\n # ... and its spectrum\n nx.adjacency_spectrum(router.graph, weight=None)\n # weighted adjacency\n W = nx.adjacency_matrix(router.graph)\n # D\n I = np.reshape(np.ones(12), (-1, 1))\n D = np.matmul(A, I)\n # combinatorial graph Laplacian L = D - A\n L = nx.laplacian_matrix(router.graph, weight=None)\n # ... 
and his spectrum\n nx.laplacian_spectrum(router.graph, weight=None)\n # weighted Laplacian\n Y = nx.laplacian_matrix(router.graph)\n\n # Note\n sumD = np.matmul(I.transpose(), D)\n sumD = sumD[0][0]\n sumA = 0\n for row in np.nditer(A):\n for e in np.nditer(row):\n sumA += e\n\n # Fielder vector\n fiedler_vector = nx.fiedler_vector(router.graph, weight=None)\n\n # Matrix Double index Sum\n\n def D_app(F):\n return D * F\n\n def A_app(F):\n AF = np.zeros(len(F))\n for i, e_i in enumerate(F):\n for j, e_j in enumerate(F):\n if (A[i][j] != 0):\n AF[i] += F[j]\n return AF", "def adjacency_matrix(cluster_pred):\n #print('adjacency start')\n x = cluster_pred.copy()\n if(len(x.shape) == 1):\n x = x[:, np.newaxis]\n # Force the cluster indexing to be positive integers\n if(x.min() <= 0):\n x += -x.min() + 1\n\n A = np.dot(x**-1., x.T) == 1\n #print('adjacency end')\n return A", "def _init_vertex_adjacency_matrix(self, verbose=False):\n self._init_from_cdd_input(self.cdd_Vrepresentation(),\n '--adjacency', verbose)", "def adjacency(dist, idx):\n M, k = dist.shape\n assert M, k == idx.shape\n assert dist.min() >= 0\n # Weights.\n sigma2 = np.mean(dist[:, -1]) ** 2\n #print sigma2\n dist = np.exp(- dist ** 2 / sigma2)\n\n # Weight matrix.\n I = np.arange(0, M).repeat(k)\n J = idx.reshape(M * k)\n V = dist.reshape(M * k)\n W = scipy.sparse.coo_matrix((V, (I, J)), shape=(M, M))\n # No self-connections.\n W.setdiag(0)\n\n # Non-directed graph.\n bigger = W.T > W\n W = W - W.multiply(bigger) + W.T.multiply(bigger)\n return W", "def construct_4adj_graph(X):\n s0, s1, s2 = X.shape\n \n size_data = s0*s1\n \n xGrid, yGrid = np.meshgrid(np.arange(s0), np.arange(s1))\n totGrid = (xGrid*s1 + yGrid).transpose()\n \n horiz_edges = np.sqrt(np.sum((X[1:,:,:] - X[:-1,:,:])**2, axis=-1).flatten())\n indx_horiz_edges = totGrid[1:,:].flatten()\n indy_horiz_edges = totGrid[:-1,:].flatten()\n \n vert_edges = np.sqrt(np.sum((X[:,1:,:] - X[:,:-1,:])**2, axis=-1).flatten())\n indx_vert_edges = totGrid[:,1:].flatten()\n indy_vert_edges = totGrid[:,:-1].flatten()\n \n w = np.concatenate((horiz_edges, vert_edges), axis=0) + 1e-6\n u = np.concatenate((indx_horiz_edges, indx_vert_edges), axis=0)\n v = np.concatenate((indy_horiz_edges, indy_vert_edges), axis=0)\n \n return sp.sparse.csr_matrix((w,(u,v)), shape=(size_data, size_data))", "def vec2adjmat(source, target, symmetric=True):\n df = pd.DataFrame(np.c_[source, target], columns=['source', 'target'])\n # Make adjacency matrix\n adjmat = pd.crosstab(df['source'], df['target'])\n # Get all unique nodes\n # nodes = np.unique(np.c_[adjmat.columns.values, adjmat.index.values].flatten())\n nodes = np.unique(list(adjmat.columns.values) + list(adjmat.index.values))\n\n # Make the adjacency matrix symmetric\n if symmetric:\n # Add missing columns\n node_columns = np.setdiff1d(nodes, adjmat.columns.values)\n for node in node_columns:\n adjmat[node]=0\n\n # Add missing rows\n node_rows = np.setdiff1d(nodes, adjmat.index.values)\n adjmat=adjmat.T\n for node in node_rows:\n adjmat[node]=0\n adjmat=adjmat.T\n\n # Sort to make ordering of columns and rows similar\n [IA, IB] = ismember(adjmat.columns.values, adjmat.index.values)\n adjmat = adjmat.iloc[IB, :]\n adjmat.index.name='source'\n adjmat.columns.name='target'\n\n return(adjmat)", "def vec2adjmat(source, target, symmetric=True):\n df = pd.DataFrame(np.c_[source, target], columns=['source', 'target'])\n # Make adjacency matrix\n adjmat = pd.crosstab(df['source'], df['target'])\n # Get all unique nodes\n # nodes = 
np.unique(np.c_[adjmat.columns.values, adjmat.index.values].flatten())\n nodes = np.unique(list(adjmat.columns.values) + list(adjmat.index.values))\n\n # Make the adjacency matrix symmetric\n if symmetric:\n # Add missing columns\n node_columns = np.setdiff1d(nodes, adjmat.columns.values)\n for node in node_columns:\n adjmat[node]=0\n\n # Add missing rows\n node_rows = np.setdiff1d(nodes, adjmat.index.values)\n adjmat=adjmat.T\n for node in node_rows:\n adjmat[node]=0\n adjmat=adjmat.T\n\n # Sort to make ordering of columns and rows similar\n [IA, IB] = ismember(adjmat.columns.values, adjmat.index.values)\n adjmat = adjmat.iloc[IB, :]\n adjmat.index.name='source'\n adjmat.columns.name='target'\n\n return(adjmat)", "def adj_mat(self):\n return self._adj_mat", "def internal_adjacency(self, node_list):\n # Create igraph Graph object describing the subgraph\n subgraph = self.graph.subgraph(node_list)\n # Get adjacency matrix\n return np.array(subgraph.get_adjacency(type=2).data).astype(np.int8)", "def makeGeneralizedAdjacencyMatrix( adjacencyMatrix, sigma = 1/2 ):\n n = adjacencyMatrix.shape[0]\n D = np.sum( adjacencyMatrix, axis=0 )\n\n \n D1 = sp.sparse.lil_matrix( ( n, n ) ) #Will correspond to D^{-sigma}\n D1_vector = ( np.power( abs( D ), - float( sigma ) ) )\n for i in range(n):\n D1[i,i] = D1_vector[i]\n D1 = sp.sparse.dia_matrix( D1 )\n \n D2 = sp.sparse.lil_matrix( ( n, n ) ) #will correspond to D^{sigma-1}\n D2_vector = ( np.power( abs( D ), float( sigma - 1 ) ) ) \n for i in range(n):\n D2[i,i] = D2_vector[i]\n D2 = sp.sparse.dia_matrix( D2 )\n\n return D1 @ sp.sparse.csr_matrix( adjacencyMatrix ) @ D2", "def Adjacency(graph,digraph=False): \n N = len(graph.nodes)\n adj = np.zeros((N,N))\n edges = graph.edges\n for a,b in edges:\n adj[a,b] = 1\n if not digraph:\n adj[b,a] = 1\n return adj", "def get_graph(adj):\n # remove all zeros rows and columns\n adj = adj[~np.all(adj == 0, axis=1)]\n adj = adj[:, ~np.all(adj == 0, axis=0)]\n adj = np.asmatrix(adj)\n G = nx.from_numpy_matrix(adj)\n return G", "def calculate_connectivity_matrix(molecule, element_diagonal=False):\n num_atoms = molecule.GetNumAtoms()\n adjacency = np.zeros((num_atoms, num_atoms))\n for bond in molecule.GetBonds():\n bond_type = str(bond.GetBondType()).lower()\n bond_order = get_bond_order(bond_type)\n i = bond.GetBeginAtomIdx()\n j = bond.GetEndAtomIdx()\n adjacency[i, j] = bond_order\n adjacency[j, i] = bond_order\n if element_diagonal:\n for i, atom in enumerate(molecule.GetAtoms()):\n adjacency[i, i] = atom.GetAtomicNum()\n return adjacency", "def edge2adj(edge_index,edge_weight,num_nodes):\n adj = torch.sparse.FloatTensor(edge_index, edge_weight, torch.Size([num_nodes,num_nodes]))\n return adj", "def A_matrix(self,type_cost=Edge.given_cost):\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix", "def get_adjacency_matrix(node_list: List[Node], graph: Graph):\n node_to_index = {node: index for index, node in enumerate(node_list)}\n adjacency_matrix = numpy.zeros((len(node_list), len(node_list)), dtype=int)\n for node in node_list:\n for dependency in graph[node]:\n if dependency != node:\n adjacency_matrix[\n node_to_index[node],\n node_to_index[dependency]\n ] = 1\n return adjacency_matrix", "def _generate_adjacency_matrices(self):\n self.adj_matrices = dict()\n mes = []\n args = []\n 
for metaedge in self.metaedges:\n mes.append(metaedge)\n args.append(self._prepare_parallel_adj_matrix_args(self.edge_df.query('abbrev == @metaedge')))\n res = parallel_process(array=args, function=mt.get_adj_matrix, use_kwargs=True, n_jobs=self.n_jobs,\n front_num=0)\n for metaedge, matrix in zip(mes, res):\n self.adj_matrices[metaedge] = matrix", "def as_igraph(self):\n if not self.igraph_representation:\n A = self.connectivity_matrix.values\n g = igraph.Graph.Adjacency((A>0).tolist())\n g.es['weight'] = A[A.nonzero()]\n g.vs['name'] = self.connectivity_matrix.columns\n self.igraph_representation = g\n return self.igraph_representation", "def construct_graph(indices, distances, n):\n CSR_graph = scipy.sparse.csr_matrix((distances, [indices[:, 0], indices[:, 1]]), shape=(n, n))\n return CSR_graph", "def to_incidence_matrix(self) -> IncidenceMatrix:\n inc_m = [[0 for _ in range(len(self.edges))] for _ in range(len(self))]\n for i, edge in enumerate(self.edges):\n n1 = edge.begin\n n2 = edge.end\n\n inc_m[n1 - 1][i] = -edge.weight\n inc_m[n2 - 1][i] = edge.weight\n\n return inc_m", "def build_graph_from_sparse_matrix(gdf, matrix, graph=None):\n\n n, m = matrix.shape\n assert(n == m)\n\n if graph is None:\n logger.info(\"Generating new graph from dataframe.\")\n\n graph = nx.DiGraph()\n for i in range(n):\n name = gdf.loc[i]['name']\n graph_add_node(graph, name)\n\n I, J, V = sparse.find(matrix)\n N = I.size\n\n for k in range(N):\n i = I[k]\n j = J[k]\n v = V[k]\n name_i = gdf.loc[i]['name']\n name_j = gdf.loc[j]['name']\n graph_increment_edge(graph, name_i, name_j, v)\n\n return graph", "def make_adjacency_matrix(X, metric=\"correlation\", n_neighbors=6, n_jobs=1):\n knn = NearestNeighbors(n_neighbors=n_neighbors,\n metric=metric,\n algorithm=\"brute\",\n n_jobs=n_jobs,\n ).fit(X)\n\n adjacency_matrix = knn.kneighbors_graph(X,\n mode=\"distance\",\n ).toarray()\n\n return adjacency_matrix", "def _create_affinity_matrix(mesh):\n\n l = len(mesh.faces)\n print(\"mesh_segmentation: Creating distance matrices...\")\n distance_matrix = _create_distance_matrix(mesh)\n\n print(\"mesh_segmentation: Finding shortest paths between all faces...\")\n # for each non adjacent pair of faces find shortest path of adjacent faces\n W = scipy.sparse.csgraph.dijkstra(distance_matrix)\n inf_indices = numpy.where(numpy.isinf(W)) # unreachable pairs\n W[inf_indices] = 0 # mark the unreachable pairs\n\n print(\"mesh_segmentation: Creating affinity matrix...\")\n # change distance entries to similarities\n sigma = W.sum() / (l ** 2) # squared\n den = 2 * (sigma ** 2)\n W = numpy.exp(-W / den)\n W[inf_indices] = 0\n numpy.fill_diagonal(W, 1) # fill the main diagonal of the array.\n\n return W", "def adjacency(dist, idx):\n M, k = dist.size()\n # assert M, k == idx.shape\n # assert dist.min() >= 0\n\n # Weights.\n sigma2 = torch.mean(dist[:, -1])**2\n dist = torch.exp(- dist**2 / sigma2)\n\n # Weight matrix.\n I = torch.arange(0, M).repeat_interleave(k).contiguous().view(1, -1).cuda()\n J = idx.contiguous().view(1, -1)\n V = dist.contiguous().view(-1)\n indices = torch.cat([I, J], dim=0)\n W = torch.sparse.FloatTensor(indices, V, torch.Size([M, M])).cuda()\n # W = scipy.sparse.coo_matrix((V.cpu().numpy(), (I.cpu().numpy(), J.cpu().numpy())), shape=(M, M))\n\n # No self-connections.\n # W.setdiag(1)\n\n # Non-directed graph.\n # bigger = W.T > W\n # W = W - W.multiply(bigger) + W.T.multiply(bigger)\n #\n # assert W.nnz % 2 == 0\n # assert np.abs(W - W.T).mean() < 1e-10\n # assert type(W) is scipy.sparse.csr.csr_matrix\n 
return W", "def get_feedforward_adj_mat(num_layers):\n ret = dok_matrix((num_layers, num_layers))\n for i in range(num_layers - 1):\n ret[i, i + 1] = 1\n return ret", "def to_sparse(self):\n if self.rep.fmt == 'sparse':\n return self\n\n return self.from_rep(self.rep.to_sdm())", "def sparse_matlab(i, j, v, m, n):\n return csr_matrix((v, (i, j)), shape=(m, n))", "def _build_sparse_matrix(L):\n shape = L.shape\n i = torch.LongTensor(np.vstack((L.row, L.col)).astype(int))\n v = torch.FloatTensor(L.data)\n return torch.sparse.FloatTensor(i, v, torch.Size(shape))", "def tocsr(self):\n\n indptr = np.asarray([len(x) for x in self.rows], dtype=np.intc)\n indptr = np.concatenate( (np.array([0], dtype=np.intc), np.cumsum(indptr)) )\n\n nnz = indptr[-1]\n\n indices = []\n for x in self.rows:\n indices.extend(x)\n indices = np.asarray(indices, dtype=np.intc)\n\n data = []\n for x in self.data:\n data.extend(x)\n data = np.asarray(data, dtype=self.dtype)\n\n from csr import csr_matrix\n return csr_matrix((data, indices, indptr), shape=self.shape)", "def _get_vertex_face_adjacency(self, data=None):\n # Input checks:\n nv = self.vertices.shape[0]\n f = self.faces # Convert to an ndarray or pass if already is one\n # Computation\n row = f.reshape(-1) # Flatten indices\n col = np.tile(np.arange(len(f)).reshape((-1, 1)), (1, f.shape[1])).reshape(-1) # Data for vertices\n shape = (nv, len(f))\n\n if not data:\n data = np.ones(len(col), dtype=np.bool)\n\n # assemble into sparse matrix\n return coo_matrix((data, (row, col)), shape=shape, dtype=data.dtype)", "def adjacency_matrix(self, input=None,\n entry=None):\n from sage.rings.integer_ring import ZZ\n\n def default_function(transitions):\n x = sage.symbolic.ring.SR.var('x')\n return x**sum(transition.word_out)\n\n if entry is None:\n entry = default_function\n\n relabeledFSM = self\n l = len(relabeledFSM.states())\n for state in self.iter_states():\n if state.label() not in ZZ or state.label() >= l \\\n or state.label() < 0:\n relabeledFSM = self.relabeled()\n break\n dictionary = {}\n for transition in relabeledFSM.iter_transitions():\n if input is None or transition.word_in == [input]:\n if (transition.from_state.label(),\n transition.to_state.label()) in dictionary:\n dictionary[(transition.from_state.label(),\n transition.to_state.label())] \\\n += entry(transition)\n else:\n dictionary[(transition.from_state.label(),\n transition.to_state.label())] \\\n = entry(transition)\n return sage.matrix.constructor.matrix(\n len(relabeledFSM.states()), dictionary)", "def _square_adjacency_matrix_to_edgelist(matrix, indices_to_ids):\n\n\tdf_of_matrix = pd.DataFrame(matrix)\t\t\t\t\t\t\t\t\t# Convert the numpy array to a pandas dataframe.\n\tboolean_triu = np.triu(np.ones(df_of_matrix.shape)).astype(np.bool)\t# Create a boolean array of same shape where upper triangle is true.\n\tdf_of_matrix = df_of_matrix.where(boolean_triu)\t\t\t\t\t\t# Make everything but the upper triangle NA so it is ignored by stack.\n\tmelted_matrix = df_of_matrix.stack().reset_index()\t\t\t\t\t# Melt (stack) the array so the first two columns are matrix indices.\n\tmelted_matrix.columns = [\"from\", \"to\", \"value\"]\t\t\t\t\t\t# Rename the columns to indicate this specifies a graph.\n\tmelted_matrix[\"from\"] = pd.to_numeric(melted_matrix[\"from\"])\t\t# Make sure node names are integers because IDs have to be integers.\n\tmelted_matrix[\"to\"] = pd.to_numeric(melted_matrix[\"to\"])\t\t\t# Make sure node names are integers because IDs have to be integers.\n\tmelted_matrix[\"from\"] = 
melted_matrix[\"from\"].map(indices_to_ids)\t# Rename the node names to be IDs from the dataset not matrix indices.\n\tmelted_matrix[\"to\"] = melted_matrix[\"to\"].map(indices_to_ids)\t\t# Rename the node names to be IDS from the dataset not matrix indices.\n\treturn(melted_matrix)\t\t\t\t\t\t\t\t\t\t\t\t# Return the melted matrix that looks like an edge list.", "def convert_to_dense_graph(self) -> cjg.Dense:\n N = len(self.indices)\n ising_int = self.ising_interactions()\n\n # cxxjij.graph.dense\n cxx_dense_ising = cjg.Dense(N)\n for i in range(N):\n if ising_int[i,i] != 0.0:\n cxx_dense_ising[i,i] = ising_int[i,i]\n for j in range(i+1, N):\n if ising_int[i,j] != 0.0:\n cxx_dense_ising[i,j] = ising_int[i,j]\n \n return cxx_dense_ising", "def to_sparse(self):\n from divisi2.sparse import SparseVector\n return SparseVector(self, self.labels)", "def get_adjacency_matrix_from_et(et, debug = False):\n am = numpy.zeros([et.nodes.num_nds, et.nodes.num_nds], dtype=numpy.uint8)\n for i in range(et.nodes.num_nds):\n parents = et.nodes[i].parents.display()\n if debug:\n print 'node ', i, parents\n for j in range(len(parents)):\n am[parents[j], i] = 1\n return am", "def preprocess_adj(adj):\n adj = nx.adjacency_matrix(nx.from_dict_of_lists(adj)) # return a adjacency matrix of adj ( type is numpy)\n adj_normalized = normalize_adj(adj + sp.eye(adj.shape[0])) #\n # return sparse_to_tuple(adj_normalized)\n return adj_normalized.todense()", "def build_adjacency(self, connectivity_matrix, n_neighbours):\n \n #Get k strongest connections\n connections, idx = self.get_k_strongest_connections(connectivity_matrix, n_neighbours)\n \n #Sparse matrix\n M, k = connections.shape\n assert M, k == idx.shape #M - number of vertices. k == nearest neighbours \n assert connections.min() >= 0\n\n # Weights.\n sigma2 = np.mean(connections[:, -1])**2\n connections = np.exp(- connections**2 / sigma2)\n\n # Weight matrix.\n I = np.arange(0, M).repeat(k) #row\n J = idx.reshape(M*k) #col\n V = connections.reshape(M*k) #data \n W = sparse.coo_matrix((V, (I, J)), shape=(M, M)) #COO is a fast format for constructing sparse matrices\n\n # No self-connections.\n W.setdiag(0)\n\n # Non-directed graph.\n bigger = W.T > W\n W = W - W.multiply(bigger) + W.T.multiply(bigger)\n return W", "def preprocess_adj(adj):\n adj_normalized = normalize_adj(adj + sp.eye(adj.shape[0]))\n return sparse_to_tuple(adj_normalized)", "def preprocess_adj(adj):\n adj_normalized = normalize_adj(adj + sp.eye(adj.shape[0]))\n return sparse_to_tuple(adj_normalized)", "def preprocess_adj(adj):\n adj_normalized = normalize_adj(adj + sp.eye(adj.shape[0]))\n return sparse_to_tuple(adj_normalized)", "def preprocess_adj(adj):\n adj_normalized = normalize_adj(adj + sp.eye(adj.shape[0]))\n return sparse_to_tuple(adj_normalized)", "def preprocess_adj(adj):\n adj_normalized = normalize_adj(adj + sp.eye(adj.shape[0]))\n return sparse_to_tuple(adj_normalized)", "def gradop(adj: np.ndarray) -> sp.csr_matrix:\n\n e = np.array(adj.nonzero())\n ne = e.shape[1]\n nv = adj.shape[0]\n i, j, x = np.tile(range(ne), 2), e.flatten(), np.repeat([-1, 1], ne)\n\n return sp.csr_matrix((x, (i, j)), shape=(ne, nv))", "def _build_adjacency_matrix_1(self):\n\n from scipy import sparse as sparse\n \n down_neighbour = np.empty(self.tri.npoints)\n\n for node in range (0,self.tri.npoints):\n down_neighbour[node] = self.neighbour_array_lo_hi[node][0]\n\n # Build a matrix of downhill-ness - one entry per node ! 
\n \n size = self.tri.npoints\n row_array = np.empty(size)\n col_array = np.empty(size)\n down_array = np.ones(size)\n\n # Catch cases where node is local low point (i.e. it is its own low neighbour)\n\n for row in range(0, self.tri.npoints): \n row_array[row] = row\n col_array[row] = down_neighbour[row]\n if row == down_neighbour[row]:\n down_array[row] = 0.0\n \n\n downMCOO = sparse.coo_matrix( (down_array, (row_array, col_array)), shape=(size,size) ).T \n\n self.adjacency1 = downMCOO.tocsr() \n\n # Catch pathological cases - sometimes if there is a flat spot on the boundary, then \n # the filling method above will produce a non-square matrix. This is caused by\n # repetition of values in the COO list which are summed on conversion.\n\n if downMCOO.shape[0] != downMCOO.shape[1]:\n # This approach works but is a lot slower\n\n print \"\"\"\n Warning: the downhill matrices require a slow build method. This is probably\n Because there are degeneracies in the slope - particularly at the boundaries\n A small random perturbation is usually enough to fix this problem\n \"\"\"\n downMat = sparse.lil_matrix((size, size))\n\n for row in range(0, self.tri.npoints): \n downMat[down_neighbour[row],row] = 1.0\n\n for row in range(0, self.tri.npoints): \n if down_neighbour[row] == row:\n downMat[row,row] = 0.0\n \n self.adjacency1 = downMat.T.tocsr() \n \n return", "def normalize_adj( adj : np.ndarray, \n sparse : bool = False\n ) -> Union[np.ndarray, sp.spmatrix]:\n if sparse:\n adj = sp.coo_matrix(adj) # [N,N]\n rowsum = np.array(adj.sum(1)) # [N,]\n \n d_inv_sqrt = np.power(rowsum, -0.5) # [N,], may issue runtime warnings (div by zero)\n d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0. # []\n d_mat_inv_sqrt = sp.diags(d_inv_sqrt) if sparse else np.diag(d_inv_sqrt) #[N,N]\n \n if sparse:\n return adj.dot(d_mat_inv_sqrt).transpose().dot(d_mat_inv_sqrt).tocoo()\n else:\n return ((adj @ d_mat_inv_sqrt).transpose() @ d_mat_inv_sqrt) # not quite sure why this order = D^T A^T D, D^T = D, A^T = A - the transpose is unncessary?!", "def _identity_sparse(d, stype=\"csr\", dtype=complex):\n return sp.eye(d, dtype=dtype, format=stype)", "def feature_calculator(args, graph):\n index_1 = [edge[0] for edge in graph.edges()]\n index_2 = [edge[1] for edge in graph.edges()]\n values = [1 for edge in graph.edges()]\n node_count = max(max(index_1)+1,max(index_2)+1)\n adjacency_matrix = sparse.coo_matrix((values, (index_1,index_2)),shape=(node_count,node_count),dtype=np.float32)\n degrees = adjacency_matrix.sum(axis=0)[0].tolist()\n degs = sparse.diags(degrees, [0])\n normalized_adjacency_matrix = degs.dot(adjacency_matrix)\n target_matrices = [normalized_adjacency_matrix.todense()]\n powered_A = normalized_adjacency_matrix\n if args.window_size > 1:\n for power in tqdm(range(args.window_size-1), desc = \"Adjacency matrix powers\"):\n powered_A = powered_A.dot(normalized_adjacency_matrix)\n to_add = powered_A.todense()\n target_matrices.append(to_add)\n target_matrices = np.array(target_matrices)\n return target_matrices", "def adjacency_spectrum(G, weight=\"weight\"):\n import scipy as sp\n\n return sp.linalg.eigvals(nx.adjacency_matrix(G, weight=weight).todense())", "def _get_tsp_matrix(graph: networkx.Graph) -> np.ndarray:\n number_of_nodes = len(graph)\n matrix = np.zeros((number_of_nodes, number_of_nodes))\n for i in nx.all_pairs_dijkstra_path_length(graph, weight=\"weight\"):\n distance_dist = i[1]\n for j in distance_dist.items():\n matrix[i[0] - 1][j[0] - 1] = j[1]\n matrix[j[0] - 1][i[0] - 1] = matrix[i[0] - 1][j[0] - 1]\n 
return matrix", "def adjacency_matrix_to_dict_graph(adjacency_matrix):\n\n # Check if the matrix has the right shape\n number_row_edges = len(adjacency_matrix)\n number_col_edges = len(adjacency_matrix[0])\n assert len(adjacency_matrix) == len(adjacency_matrix[0]), \\\n f\"Expected number of row = number of columns. {number_row_edges}\" \\\n f\" rows and {number_col_edges} columns found.\"\n\n return {i: (np.nonzero(row)[0]).tolist() for\n i, row in enumerate(adjacency_matrix)}", "def sym_adj(adj):\n adj = sp.coo_matrix(adj)\n rowsum = np.array(adj.sum(1))\n d_inv_sqrt = np.power(rowsum, -0.5).flatten()\n d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0.\n d_mat_inv_sqrt = sp.diags(d_inv_sqrt)\n return adj.dot(d_mat_inv_sqrt).transpose().dot(d_mat_inv_sqrt).astype(np.float32).todense()", "def _estimate_adjacency_matrix(self, X):\r\n\t\tB = np.zeros([X.shape[1], X.shape[1]], dtype='float64')\r\n\t\tfor i in range(1, len(self._causal_order)):\r\n\t\t\tcoef = self._predict_adaptive_lasso(\r\n\t\t\t\tX, self._causal_order[:i], self._causal_order[i])\r\n\t\t\tB[self._causal_order[i], self._causal_order[:i]] = coef\r\n\t\tself.adjacency_matrix_ = B\r\n\t\treturn self", "def to_sparse_matrix(self, grid, format=None):\n S = self.centered_stencil()\n # print(\"grid :\")\n\n grid = tuple(grid)\n # print(grid)\n if not (np.asarray(S.shape) % 2 == 1).all():\n raise ValueError('all stencil dimensions must be odd')\n\n assert_condition(len(grid) == np.rank(S), ValueError,\n 'stencil rank must equal number of grid dimensions')\n assert_condition(min(grid) >= 1, ValueError,\n 'grid dimensions must be positive')\n\n N_v = np.prod(grid) # number of vertices in the mesh\n N_s = (S != 0).sum() # number of nonzero stencil entries\n\n # diagonal offsets\n diags = np.zeros(N_s, dtype=int)\n\n # compute index offset of each dof within the stencil\n strides = np.cumprod([1] + list(reversed(grid)))[:-1]\n indices = tuple(i.copy() for i in S.nonzero())\n for i,s in zip(indices,S.shape):\n i -= s // 2\n for stride,coords in zip(strides, reversed(indices)):\n diags += stride * coords\n\n #\n data = S[S != 0].repeat(N_v).reshape(N_s, N_v)\n indices = np.vstack(indices).T\n\n # zero boundary connections\n for index,diag in zip(indices,data):\n diag = diag.reshape(grid)\n for n,i in enumerate(index):\n if i > 0:\n s = [ slice(None) ]*len(grid)\n s[n] = slice(0,i)\n diag[s] = 0\n elif i < 0:\n s = [ slice(None) ]*len(grid)\n s[n] = slice(i,None)\n diag[s] = 0\n\n # remove diagonals that lie outside matrix\n mask = abs(diags) < N_v\n if not mask.all():\n diags = diags[mask]\n data = data[mask]\n\n # sum duplicate diagonals\n if len(np.unique(diags)) != len(diags):\n new_diags = np.unique(diags)\n new_data = np.zeros( (len(new_diags),data.shape[1]), dtype=data.dtype)\n for dia,dat in zip(diags,data):\n n = np.searchsorted(new_diags,dia)\n new_data[n,:] += dat\n\n diags = new_diags\n data = new_data\n\n return sprs.dia_matrix((data,diags), shape=(N_v, N_v)).asformat(format)", "def generate_full_adj(self):\n edges = np.zeros(shape=(self.n_balls, self.n_balls))\n row_idx = 0 # start filling adjacency mat from root node\n col_idx = 1 # skip the root node and start from 2nd node\n for l in range(self.nl):\n for n in range(self.nn[l]):\n edges[row_idx, col_idx:col_idx + self.nc[l]] = 1\n # Increase counters after filling connections for a parent node\n col_idx += self.nc[l]\n row_idx += 1\n return edges", "def sym_adj(adj):\n adj = ss.coo_matrix(adj)\n rowsum = np.array(adj.sum(1))\n d_inv_sqrt = np.power(rowsum, -0.5).flatten()\n 
d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0.\n d_mat_inv_sqrt = ss.diags(d_inv_sqrt)\n return np.array(adj.dot(d_mat_inv_sqrt).transpose().dot(d_mat_inv_sqrt).astype(np.float32).todense())", "def to_csr(self):\n return sparse.csr_matrix((self.data, (self.col, self.row)),\n shape=(self.nrows, self.ncols))", "def __init_matrix(self, adjacencyMatrix, **optional):\n\t\t# Error handling\n if adjacencyMatrix.ndim!=2 or adjacencyMatrix.shape[0]!=adjacencyMatrix.shape[1]:\n raise TypeError(\"\\'adjacencyMatrix\\' has to be a square matrix.\")\n elif np.any(adjacencyMatrix<0):\n raise ValueError(\"The weights have to be positive.\")\n elif not (str(adjacencyMatrix.dtype).startswith(\"float\") or str(adjacencyMatrix.dtype).startswith(\"int\")):\n raise TypeError(\"Weights have to be of the type \\'float*\\' or \\'int*\\'.\")\n elif np.any(np.diagonal(adjacencyMatrix)):\n raise ValueError(\"The graph is not allowed to contain circles of length 1.\")\n\t\t# Save shape and type\n self.__size = adjacencyMatrix.shape[0]\n self.__type = adjacencyMatrix.dtype\n if \"vertexNames\" in optional:\n # List of names\n self.__name_list = optional[\"vertexNames\"]\n if len(self.__name_list)!=self.__size:\n raise ValueError(\"There have not been specified as many names in \\'vertexNames\\' as there are nodes.\")\n else:\n if len(set(self.__name_list)) != len(self.__name_list):\n raise ValueError(\"Vertex names are not unique.\")\n else:\n # Hashmap name->index\n self.__names = {self.__name_list[i]:i for i in range(0,self.__size)}\n else:\n warn(Warning(\"If node names in adjacency matrix representation are not explicitly set with the parameter \\'vertexNames\\', partial and supergraphs cannot be recognized reliably.\"))\n self.__name_list = [i for i in range(0,self.__size)]\n self.__names = {self.__name_list[i]:i for i in range(0,self.__size)}\n \n # Adjacency matrix\n self.__mat = adjacencyMatrix\n # Adjacency matrix\n self.__adj = [[(i,self.__mat[vertices][i]) for i in np.arange(self.__size) if self.__mat[vertices][i]>0] for vertices in np.arange(0,self.__size)]\n \n # List of known supergraphs\n self.__supergraph = []", "def init_graph(self):\n import dgl\n\n adj_list = []\n for rel_type in range(1, self.n_relations, 1):\n edge_idxs = self.ckg.filter_edges(\n lambda edge: edge.data[\"relation_id\"] == rel_type\n )\n sub_graph = (\n dgl.edge_subgraph(self.ckg, edge_idxs, preserve_nodes=True)\n .adjacency_matrix(transpose=False, scipy_fmt=\"coo\")\n .astype(\"float\")\n )\n rowsum = np.array(sub_graph.sum(1))\n d_inv = np.power(rowsum, -1).flatten()\n d_inv[np.isinf(d_inv)] = 0.0\n d_mat_inv = sp.diags(d_inv)\n norm_adj = d_mat_inv.dot(sub_graph).tocoo()\n adj_list.append(norm_adj)\n\n final_adj_matrix = sum(adj_list).tocoo()\n indices = torch.LongTensor([final_adj_matrix.row, final_adj_matrix.col])\n values = torch.FloatTensor(final_adj_matrix.data)\n adj_matrix_tensor = torch.sparse.FloatTensor(indices, values, self.matrix_size)\n return adj_matrix_tensor.to(self.device)" ]
[ "0.7717279", "0.743809", "0.7303273", "0.7291218", "0.72357076", "0.72152543", "0.7204456", "0.7193659", "0.71897316", "0.7153426", "0.7127393", "0.7073265", "0.70631844", "0.7041357", "0.7021406", "0.69531685", "0.69405574", "0.6906811", "0.69028586", "0.6890784", "0.6887394", "0.68677765", "0.6849658", "0.67803043", "0.6775576", "0.6760419", "0.6688568", "0.6682787", "0.6656284", "0.6648076", "0.66431177", "0.6593298", "0.652426", "0.65004086", "0.6487555", "0.6487555", "0.6461243", "0.6454674", "0.64476913", "0.643346", "0.64249694", "0.6406803", "0.63887584", "0.63742304", "0.6359211", "0.633803", "0.63363636", "0.63302964", "0.6321149", "0.6321149", "0.6316054", "0.6218874", "0.6210795", "0.6157977", "0.61575204", "0.6151328", "0.6149903", "0.61418086", "0.61278135", "0.6127609", "0.6116985", "0.60982573", "0.6090193", "0.60823673", "0.6081296", "0.6072195", "0.60452676", "0.6041483", "0.60404557", "0.60382783", "0.60345167", "0.59988075", "0.5995447", "0.5962854", "0.5957102", "0.5949622", "0.5947425", "0.5934213", "0.5932508", "0.5929502", "0.59285206", "0.59285206", "0.59285206", "0.59285206", "0.59285206", "0.59227526", "0.59047055", "0.5888422", "0.5886491", "0.58844954", "0.58820915", "0.58774424", "0.58727014", "0.5864564", "0.58600533", "0.58503425", "0.58482844", "0.5845009", "0.58433646", "0.5841467", "0.5822497" ]
0.0
-1
Compute adjacency matrix weighted by distances. It generates an adjacency matrix where the entries are the distances between neighboring vertices.
def mesh_dist(tris, vert):
    edges = mesh_edges(tris).tocoo()

    # Euclidean distances between neighboring vertices
    dist = np.linalg.norm(vert[edges.row, :] - vert[edges.col, :], axis=1)
    dist_matrix = csr_matrix((dist, (edges.row, edges.col)), shape=edges.shape)
    return dist_matrix
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def adjacency(dist, idx):\n M, k = dist.shape\n assert M, k == idx.shape\n assert dist.min() >= 0\n # Weights.\n sigma2 = np.mean(dist[:, -1]) ** 2\n #print sigma2\n dist = np.exp(- dist ** 2 / sigma2)\n\n # Weight matrix.\n I = np.arange(0, M).repeat(k)\n J = idx.reshape(M * k)\n V = dist.reshape(M * k)\n W = scipy.sparse.coo_matrix((V, (I, J)), shape=(M, M))\n # No self-connections.\n W.setdiag(0)\n\n # Non-directed graph.\n bigger = W.T > W\n W = W - W.multiply(bigger) + W.T.multiply(bigger)\n return W", "def adjacency(dist, idx):\n M, k = dist.size()\n # assert M, k == idx.shape\n # assert dist.min() >= 0\n\n # Weights.\n sigma2 = torch.mean(dist[:, -1])**2\n dist = torch.exp(- dist**2 / sigma2)\n\n # Weight matrix.\n I = torch.arange(0, M).repeat_interleave(k).contiguous().view(1, -1).cuda()\n J = idx.contiguous().view(1, -1)\n V = dist.contiguous().view(-1)\n indices = torch.cat([I, J], dim=0)\n W = torch.sparse.FloatTensor(indices, V, torch.Size([M, M])).cuda()\n # W = scipy.sparse.coo_matrix((V.cpu().numpy(), (I.cpu().numpy(), J.cpu().numpy())), shape=(M, M))\n\n # No self-connections.\n # W.setdiag(1)\n\n # Non-directed graph.\n # bigger = W.T > W\n # W = W - W.multiply(bigger) + W.T.multiply(bigger)\n #\n # assert W.nnz % 2 == 0\n # assert np.abs(W - W.T).mean() < 1e-10\n # assert type(W) is scipy.sparse.csr.csr_matrix\n return W", "def adjaceny_matrix(self):\n \n try:\n return self._adj_matrix\n except AttributeError:\n am = np.zeros((self.n, self.n))\n for edge, weight in self.weights.items():\n am[edge[0], edge[1]] = weight\n self._adj_matrix = am\n return self._adj_matrix", "def generate_weighted_graph():\n \n Adj_Matrix = np.array([\n [0.0, 0.2, 0.2, 0.3, 0.2, 0.1],\n [0.1, 0.0, 0.3, 0.3, 0.1, 0.2],\n [0.3, 0.2, 0.0, 0.1, 0.2, 0.2],\n [0.1, 0.4, 0.2, 0.0, 0.2, 0.1],\n [0.2, 0.2, 0.2, 0.2, 0.0, 0.2],\n [0.2, 0.1, 0.1, 0.3, 0.3, 0.0]\n ])\n\n return Adj_Matrix", "def adjacency_matrix():\n file_path = PROJECT_PATH + \"/geographycal_data/adjacency_matrix/Howgrp.txt\"\n router = Router(adjacency_metrix=file_path)\n # router.write2vtk(router.graph, \"adjacency_matrix\")\n # nx.draw(router.graph)\n # plt.show()\n # adjacency matrix\n A = nx.adjacency_matrix(router.graph, weight=None).toarray()\n # ... and its spectrum\n nx.adjacency_spectrum(router.graph, weight=None)\n # weighted adjacency\n W = nx.adjacency_matrix(router.graph)\n # D\n I = np.reshape(np.ones(12), (-1, 1))\n D = np.matmul(A, I)\n # combinatorial graph Laplacian L = D - A\n L = nx.laplacian_matrix(router.graph, weight=None)\n # ... and his spectrum\n nx.laplacian_spectrum(router.graph, weight=None)\n # weighted Laplacian\n Y = nx.laplacian_matrix(router.graph)\n\n # Note\n sumD = np.matmul(I.transpose(), D)\n sumD = sumD[0][0]\n sumA = 0\n for row in np.nditer(A):\n for e in np.nditer(row):\n sumA += e\n\n # Fielder vector\n fiedler_vector = nx.fiedler_vector(router.graph, weight=None)\n\n # Matrix Double index Sum\n\n def D_app(F):\n return D * F\n\n def A_app(F):\n AF = np.zeros(len(F))\n for i, e_i in enumerate(F):\n for j, e_j in enumerate(F):\n if (A[i][j] != 0):\n AF[i] += F[j]\n return AF", "def build_adjacency(self, connectivity_matrix, n_neighbours):\n \n #Get k strongest connections\n connections, idx = self.get_k_strongest_connections(connectivity_matrix, n_neighbours)\n \n #Sparse matrix\n M, k = connections.shape\n assert M, k == idx.shape #M - number of vertices. 
k == nearest neighbours \n assert connections.min() >= 0\n\n # Weights.\n sigma2 = np.mean(connections[:, -1])**2\n connections = np.exp(- connections**2 / sigma2)\n\n # Weight matrix.\n I = np.arange(0, M).repeat(k) #row\n J = idx.reshape(M*k) #col\n V = connections.reshape(M*k) #data \n W = sparse.coo_matrix((V, (I, J)), shape=(M, M)) #COO is a fast format for constructing sparse matrices\n\n # No self-connections.\n W.setdiag(0)\n\n # Non-directed graph.\n bigger = W.T > W\n W = W - W.multiply(bigger) + W.T.multiply(bigger)\n return W", "def get_distance_matrix(self):\n names = self.get_named_leaves()\n num_names = len(names)\n dist_mat = np.zeros((num_names, num_names), dtype='float')\n for i, j in itertools.combinations(range(num_names), 2):\n node1, node2 = self.node_names[names[i]], self.node_names[names[j]]\n dist = self.node_distance(node1, node2)\n dist_mat[i,j] = dist\n dist_mat[j,i] = dist\n return names, dist_mat", "def _create_distance_matrix(mesh):\n l = len(mesh.faces)\n\n faces = polygons(mesh.faces, mesh.vertices, mesh.face_normals, mesh.area_faces)\n # map from edge-key to adjacent faces\n adj_faces_map = {}\n # find adjacent faces by iterating edges\n for index, face in enumerate(faces):\n for edge in face.edge_keys:\n if (edge[0] > edge[1]):\n new_edge = (edge[1], edge[0])\n else:\n new_edge = (edge[0], edge[1])\n if new_edge in adj_faces_map:\n adj_faces_map[new_edge].append(index) # one-to-many\n else:\n adj_faces_map[new_edge] = [index]\n\n # helping vectors to create sparse matrix later on\n row_indices = []\n col_indices = []\n Gval = [] # values for matrix of angular distances\n Aval = [] # values for matrix of geodesic distances\n # iterate adjacent faces and calculate distances\n for edge, adj_faces in adj_faces_map.items():\n if len(adj_faces) == 2:\n i = adj_faces[0]\n j = adj_faces[1]\n # the two faces connected by one edge\n Gtemp = _geodesic_distance(mesh, faces[i], faces[j], edge) # geodesic distance\n Atemp = _angular_distance(mesh, faces[i], faces[j]) # angular distance # actually a cosine distance\n Gval.append(Gtemp)\n Aval.append(Atemp)\n row_indices.append(i)\n col_indices.append(j)\n # add symmetric entry\n Gval.append(Gtemp)\n Aval.append(Atemp)\n row_indices.append(j)\n col_indices.append(i)\n\n elif len(adj_faces) > 2:\n print(\"Edge with more than 2 adjacent faces: \" + str(adj_faces) + \"!\")\n\n Gval = numpy.array(Gval)\n Aval = numpy.array(Aval)\n # delta is a global variable, passed in externally\n values = delta * Gval / numpy.mean(Gval) + \\\n (1.0 - delta) * Aval / numpy.mean(Aval)\n\n # create sparse matrix\n distance_matrix = scipy.sparse.csr_matrix(\n (values, (row_indices, col_indices)), shape=(l, l))\n return distance_matrix", "def get_full_weight_matrix_and_minimal_distances(G,\n sink_nodes,\n use_inverse_distance_as_adjacency = False,\n return_distance_matrix = False,\n ):\n\n nodes = set(list(G.nodes()))\n N = G.number_of_nodes()\n transient_nodes = list(nodes - set(sink_nodes))\n d = dict(nx.all_pairs_dijkstra_path_length(G))\n D = np.zeros((N,N))\n\n for i in range(N-1):\n for j in range(i+1,N):\n D[i,j] = d[i][j]\n D[j,i] = d[j][i]\n\n A = nx.adjacency_matrix(G).toarray()\n A = A.astype(float)\n W = A.copy()\n\n if use_inverse_distance_as_adjacency:\n W[A>0] = 1/A[A>0]\n else:\n W[A>0] = 1\n\n min_distances = D[:,sink_nodes].min(axis=1)\n\n if return_distance_matrix:\n return A, W, min_distances, D\n else:\n return A, W, min_distances", "def create_adjacency_matrix(self, edges):\n matrix = np.zeros([self.max_words, self.max_words * self.edge_types * 2])\n for 
edge in edges:\n src = edge[0]\n e_type = edge[1]\n dest = edge[2]\n self.set_matrix(matrix, src, dest, e_type, 1)\n return matrix", "def compute_adjacency_mst_and_distances(log_returns):\n # compute the correlation coefficients between each pair or stocks time series\n correlation_coefficients = np.corrcoef(log_returns.T)\n # compute distances\n distances = np.sqrt(2 * (1 - correlation_coefficients))\n # get the number of nodes of the graph\n n_nodes = distances.shape[0]\n # since distances is a symmetric matrix, we only need the upper triangular part\n upper_vec_dist = distances[np.triu_indices(n_nodes, k=1)]\n # sort distances\n idx_sorted = np.argsort(upper_vec_dist)\n # get the edge pairs of the upper triangular part\n edge_pairs = np.asarray(np.triu_indices(n_nodes, k=1))\n # sort those edge pairs according to increasing distances\n edge_pairs_sorted = edge_pairs[:,idx_sorted]\n # initialize adjacency matrix as a fully disconnected graph\n adjacency = np.zeros((n_nodes, n_nodes))\n # loop over the N * (N - 1) edges\n for k in range(len(upper_vec_dist)):\n # get the pair with the k-th smallest distance\n i, j = edge_pairs_sorted[:, k]\n # compute the connected components of the current graph\n n_comp, labels = connected_components(csgraph=csr_matrix(adjacency),\n directed=False, return_labels=True)\n # if node i and j do not belog to the same component\n # then it means they are disconnected, in which case\n # we should connect them\n if not labels[i] == labels[j]:\n adjacency[i, j] = 1 # connect nodes i and j\n # we can terminate the loop earlier if the graph is already a tree\n # note: a tree is a connected graph whose number of edges is exactly\n # n_nodes - 1\n if (int(0.5*np.sum(adjacency > 0)) == (n_nodes - 1)\n and n_comp == 1):\n break\n # since we only looped over the pairs of the upper triangular part\n # we need to symmetrize the adjacency matrix\n adjacency = .5 * (adjacency + adjacency.T)\n return distances, adjacency", "def adj_matrix(G,nodelist=None,weight='weight'):\n return nx.to_numpy_matrix(G,nodelist=nodelist,weight=weight)", "def formAdjacencyMatrix(self):\n self.adjacencyMatrix = dict()\n for i in self.node:\n self.adjacencyMatrix[i] = dict()\n for j in self.node:\n self.adjacencyMatrix[i][j] = 0\n \n for ij in self.link:\n self.adjacencyMatrix[self.link[ij].tail][self.link[ij].head] = 1", "def flatten_distance_matrix(dist):\n inds = np.triu_indices(dist.shape[0])\n return dist[inds]", "def _compute_adjacency_matrix(self):\n\n # Set up a quick-reference index to map cells to indexes\n for i, cell in enumerate(self.sim.cells):\n self._cell_indexes[cell] = i\n\n if all([self.sim.hub.cells == [self.sim.damaged],\n self.sim.damaged not in self.sim.cells]):\n # Add the \"damaged\" virtual cell to the index if we need it\n self._cell_indexes[self.sim.damaged] = len(self.sim.cells)\n\n node_count = len(list(self._cell_indexes.keys()))\n g_sparse = np.zeros((node_count, node_count), dtype=float)\n g_sparse[:] = np.inf\n\n for cluster in self.sim.clusters + [self.sim.hub]:\n cluster_tour = cluster.tour\n i = len(cluster_tour.vertices) - 1\n j = 0\n while j < len(cluster_tour.vertices):\n start_vertex = cluster_tour.vertices[i]\n stop_vertex = cluster_tour.vertices[j]\n\n start_pt = cluster_tour.points[start_vertex]\n stop_pt = cluster_tour.points[stop_vertex]\n distance = np.linalg.norm(stop_pt - start_pt)\n\n start_seg = cluster_tour.objects[start_vertex]\n stop_seg = cluster_tour.objects[stop_vertex]\n\n start_index = self._cell_indexes[start_seg]\n stop_index = 
self._cell_indexes[stop_seg]\n\n g_sparse[start_index, stop_index] = distance\n\n i = j\n j += 1\n\n g_sparse = sp.csgraph_from_dense(g_sparse, null_value=np.inf)\n return g_sparse", "def weight_matrices(graph):\n for u, v in graph.edges():\n w = graph[u][v]['weight']\n d = graph.node[u]\n graph[u][v]['wd'] = (w, -d)\n\n wd = floyd_warshall(graph, min=(0, 0), max=(maxint, 0),\n weight='wd', add=add_tuple)\n\n W = [[0 for n in graph] for n in graph]\n D = [[0 for n in graph] for n in graph]\n for u, row in enumerate(wd):\n for v, (w, d) in enumerate(row):\n W[u][v] = w\n D[u][v] = graph.node[v] - d\n\n return W, D", "def _generate_weighted_adj_matrices(self):\n self.weighted_adj_matrices = dict()\n mes = []\n args = []\n for metaedge in self.metaedges:\n mes.append(metaedge)\n args.append(self._prepare_parallel_weighted_adj_matrix_args(self.edge_df.query('abbrev == @metaedge')))\n res = parallel_process(array=args, function=mt.get_adj_matrix, use_kwargs=True, n_jobs=self.n_jobs,\n front_num=0)\n for metaedge, matrix in zip(mes, res):\n self.weighted_adj_matrices[metaedge] = matrix", "def adj_matrix(self):\n return nx.adj_matrix(self.network)", "def get_adjacency_matrix(self):\n\n # Get dimension of future matrix\n dim = max([node.value for node in self.nodes])\n\n # Initialize square matrix of zeros\n # Matrix is square and indexes by from, to node values\n adjacency_matrix = [[0 for _ in range(dim+1)] for _ in range(dim+1)]\n\n # Insert edge value at the from, to coordinates\n # That is, fully identify each \"from, edge, to\" triplet\n for edge in self.edges:\n row = edge.node_from.value\n col = edge.node_to.value\n val = edge.value\n\n adjacency_matrix[row][col] = val\n\n # Return matrix of edge values indexed by from, to node values\n return adjacency_matrix", "def _build_downhill_matrices(self, weight=0.6667):\n\n from scipy import sparse as sparse\n \n\n down_neighbour = np.empty(self.tri.npoints, dtype=np.int)\n\n for node in range (0,self.tri.npoints):\n down_neighbour[node] = self.neighbour_array_lo_hi[node][0]\n\n # Build a matrix of downhill-ness - one entry per node ! 
\n \n size = self.tri.npoints\n row_array = np.empty(size, dtype = int)\n col_array = np.empty(size, dtype = int)\n down_array = np.ones(size)\n accu_array = np.ones(size)\n\n\n for row in range(0, self.tri.npoints): \n row_array[row] = row\n col_array[row] = down_neighbour[row]\n \n accuMCOO = sparse.coo_matrix( (accu_array, (row_array, col_array)), shape=(size,size) ).T \n\n self.accumulatorMat = accuMCOO.tocsr() \n\n self._build_adjacency_matrix_1()\n self._build_adjacency_matrix_2()\n \n self.downhillMat = weight * self.adjacency1 + (1.0-weight) * self.adjacency2\n\n # A1 = self.downhillMat\n # A2 = self.downhillMat.dot(self.downhillMat)\n # A2a = A1 + A2\n # A4 = A2.dot(A2)\n # A4a = A2a + A2.dot(A2a)\n # A8 = A4.dot(A4)\n # A8a = A4a + A4.dot(A4a)\n # A16 = A8.dot(A8)\n # A16a = A8a + A8.dot(A8a)\n\n # self.downhillMat16 = A16\n # self.downhillMat8 = A8\n # self.downhillMat16a = A16a\n # self.downhillMat8a = A8a\n\n # We make it optional to build these as they are not sparse \n # This cleans up previously stored matrices\n\n self.downhillCumulativeMat = None\n self.sweepDownToOutflowMat = None\n \n return", "def convert_dist_results_to_matrix(distances, neighbors, dists_matrix_out):\n for i in range(neighbors.shape[0]):\n for j in range(neighbors.shape[1]): # jth closest neighbor for item i\n neighb = neighbors[i,j]\n # insert (i,neighb) such that i < neighb\n min_in = min(i, neighb)\n max_in = max(i, neighb)\n if i != neighb and dists_matrix_out[min_in, max_in] == 0:\n dists_matrix_out[min_in, max_in] = distances[i,j]", "def vec2adjmat(source, target, symmetric=True):\n df = pd.DataFrame(np.c_[source, target], columns=['source', 'target'])\n # Make adjacency matrix\n adjmat = pd.crosstab(df['source'], df['target'])\n # Get all unique nodes\n # nodes = np.unique(np.c_[adjmat.columns.values, adjmat.index.values].flatten())\n nodes = np.unique(list(adjmat.columns.values) + list(adjmat.index.values))\n\n # Make the adjacency matrix symmetric\n if symmetric:\n # Add missing columns\n node_columns = np.setdiff1d(nodes, adjmat.columns.values)\n for node in node_columns:\n adjmat[node]=0\n\n # Add missing rows\n node_rows = np.setdiff1d(nodes, adjmat.index.values)\n adjmat=adjmat.T\n for node in node_rows:\n adjmat[node]=0\n adjmat=adjmat.T\n\n # Sort to make ordering of columns and rows similar\n [IA, IB] = ismember(adjmat.columns.values, adjmat.index.values)\n adjmat = adjmat.iloc[IB, :]\n adjmat.index.name='source'\n adjmat.columns.name='target'\n\n return(adjmat)", "def vec2adjmat(source, target, symmetric=True):\n df = pd.DataFrame(np.c_[source, target], columns=['source', 'target'])\n # Make adjacency matrix\n adjmat = pd.crosstab(df['source'], df['target'])\n # Get all unique nodes\n # nodes = np.unique(np.c_[adjmat.columns.values, adjmat.index.values].flatten())\n nodes = np.unique(list(adjmat.columns.values) + list(adjmat.index.values))\n\n # Make the adjacency matrix symmetric\n if symmetric:\n # Add missing columns\n node_columns = np.setdiff1d(nodes, adjmat.columns.values)\n for node in node_columns:\n adjmat[node]=0\n\n # Add missing rows\n node_rows = np.setdiff1d(nodes, adjmat.index.values)\n adjmat=adjmat.T\n for node in node_rows:\n adjmat[node]=0\n adjmat=adjmat.T\n\n # Sort to make ordering of columns and rows similar\n [IA, IB] = ismember(adjmat.columns.values, adjmat.index.values)\n adjmat = adjmat.iloc[IB, :]\n adjmat.index.name='source'\n adjmat.columns.name='target'\n\n return(adjmat)", "def weightedadjacency(X, F, inverse=False):\n\n n = F.max()+1\n\n # Compute 
weights for all links (euclidean distance)\n weights = np.sqrt(np.concatenate([((X[F[:, 0], :]-X[F[:, 1], :])**2).sum(1),\n ((X[F[:, 0], :]-X[F[:, 2], :])**2).sum(1),\n ((X[F[:, 1], :]-X[F[:, 0], :])**2).sum(1),\n ((X[F[:, 1], :]-X[F[:, 2], :])**2).sum(1),\n ((X[F[:, 2], :]-X[F[:, 0], :])**2).sum(1),\n ((X[F[:, 2], :]-X[F[:, 1], :])**2).sum(1)]))\n\n # penalize small distances (avoid division by zero)\n eps = 1e-6\n\n if inverse:\n weights = 1/weights\n\n weights = (weights + eps)\n\n # remove duplicated edges\n rows = np.concatenate([F[:, 0], F[:, 0],\n F[:, 1], F[:, 1],\n F[:, 2], F[:, 2]])\n\n cols = np.concatenate([F[:, 1], F[:, 2],\n F[:, 0], F[:, 2],\n F[:, 0], F[:, 1]])\n\n combos = np.column_stack([rows, cols])\n\n [rc, idx] = np.unique(combos, axis=0, return_index=True)\n weights = weights[idx]\n\n W = sparse.csr_matrix((weights, (rc[:, 0], rc[:, 1])), shape=(n, n))\n W = (W + W.transpose())/2\n\n return W", "def construct_graph(indices, distances, n):\n CSR_graph = scipy.sparse.csr_matrix((distances, [indices[:, 0], indices[:, 1]]), shape=(n, n))\n return CSR_graph", "def make_adjacency_matrix(X, metric=\"correlation\", n_neighbors=6, n_jobs=1):\n knn = NearestNeighbors(n_neighbors=n_neighbors,\n metric=metric,\n algorithm=\"brute\",\n n_jobs=n_jobs,\n ).fit(X)\n\n adjacency_matrix = knn.kneighbors_graph(X,\n mode=\"distance\",\n ).toarray()\n\n return adjacency_matrix", "def get_adjacency_matrix(self):\n m = zeros(self.size)\n perm = self.array_form\n for i in xrange(self.size - 1):\n m[perm[i], perm[i + 1]] = 1\n return m", "def __build_distance_matrix(self):\n for i in range(0, len(self.__corpus)):\n doc_i = self.__corpus[i]\n for j in range(i + 1, len(self.__corpus)):\n doc_j = self.__corpus[j]\n distance = doc_i.calc_distance(doc_j)\n self.__distance_matrix.append(distance)", "def adjacency_matrix(g):\n nodes = sorted(g.keys())\n adj = []\n for row_node in nodes:\n row = []\n for column_node in nodes:\n if column_node in g[row_node]:\n row.append(1)\n else:\n row.append(0)\n adj.append(row)\n \n return adj", "def adjacency(self):\n if self.E > 0:\n i = self.edges[:, 0]\n j = self.edges[:, 1]\n adj = coo_matrix((np.ones(self.E), (i, j)),\n shape=(self.V, self.V))\n else:\n adj = coo_matrix((self.V, self.V))\n return adj", "def calculate_connectivity_matrix(molecule, element_diagonal=False):\n num_atoms = molecule.GetNumAtoms()\n adjacency = np.zeros((num_atoms, num_atoms))\n for bond in molecule.GetBonds():\n bond_type = str(bond.GetBondType()).lower()\n bond_order = get_bond_order(bond_type)\n i = bond.GetBeginAtomIdx()\n j = bond.GetEndAtomIdx()\n adjacency[i, j] = bond_order\n adjacency[j, i] = bond_order\n if element_diagonal:\n for i, atom in enumerate(molecule.GetAtoms()):\n adjacency[i, i] = atom.GetAtomicNum()\n return adjacency", "def adjacency(G, nodelist=None, weight=\"weight\"):\n\n if nodelist is None:\n nodelist = G.nodes()\n\n A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, format=\"csr\")\n\n return A", "def weighted_jaccard_distance_matrix(X, w, n_jobs=1):\n vint = np.vectorize(int)\n X_int = vint(X*100)\n print \"starting to make distance matrix\"\n distance_matrix = pairwise_distances(X_int, w=w, metric=weighted_jaccard,n_jobs=n_jobs)\n print \"done making distance matrix\"\n return distance_matrix", "def compute_adjacency_matrix(G):\n\n iG = nx.convert_node_labels_to_integers(G)\n adj_list = iG.adjacency_list()\n n_nodes = len(iG.nodes())\n\n adj_mat = np.zeros((n_nodes, n_nodes))\n for x in xrange(n_nodes):\n adj_mat[x, adj_list[x]] = 
1\n\n return adj_mat", "def adjacencyMatrix(R, edges):\n A = np.zeros((len(R),len(R)))\n for i in range(0, len(edges)):\n A[edges[i][0]][edges[i][1]] = 1\n return A", "def _adj(w):\r\n return (w[:d * d] - w[d * d:]).reshape([d, d])", "def calc_dist_matrix(self):\n\n self.dist_matrix = spatial.distance.squareform(spatial.distance.pdist(self.data_vector,metric=\"hamming\"))\n\n self.dist_frame = pd.DataFrame(self.dist_matrix,\n index = self.seq_strings,\n columns = self.seq_strings)", "def _distance_matrix(self):\n\n # Log the type of metric being used in Sequencing\n logger.info('Using {} Distance'.format(self.measure))\n\n # Convert the nodal coordinate tuples to a np.array\n coords = np.vstack(map(np.array, self.coords.values()))\n \n if self.measure == 'haversine':\n # Partially applied haversine function that takes a coord and computes the vector distances for all coords\n haversine = lambda coord: get_hav_distance(coords[:, 0], coords[:, 1], *coord) \n # Map the partially applied function over all coordinates, and stack to a matrix\n return np.vstack(map(haversine, coords))\n\n # Partially applied haversine function that takes a coord and computes the vector distances for all coords\n euclidean = lambda coord: get_euclidean_dist(coords, coord)\n # Map the partially applied function over all coordinates, and stack to a matrix\n return np.vstack(map(euclidean, coords))", "def _weight_edges(self):\n weights = {}\n for edge in self.network.edges():\n weights[edge] = self.distance_matrix[edge]\n nx.set_edge_attributes(self.network, 'weight', weights)", "def _build_adjacency_matrix_1(self):\n\n from scipy import sparse as sparse\n \n down_neighbour = np.empty(self.tri.npoints)\n\n for node in range (0,self.tri.npoints):\n down_neighbour[node] = self.neighbour_array_lo_hi[node][0]\n\n # Build a matrix of downhill-ness - one entry per node ! \n \n size = self.tri.npoints\n row_array = np.empty(size)\n col_array = np.empty(size)\n down_array = np.ones(size)\n\n # Catch cases where node is local low point (i.e. it is its own low neighbour)\n\n for row in range(0, self.tri.npoints): \n row_array[row] = row\n col_array[row] = down_neighbour[row]\n if row == down_neighbour[row]:\n down_array[row] = 0.0\n \n\n downMCOO = sparse.coo_matrix( (down_array, (row_array, col_array)), shape=(size,size) ).T \n\n self.adjacency1 = downMCOO.tocsr() \n\n # Catch pathological cases - sometimes if there is a flat spot on the boundary, then \n # the filling method above will produce a non-square matrix. This is caused by\n # repetition of values in the COO list which are summed on conversion.\n\n if downMCOO.shape[0] != downMCOO.shape[1]:\n # This approach works but is a lot slower\n\n print \"\"\"\n Warning: the downhill matrices require a slow build method. 
This is probably\n Because there are degeneracies in the slope - particularly at the boundaries\n A small random perturbation is usually enough to fix this problem\n \"\"\"\n downMat = sparse.lil_matrix((size, size))\n\n for row in range(0, self.tri.npoints): \n downMat[down_neighbour[row],row] = 1.0\n\n for row in range(0, self.tri.npoints): \n if down_neighbour[row] == row:\n downMat[row,row] = 0.0\n \n self.adjacency1 = downMat.T.tocsr() \n \n return", "def get_adj_matrix(self):\n # This is currently implemented for the case when there are only two edge types (edge and no-edge)\n assert self.Z_edges_logits.shape[1] == 2\n Z_edge_logits = self.Z_edges_logits.detach().cpu().numpy() # [num_edges, 2]\n prob = np.exp(Z_edge_logits) / np.sum(np.exp(Z_edge_logits), axis=-1, keepdims=True) # [num_edges, 2]\n adj_matrix = np.zeros((self.num_nodes, self.num_nodes))\n mask = np.ones((self.num_nodes, self.num_nodes), dtype=bool) & ~np.eye(self.num_nodes, dtype=bool)\n adj_matrix[mask] = prob[:, 1]\n return adj_matrix", "def build_regular_adj(sampled_LU_INDEX, distance, pos2node):\n num_nodes = np.nonzero(sampled_LU_INDEX)[0].shape[0]\n A = np.zeros((num_nodes, num_nodes))\n adj_dict = {}\n for i in range(sampled_LU_INDEX.shape[0]):\n for j in range(sampled_LU_INDEX.shape[1]):\n \n if (i,j) in pos2node:\n node_ind = pos2node[(i,j)]\n neighbors = []\n for pos in [(i-distance,j),(i+distance,j),(i,j+distance),(i,j-distance)]:\n if pos in pos2node:\n neighbors.append(pos2node[pos])\n A[node_ind, pos2node[pos]] = 1.0\n A[pos2node[pos], node_ind] = 1.0\n \n adj_dict[node_ind] = neighbors\n \n return A, adj_dict", "def _create_adjacency_matrix(layer_edges):\n A = defaultdict(int)\n for l, edges in list(layer_edges.items()):\n for edge in edges:\n A[(edge[0], edge[1], l)] += 1\n A[(edge[1], edge[0], l)] += 1 \n return A", "def updateGraphByEuclideanDistance(self, graph, neighborDistance):\r\n graph.adjacencyMatrix = np.matrix(np.zeros(graph.adjacencyMatrix.shape))\r\n for a1 in range(self.agentNum):\r\n for a2 in range(a1+1, self.agentNum):\r\n if np.linalg.norm(self.agentPos[:,a1] - self.agentPos[:,a2]) <= neighborDistance:\r\n graph.adjacencyMatrix[a1,a2] = 1\r\n graph.adjacencyMatrix[a2,a1] = 1\r\n else:\r\n graph.adjacencyMatrix[a1,a2] = 0\r\n graph.adjacencyMatrix[a2,a1] = 0\r\n \r\n assert (graph.adjacencyMatrix == graph.adjacencyMatrix.T).all()", "def get_distance_matrix(visits: List[str], distances: Dict[Tuple[str, str], float]) -> List[List[float]]:\n\n return [[distances[i,j] for j in visits] for i in visits]", "def to_undirected(adjmat):\n num_rows=adjmat.shape[0]\n num_cols=adjmat.shape[1]\n adjmat_directed=np.zeros((num_rows, num_cols), dtype=int)\n tmpadjmat=adjmat.astype(int)\n\n for i in range(num_rows):\n for j in range(num_cols):\n adjmat_directed[i, j] = tmpadjmat.iloc[i, j] + tmpadjmat.iloc[j, i]\n\n adjmat_directed=pd.DataFrame(index=adjmat.index, data=adjmat_directed, columns=adjmat.columns, dtype=bool)\n return(adjmat_directed)", "def to_undirected(adjmat):\n num_rows=adjmat.shape[0]\n num_cols=adjmat.shape[1]\n adjmat_directed=np.zeros((num_rows, num_cols), dtype=int)\n tmpadjmat=adjmat.astype(int)\n\n for i in range(num_rows):\n for j in range(num_cols):\n adjmat_directed[i, j] = tmpadjmat.iloc[i, j] + tmpadjmat.iloc[j, i]\n\n adjmat_directed=pd.DataFrame(index=adjmat.index, data=adjmat_directed, columns=adjmat.columns, dtype=bool)\n return(adjmat_directed)", "def get_clusters_adjacencies(adjacency, clusters: list):\n clusters.sort(key=lambda t: len(t), reverse=True)\n id_to_cluster 
= get_id_to_cluster(clusters, adjacency.shape[0])\n num_clusters = len(clusters)\n mat = np.zeros((num_clusters, num_clusters))\n rows, cols = adjacency.nonzero()\n for i, j in zip(rows, cols):\n weight = adjacency[i, j]\n src_cluster = id_to_cluster[i]\n dest_cluster = id_to_cluster[j]\n mat[src_cluster, dest_cluster] += weight\n return mat", "def construct_4adj_graph(X):\n s0, s1, s2 = X.shape\n \n size_data = s0*s1\n \n xGrid, yGrid = np.meshgrid(np.arange(s0), np.arange(s1))\n totGrid = (xGrid*s1 + yGrid).transpose()\n \n horiz_edges = np.sqrt(np.sum((X[1:,:,:] - X[:-1,:,:])**2, axis=-1).flatten())\n indx_horiz_edges = totGrid[1:,:].flatten()\n indy_horiz_edges = totGrid[:-1,:].flatten()\n \n vert_edges = np.sqrt(np.sum((X[:,1:,:] - X[:,:-1,:])**2, axis=-1).flatten())\n indx_vert_edges = totGrid[:,1:].flatten()\n indy_vert_edges = totGrid[:,:-1].flatten()\n \n w = np.concatenate((horiz_edges, vert_edges), axis=0) + 1e-6\n u = np.concatenate((indx_horiz_edges, indx_vert_edges), axis=0)\n v = np.concatenate((indy_horiz_edges, indy_vert_edges), axis=0)\n \n return sp.sparse.csr_matrix((w,(u,v)), shape=(size_data, size_data))", "def _build_adjacency_matrix_2(self):\n\n from scipy import sparse as sparse\n \n down_neighbour = np.empty(self.tri.npoints)\n down_neighbour1 = np.empty(self.tri.npoints)\n\n for node in range (0,self.tri.npoints):\n down_neighbour[node] = self.neighbour_array_lo_hi[node][0]\n down_neighbour1[node] = self.neighbour_array_lo_hi[node][1]\n\n # Build a matrix of downhill-ness - one entry per node ! \n \n size = self.tri.npoints\n row_array = np.empty(size)\n col_array = np.empty(size)\n down_array = np.ones(size)\n\n # Catch cases where node is local low point (i.e. it is its own low neighbour)\n for row in range(0, self.tri.npoints): \n row_array[row] = row\n col_array[row] = down_neighbour1[row]\n if row == down_neighbour[row]:\n down_array[row] = 0.0 \n if row == down_neighbour1[row]:\n col_array[row] = down_neighbour[row]\n\n\n downMCOO = sparse.coo_matrix( (down_array, (row_array, col_array)), shape=(size,size) ).T \n self.adjacency2 = downMCOO.tocsr() \n\n # Catch pathological cases - sometimes if there is a flat spot on the boundary, then \n # the filling method above will produce a non-square matrix. This is caused by\n # repetition of values in the COO list which are summed on conversion.\n\n if downMCOO.shape[0] != downMCOO.shape[1]:\n # This approach works but is a lot slower\n\n print \"\"\"\n Warning: the downhill matrices require a slow build method. 
This is probably\n Because there are degeneracies in the slope - particularly at the boundaries\n A small random perturbation is usually enough to fix this problem\n \"\"\"\n downMat = sparse.lil_matrix((size, size))\n\n for row in range(0, self.tri.npoints): \n downMat[down_neighbour[row],row] = 1.0\n\n for row in range(0, self.tri.npoints): \n if row == down_neighbour[row] or row == down_neighbour1[row]:\n downMat[row,row] = 0.0\n \n self.adjacency2 = downMat.T.tocsr() \n\n return", "def generate_full_adj(self):\n edges = np.zeros(shape=(self.n_balls, self.n_balls))\n row_idx = 0 # start filling adjacency mat from root node\n col_idx = 1 # skip the root node and start from 2nd node\n\n # generate hierarchical interaction strength matrix\n int_strngth = np.zeros(shape=(self.n_balls, self.n_balls))\n # generate hierarchical edge lengths matrix\n lengths = np.zeros(shape=(self.n_balls, self.n_balls))\n for l in range(self.nl):\n for n in range(self.nn[l]):\n edges[row_idx, col_idx:col_idx + self.nc[l]] = 1\n # Fill symmetric the lower triangular (undirected graph)\n edges[col_idx:col_idx + self.nc[l], row_idx] = 1\n\n # same for interaction strength\n int_strngth[row_idx, col_idx:col_idx + self.nc[l]] = self.per_level_l2a_a2d_force[l]\n int_strngth[col_idx:col_idx + self.nc[l], row_idx] = self.per_level_l2a_a2d_force[l]\n\n # same for lengths\n lengths[row_idx, col_idx:col_idx + self.nc[l]] = self.per_level_l2a_a2d_length[l]\n lengths[col_idx:col_idx + self.nc[l], row_idx] = self.per_level_l2a_a2d_length[l]\n\n if l == self.nl - 1 or self.randomize_all_clusters:\n # create full cluster adjacency matrix\n edges[col_idx:col_idx + self.nc[l],\n col_idx:col_idx + self.nc[l]] = \\\n np.ones((self.nc[l], self.nc[l])) - np.eye(self.nc[l])\n # same for interaction strength\n int_strngth[col_idx:col_idx + self.nc[l],\n col_idx:col_idx + self.nc[l]] = \\\n np.ones((self.nc[l], self.nc[l])) * self.per_level_ws_force[l]\n # same for lengths\n lengths[col_idx:col_idx + self.nc[l],\n col_idx:col_idx + self.nc[l]] = \\\n np.ones((self.nc[l], self.nc[l])) * self.per_level_ws_length[l]\n\n # Increase counters after filling connections for a parent node\n col_idx += self.nc[l]\n row_idx += 1\n\n return edges, int_strngth, lengths", "def compute_euclidean_distance_matrix(locations):\n distances = {}\n distances_df=get_times(locations)\n print(distances_df)\n print(distances_df.iloc[0,0])\n print(distances_df.iloc[0,1])\n print(distances_df.iloc[0,2])\n for from_counter, from_node in enumerate(locations):\n distances[from_counter] = {}\n for to_counter, to_node in enumerate(locations):\n distances[from_counter][to_counter] = (int(\n distances_df.iloc[from_counter,to_counter]))\n return distances", "def compute_dist_matrix(X1, X2, distance):\n N, M = X1.shape[0], X2.shape[0]\n dist_matrix = np.zeros((N, M))\n for i in range(N):\n for j in range(M):\n dist_matrix[i][j] = dist(X1[i], X2[j], distance=distance)\n return dist_matrix", "def neighborJoining(distances):\n\n tree = {}\n\n while(len(distances.keys()) > 2):\n\n r = calcRs(distances)\n M = makeMMatrix(distances, r)\n\n smallest = 10000\n smallestKey = (\"\",\"\")\n\n #Find nearest neighbors\n for key in M.keys():\n for subkey in M[key].keys():\n if M[key][subkey] < smallest:\n smallest = M[key][subkey]\n smallestKey = (key, subkey)\n\n #Add new node and update distances to rest of tree\n newname = smallestKey[0] + \"-\" + smallestKey[1]\n distances[newname] = {}\n tree[smallestKey[0]] = {}\n tree[smallestKey[1]] = {}\n dij = 
distances[smallestKey[0]][smallestKey[1]]\n for key in M.keys():\n if key in smallestKey:\n continue\n distances[newname][key] = .5*(distances[smallestKey[0]][key] \\\n + distances[smallestKey[1]][key] - dij)\n distances[key][newname] = distances[newname][key]\n\n #Update distances to parents of node\n dik = (dij + r[smallestKey[0]] - r[smallestKey[1]])/2\n tree[smallestKey[0]][newname] = dik\n tree[smallestKey[1]][newname] = dij-dik\n detachDict(distances, smallestKey[0], smallestKey[1])\n\n #Connect final two nodes\n tree[distances.keys()[0]] = {}\n tree[distances.keys()[0]][distances[distances.keys()[0]].keys()[0]] =\\\n distances[distances.keys()[0]][distances[distances.keys()[0]].keys()[0]] \n return tree", "def weightedadjacencynormal(X, F):\n\n eps = 1e-6\n N = normal(X, F)\n n = X.shape[0]\n\n # compute weights for all links (euclidean distance)\n wdist = np.sqrt(np.concatenate([((X[F[:, 0], :]-X[F[:, 1], :])**2).sum(1),\n ((X[F[:, 0], :]-X[F[:, 2], :])**2).sum(1),\n ((X[F[:, 1], :]-X[F[:, 0], :])**2).sum(1),\n ((X[F[:, 1], :]-X[F[:, 2], :])**2).sum(1),\n ((X[F[:, 2], :]-X[F[:, 0], :])**2).sum(1),\n ((X[F[:, 2], :]-X[F[:, 1], :])**2).sum(1)]))\n\n # compute weights for all links (euclidean distance)\n wnormal = np.sqrt(np.concatenate([((N[F[:, 0], :]-N[F[:, 1], :])**2).sum(1),\n ((N[F[:, 0], :]-N[F[:, 2], :])**2).sum(1),\n ((N[F[:, 1], :]-N[F[:, 0], :])**2).sum(1),\n ((N[F[:, 1], :]-N[F[:, 2], :])**2).sum(1),\n ((N[F[:, 2], :]-N[F[:, 0], :])**2).sum(1),\n ((N[F[:, 2], :]-N[F[:, 1], :])**2).sum(1)]))\n\n wdist /= wdist.mean()\n wnormal /= wnormal.mean()\n weights = (wdist + wnormal + eps)**(-1)\n\n rows = np.concatenate([F[:, 0], F[:, 0], \n F[:, 1], F[:, 1], \n F[:, 2], F[:, 2]])\n\n cols = np.concatenate([F[:, 1], F[:, 2], \n F[:, 0], F[:, 2], \n F[:, 0], F[:, 1]])\n\n combos = np.column_stack([rows, cols])\n\n [rc, idx] = np.unique(combos, axis=0, return_index=True)\n weights = weights[idx]\n\n W = sparse.csr_matrix((weights, (rc[:, 0], rc[:, 1])), shape=(n, n))\n\n return W", "def calc_dist_matrix(self,verbose=False):\n\n print(\"Calculating distance matrix.\"); sys.stdout.flush()\n\n nrow = self.data_vector.shape[0]\n self.dist_matrix = np.zeros((nrow, nrow),dtype=float)\n for i in range(nrow):\n if verbose:\n if i % 1000 == 0:\n print(\"Row\",i,\"of\",nrow)\n sys.stdout.flush()\n\n for j in range(i + 1, nrow):\n self.dist_matrix[i,j] = self._pairwise_dist(self.data_vector[i],self.data_vector[j])\n self.dist_matrix[j,i] = self.dist_matrix[i,j]\n \n self.dist_frame = pd.DataFrame(self.dist_matrix,\n index = self.seq_strings,\n columns = self.seq_strings)", "def adj(self):\n\t\tres = SquareMatrix(self._rows)\n\t\tfor i in range(self._rows):\n\t\t\tfor j in range(self._rows):\n\t\t\t\tres[i][j] = ((-1) ** (i + j)) * self.minor(j, i)\n\t\treturn res", "def fill_weight_matrix(self,adjMatrix):\n\t\tassert len(adjMatrix[0]) == self._size\n\t\tself.myWeights = self.init_zero_matrix(self._size,self._size)\n\t\tfor i in range(0,self._size):\n\t\t\tfor j in range(0,self._size):\n\t\t\t\t# TODO: change for plain tabu-machine if needed (-A to move to high level function)\n\t\t\t\tself.myWeights[i][j] = 2 * (1 - adjMatrix[i][j])*(1-self.kron(i,j))", "def _create_affinity_matrix(mesh):\n\n l = len(mesh.faces)\n print(\"mesh_segmentation: Creating distance matrices...\")\n distance_matrix = _create_distance_matrix(mesh)\n\n print(\"mesh_segmentation: Finding shortest paths between all faces...\")\n # for each non adjacent pair of faces find shortest path of adjacent faces\n W = 
scipy.sparse.csgraph.dijkstra(distance_matrix)\n inf_indices = numpy.where(numpy.isinf(W)) # unreachable entries\n W[inf_indices] = 0 # mark the unreachable entries\n\n print(\"mesh_segmentation: Creating affinity matrix...\")\n # change distance entries to similarities\n sigma = W.sum() / (l ** 2) # squared\n den = 2 * (sigma ** 2)\n W = numpy.exp(-W / den)\n W[inf_indices] = 0\n numpy.fill_diagonal(W, 1) # fill the main diagonal of an array of any dimensionality.\n\n return W", "def makeMMatrix(distances, r):\n M = copy.deepcopy(distances)\n\n for key in M.keys():\n for subkey in M[key].keys():\n M[key][subkey] -= (r[key] + r[subkey])\n\n return M", "def get_adjacency(self, entity_names, relation_type, weight='weight'):\n sources, targets, weights = zip(\n *[\n (entity_name, target_entity_name, a_weight)\n for entity_name in entity_names\n for (target_entity_name,\n a_weight) in self.get_entity_out_neighbors(\n entity_name, [relation_type], weight=weight\n )\n ]\n )\n connected_entities = list(set(sources) | set(targets))\n n = len(connected_entities)\n index_to_entity_name = dict(enumerate(connected_entities))\n entity_name_to_index = {\n entity_name: index\n for index, entity_name in index_to_entity_name.items()\n }\n return coo_matrix(\n (\n np.array(weights), (\n np.array(\n [\n entity_name_to_index[entity_name]\n for entity_name in sources\n ]\n ),\n np.array(\n [\n entity_name_to_index[entity_name]\n for entity_name in targets\n ]\n )\n )\n ),\n shape=(n, n)\n )", "def get_adjacency_matrix(self, rearranged_data):\n data = np.ones(self.num_edges)\n matrix = csr_matrix((data, (rearranged_data['FromNodeId'], rearranged_data['ToNodeId'])),\n shape=(self.num_nodes, self.num_nodes))\n return matrix", "def adjacencyMatrixplot(nodes):\n adMat = np.zeros((len(nodes), len(nodes)), int)\n for node in nodes:\n if (node.id == 0):continue\n parent, child = node.parent, node.id # -1 -> tally with list indices\n adMat[parent, child] = 1\n return adMat", "def get_distances(self):\n N = len(self.cells) # Number of cells\n distances = np.zeros([N, N]) # distances between cells\n positions = self.position_matrix() # positions of cells \n \n # get distances between cells (exploit symmetry between upper and lower triangular form)\n for i, position in enumerate(positions[:-1, :]): # Iterate matrix except the last one\n directions = positions[i+1:, :] - position # direction from i to j > i\n distances[i, i+1:] = np.linalg.norm(directions, axis=1) # length of directions\n \n return distances + distances.T # Add lower triangle of matrix to upper ", "def build_distance_matrix(path_to_embeddings):\n\n embed_df = pd.read_csv(path_to_embeddings)\n print (\"length is: \", len(embed_df))\n columns = list(embed_df)\n\n \n distances = euclidean_distances(embed_df.iloc[:, 1:], embed_df.iloc[:, 1:])\n embed_df = embed_df.set_index([columns[0]])\n # format distance matrix\n distances_df = pd.DataFrame(distances)\n distances_df.columns = list(embed_df.index)\n distances_df.index = list(embed_df.index)\n\n print (\"finished building the distance matrix ...\")\n\n print (\"///////////////////\")\n print (len(distances_df))\n\n return distances_df", "def Distmatrix(self):\n self.Dismatrix = np.zeros((self.nodenum, self.nodenum))\n for i in range(len(self.Dismatrix)):\n for j in range(len(self.Dismatrix)):\n self.Dismatrix[i, j] = sf.dist(self.y[i], self.x[i], self.y[j], self.x[j])\n self.Dismatrix[j, i] = self.Dismatrix[i, j]", "def feature_calculator(args, graph):\n index_1 = [edge[0] for edge in graph.edges()]\n index_2 = [edge[1] for edge in 
graph.edges()]\n values = [1 for edge in graph.edges()]\n node_count = max(max(index_1)+1,max(index_2)+1)\n adjacency_matrix = sparse.coo_matrix((values, (index_1,index_2)),shape=(node_count,node_count),dtype=np.float32)\n degrees = adjacency_matrix.sum(axis=0)[0].tolist()\n degs = sparse.diags(degrees, [0])\n normalized_adjacency_matrix = degs.dot(adjacency_matrix)\n target_matrices = [normalized_adjacency_matrix.todense()]\n powered_A = normalized_adjacency_matrix\n if args.window_size > 1:\n for power in tqdm(range(args.window_size-1), desc = \"Adjacency matrix powers\"):\n powered_A = powered_A.dot(normalized_adjacency_matrix)\n to_add = powered_A.todense()\n target_matrices.append(to_add)\n target_matrices = np.array(target_matrices)\n return target_matrices", "def _generate_adjacency_matrices(self):\n self.adj_matrices = dict()\n mes = []\n args = []\n for metaedge in self.metaedges:\n mes.append(metaedge)\n args.append(self._prepare_parallel_adj_matrix_args(self.edge_df.query('abbrev == @metaedge')))\n res = parallel_process(array=args, function=mt.get_adj_matrix, use_kwargs=True, n_jobs=self.n_jobs,\n front_num=0)\n for metaedge, matrix in zip(mes, res):\n self.adj_matrices[metaedge] = matrix", "def getDistanceMatrix(self):\n v = self.getVectors()\n vLis = v.keys()\n N = len(v.keys())\n D = np.zeros([N, N], dtype=np.float32)\n print(N)\n for i in range(N):\n print(\"%d/%d\" %(i, N))\n D[i, i] = 1\n for j in range(i + 1, N):\n dist = self.cosin_sim_pairs(v[vLis[i]], v[vLis[j]])\n D[i, j] = dist\n D[j, i] = dist\n return D", "def gradop(adj: np.ndarray) -> sp.csr_matrix:\n\n e = np.array(adj.nonzero())\n ne = e.shape[1]\n nv = adj.shape[0]\n i, j, x = np.tile(range(ne), 2), e.flatten(), np.repeat([-1, 1], ne)\n\n return sp.csr_matrix((x, (i, j)), shape=(ne, nv))", "def floyd_warshall_predecessor_and_distance(G, weight='weight'):\n from collections import defaultdict\n # dictionary-of-dictionaries representation for dist and pred\n # use some defaultdict magick here\n # for dist the default is the floating point inf value\n dist = defaultdict(lambda: defaultdict(lambda: float('inf')))\n for u in G:\n dist[u][u] = 0\n pred = defaultdict(dict)\n # initialize path distance dictionary to be the adjacency matrix\n # also set the distance to self to 0 (zero diagonal)\n undirected = not G.is_directed()\n for u, v, d in G.edges(data=True):\n e_weight = d.get(weight, 1.0)\n dist[u][v] = min(e_weight, dist[u][v])\n pred[u][v] = u\n if undirected:\n dist[v][u] = min(e_weight, dist[v][u])\n pred[v][u] = v\n for w in G:\n for u in G:\n for v in G:\n if dist[u][v] > dist[u][w] + dist[w][v]:\n dist[u][v] = dist[u][w] + dist[w][v]\n pred[u][v] = pred[w][v]\n return dict(pred), dict(dist)", "def test_distance_weight(self):\n knn = Knn(n_neighbors=3)\n distances = np.array([2, .3, 4])\n weights = knn._distance_weights(distances)\n assert np.allclose(weights, np.array([[1/2, 2], [1/.3, .3], [1/4, 4]])), \"distance_weights are not correct\"", "def floyd_warshall(self):\n distance = {}\n path_dict = {}\n for from_node in self.nodes():\n distance[from_node] = {}\n path_dict[from_node] = {}\n for node in self.nodes():\n distance[from_node][node] = sys.maxsize\n path_dict[from_node][node] = None\n distance[from_node][from_node] = 0\n neighbors = self.neighbors(from_node)\n for neighbor in neighbors:\n distance[from_node][neighbor] = neighbors[neighbor]\n path_dict[from_node][neighbor] = neighbor\n for k in self.nodes():\n for i in self.nodes():\n for j in self.nodes():\n if distance[i][k] + distance[k][j] < 
distance[i][j]:\n distance[i][j] = distance[i][k] + distance[k][j]\n path_dict[i][j] = path_dict[i][k]\n return path_dict, distance", "def _generate_distance_kernel_matrix(self):\n with self._rw_lock.read_lock():\n # Create matrix whose elements are the distances between all row\n # permutations\n fmat = self._feature_mat # shorter name\n num_rows = fmat.shape[0]\n\n # distance kernel is a square matrix based on feature samples\n dist_kernel = np.mat(np.ndarray((num_rows,)*2))\n self._log.info(\"Creating distance kernel with shape %s\",\n dist_kernel.shape)\n\n timer_log = logging.getLogger('.'.join((self.__module__,\n self.__class__.__name__,\n \"SimpleTimer\")))\n\n for i in xrange(num_rows - 1):\n with SimpleTimer('computing distances from row %d to [%d-%d]'\n % (i, i+1, num_rows-1), timer_log):\n dist_kernel[i, i] = 1.0\n for j in xrange(i + 1, num_rows):\n dist = self._histogram_intersection_distance(fmat[i],\n fmat[j])\n dist_kernel[i, j] = dist_kernel[j, i] = dist\n dist_kernel[-1, -1] = 1.0\n return dist_kernel", "def _freespace_matrix(distance):\n\n return np.array([[1., distance], [0., 1.]])", "def dijkstra(map):\n width = len(map[0])\n height = len(map)\n\n is_in_tree = [[False for _ in range(width)] for _ in range(height)]\n distance = [[MAXINT for _ in range(width)] for _ in range(height)]\n parent = [[None for _ in range(width)] for _ in range(height)]\n distance[0][0] = 0\n\n # (row, col)!!!!\n curcell = (0, 0)\n next_cell = None\n weight = 0\n best_distance_so_far = MAXINT\n\n while not is_in_tree[curcell[0]][curcell[1]]:\n is_in_tree[curcell[0]][curcell[1]] = True\n neighbors = [\n adj for adj in __get_adjacent_4c(curcell, height, width) if map[adj[0]][adj[1]] != 1\n ]\n\n for n in neighbors:\n cand_distance = distance[curcell[0]][curcell[1]] + 1\n if distance[n[0]][n[1]] > cand_distance:\n distance[n[0]][n[1]] = cand_distance\n parent[n[0]][n[1]] = curcell\n\n # Find the closest non-tree node---at least one would've been \"relaxed\"\n # by the loop above. 
Could be improved by a priority queue.\n best_distance_so_far = MAXINT\n for row in range(height):\n for col in range(width):\n node_dist = distance[row][col]\n if not is_in_tree[row][col] and best_distance_so_far > node_dist:\n best_distance_so_far = node_dist\n curcell = (row, col)\n\n return distance[height - 1][width - 1] + 1", "def create_dist_matrix(matrix):\n #Convert input data matrix to numpy matrix\n matrix = np.array(matrix)\n n = matrix.shape[0]\n \n #Iterate through number of samples to create distance matrix\n for i in range(n):\n dist_array = euclidean_distance(matrix[i,:], matrix)\n if i == 0:\n dist_matrix = dist_array\n else:\n dist_matrix = np.concatenate((dist_matrix, dist_array), axis = 1)\n return dist_matrix", "def nm_dist_mat(self):\n mat = np.zeros([self.N, self.M])\n for n in range(self.N):\n for m in range(self.M):\n mat[n, m] = distance(self.N_coords[n], self.M_coords[m])\n return mat", "def adjacency_matrix(cluster_pred):\n #print('adjacency start')\n x = cluster_pred.copy()\n if(len(x.shape) == 1):\n x = x[:, np.newaxis]\n # Force the cluster indexing to be positive integers\n if(x.min() <= 0):\n x += -x.min() + 1\n\n A = np.dot(x**-1., x.T) == 1\n #print('adjacency end')\n return A", "def compute_distance(self, transpose=False):\n\n # Calculate distance matrix\n if transpose:\n distance_matrix = pdist(self.matrix.T, self.distance)\n else:\n distance_matrix = pdist(self.matrix, self.distance)\n\n # Remove NaNs\n distance_matrix[np.isnan(distance_matrix)] = 1.0\n\n return distance_matrix", "def get_distance_matrix_from_graph(network, nodelist):\r\n if nodelist:\r\n apspnodes = nodelist\r\n else:\r\n apspnodes = network.nodes()\r\n \r\n mapping = {}\r\n for index, node in enumerate(apspnodes):\r\n mapping.update({node:index})\r\n \r\n nodeset = set(apspnodes)\r\n n = len(apspnodes)\r\n D = numpy.zeros((n,n))\r\n for node in apspnodes:\r\n level = 0\r\n levelnodes = {node}\r\n seen = {}\r\n \r\n while levelnodes:\r\n worklist = levelnodes\r\n levelnodes = {}\r\n for target in worklist:\r\n if target not in seen:\r\n if target in nodeset:\r\n D[mapping[node], mapping[target]] = level\r\n seen[target] = level\r\n \r\n try:\r\n levelnodes.update(network[target])\r\n except KeyError:\r\n print \"Error: The specified node '%s' could not be found in the network\" % target\r\n sys.exit(1)\r\n level = level + 1\r\n \r\n return D, mapping", "def format_distance_matrix(labels, data):\r\n return format_matrix(data, labels, labels)", "def kinase_distance_matrix(\n structure_distances, by=\"minimum\", fill_diagonal=True, coverage_min=0.0\n):\n\n if by == \"size\":\n fill_diagonal = False\n\n # Data for upper half of the matrix\n pairs_upper = kinase_distances(structure_distances, by, coverage_min).reset_index()[\n [\"kinase.1\", \"kinase.2\", \"distance\"]\n ]\n # Data for lower half of the matrix\n pairs_lower = pairs_upper.rename(columns={\"kinase.1\": \"kinase.2\", \"kinase.2\": \"kinase.1\"})\n\n # Concatenate upper and lower matrix data\n pairs = (\n pd.concat([pairs_upper, pairs_lower])\n .sort_values([\"kinase.1\", \"kinase.2\"])\n .drop_duplicates()\n .reset_index(drop=True)\n )\n\n # Convert to matrix\n matrix = pairs.pivot(columns=\"kinase.2\", index=\"kinase.1\", values=\"distance\")\n\n if fill_diagonal:\n np.fill_diagonal(matrix.values, 0)\n\n # If matrix contains number of structure pairs: NaN > 0, cast to int\n if by == \"size\":\n matrix = matrix.fillna(0)\n matrix = matrix.astype(\"int64\")\n\n return matrix", "def distance(self, method=\"euclidean\", 
**kwargs):\n return Adjacency(\n pairwise_distances(self, metric=method, **kwargs), matrix_type=\"Distance\"\n )", "def calculate_whole_dist_matrix(nodes, links, mode, ED):\n # Used in uncertainty calculation. Only consider tree distance mode.\n mode = 'td-mapping'\n G=nx.Graph()\n G.add_nodes_from([0, len(nodes)])\n for i in range(0, len(links)):\n E_dist = np.linalg.norm(nodes[links[i][0]][2]-nodes[links[i][1]][2])\n G.add_edge(links[i][0],links[i][1],weight=E_dist)\n dist = np.zeros((len(nodes), len(nodes)))\n for i in range(0, len(nodes)):\n for j in range(0, len(nodes)):\n if mode == \"td-mapping\":\n dist[i,j] = nx.shortest_path_length(G, source=i, target=j, weight='weight')\n if mode == \"ed-mapping\":\n dist[i,j] = np.linalg.norm(nodes[i][[0,2]]-nodes[j][[0,2]])\n if mode == \"et-mapping\":\n dist[i,j] = (1-ED)*nx.shortest_path_length(G, source=i, target=j, weight='weight')+ED*np.linalg.norm(nodes[i][[0,2]]-nodes[j][[0,2]])\n return dist", "def getDistanceMatrix(self):\n return self.distmat.as_matrix()", "def adjacency_spectrum(G, weight=\"weight\"):\n import scipy as sp\n\n return sp.linalg.eigvals(nx.adjacency_matrix(G, weight=weight).todense())", "def calculate_dist_mat(embeddings: np.ndarray, norm: int) -> np.ndarray:\n kwargs = {'p': norm}\n condensed_dist = pdist(embeddings, metric='minkowski', **kwargs)\n dist_mat = squareform(condensed_dist)\n return dist_mat", "def normalize_adj(adj): # graph normalization operation\n adj = sp.coo_matrix(adj) # A_hat\n rowsum = np.array(adj.sum(1))\n d_inv_sqrt = np.power(rowsum, -0.5).flatten()\n d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0.\n d_mat_inv_sqrt = sp.diags(d_inv_sqrt) # D_hat\n return adj.dot(d_mat_inv_sqrt).transpose().dot(d_mat_inv_sqrt).tocoo()", "def floydWarshall(graph):\n \"\"\" initializing the solution matrix same as input graph matrix\n OR we can say that the initial values of shortest distances\n are based on shortest paths considering no \n intermediate vertices \"\"\"\n V = len(graph[0])\n dist = [[elem for elem in line] for line in graph]\n \n \"\"\" Add all vertices one by one to the set of intermediate\n vertices.\n ---> Before start of an iteration, we have shortest distances\n between all pairs of vertices such that the shortest\n distances consider only the vertices in set \n {0, 1, 2, .. k-1} as intermediate vertices.\n ----> After the end of an iteration, vertex no. k is\n added to the set of intermediate vertices and the \n set becomes {0, 1, 2, .. 
k}\n \"\"\"\n for k in range(V):\n \n # pick all vertices as source one by one\n for i in range(V):\n \n # Pick all vertices as destination for the\n # above picked source\n for j in range(V):\n \n # If vertex k is on the shortest path from \n # i to j, then update the value of dist[i][j]\n dist[i][j] = min(dist[i][j], dist[i][k] + dist[k][j])\n\n for line in dist:\n print line\n\n return dist", "def adjacency_matrix(edge_index: nb.int64[:,:],\n n: nb.int64) -> nb.boolean[:,:]:\n adj_mat = np.eye(n, dtype=np.bool_)\n for e in edge_index:\n adj_mat[e[0],e[1]] = True\n return adj_mat", "def cal_distances(embeddings):\n # calculate\n dist = np.zeros([len(embeddings), len(embeddings)], dtype=float)\n for ii in xrange(len(embeddings)):\n for jj in xrange(ii + 1, len(embeddings)):\n dist[ii, jj] = np.linalg.norm(embeddings[ii] - embeddings[jj])\n dist[jj, ii] = dist[ii, jj] \n \n # return\n return dist", "def edge2adj(edge_index,edge_weight,num_nodes):\n adj = torch.sparse.FloatTensor(edge_index, edge_weight, torch.Size([num_nodes,num_nodes]))\n return adj", "def adjacency(self, r=5., sigma_B2=.02, sigma_X2=3.):\n #initialize A as a sparse matrix and D as a vector\n m = self.scaled.shape[0]\n n = self.scaled.shape[1]\n A = sparse.lil_matrix((m*n,m*n))\n D = np.ones((m*n,1))\n #fill in nonzero elements of A one row at a time\n for i in range(m*n):\n indices, distance = get_neighbors(i, r, m, n)\n for j in range(len(indices)):\n #find the set of all vertices that satisfy the conditions\n if distance[j] < r:\n A[i, indices[j]] = np.exp(-abs(self.flat_brightness[i]-self.flat_brightness[indices[j]])/sigma_B2-distance[j]/sigma_X2)\n else:\n A[i, indices[j]] = 0\n #convert A to a csc matrix\n A = A.tocsc()\n #update D as column sum of A\n D = np.array(A.sum(axis=0))[0]\n return A, D", "def edges_to_adjacency_matrix(mesh):\n adja = graph.edges_to_coo(mesh.edges,\n data=np.ones(len(mesh.edges),\n dtype=np.int8))\n\n return sparse.triu(adja) + sparse.tril(adja).transpose()", "def adjacency( graph : SpatialGraph, \n normalize : bool = True,\n sparse : bool = False\n ) -> np.ndarray :\n if graph.directed:\n raise NotImplementedError(\"Directed graphs are currently not supported.\")\n dtype = np.float if normalize else np.int\n\n adj = np.zeros((graph.num_nodes, graph.num_nodes), dtype=dtype)\n if sparse:\n adj = sp.coo_matrix(adj)\n for node in graph.nodes.values():\n for adj_node in node.neighbours.values():\n adj[node.id, adj_node.id] = 1\n return normalize_adj(adj, sparse) if normalize else adj", "def floyd_warshall_numpy(G, nodelist=None, weight='weight'):\n try:\n import numpy as np\n except ImportError:\n raise ImportError(\n \"to_numpy_matrix() requires numpy: http://scipy.org/ \")\n\n # To handle cases when an edge has weight=0, we must make sure that\n # nonedges are not given the value 0 as well.\n A = nx.to_numpy_matrix(G, nodelist=nodelist, multigraph_weight=min,\n weight=weight, nonedge=np.inf)\n n, m = A.shape\n I = np.identity(n)\n A[I == 1] = 0 # diagonal elements should be zero\n for i in range(n):\n A = np.minimum(A, A[i, :] + A[:, i])\n return A", "def get_laplacian(adjacency: sparse.csr_matrix) -> sparse.csr_matrix:\n weights = adjacency.dot(np.ones(adjacency.shape[0]))\n return sparse.diags(weights) - adjacency", "def from_sparse_matrix(self, matrix, node_names=None, directed=False, *args, **kwargs):\n\t\tN = list()\n\t\tE = dict()\n\t\tneighbours = dict()\n\n\t\t# Assert Square Adjacency Matrix\n\t\tif matrix.shape[0] != matrix.shape[1]:\n\t\t\traise ValueError('Adjacency Matrix not 
square')\n\n\t\tN = list( np.arange(matrix.shape[0]) )\n\t\tneighbours = {i:[] for i in np.arange(matrix.shape[0])}\n\t\t#\n\t\trows,cols = matrix.nonzero()\n\t\tfor i,j in zip(rows,cols):\n\t\t\t# the diagonal is (must be) always zero (distance = 0)\n\t\t\tif i==j:\n\t\t\t\tcontinue\n\t\t\t# infinite distance doesn't have to be calculated\n\t\t\telif matrix[i,j] == np.inf:\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\tE[ (i,j) ] = float(matrix[i,j])\n\t\t\t\tneighbours[i].append(j)\n\n\t\treturn Dijkstra(N, E, neighbours, node_names, directed, *args, **kwargs)", "def dist_matrix(self):\n return self.__dist_matrix" ]
[ "0.73236233", "0.7304077", "0.6892268", "0.68493205", "0.6665239", "0.66055983", "0.6592866", "0.6582315", "0.6490363", "0.6472683", "0.64540255", "0.6431121", "0.63700914", "0.63675284", "0.6365074", "0.63617116", "0.6358854", "0.6308832", "0.6292462", "0.628894", "0.6278451", "0.62595737", "0.62595737", "0.62273115", "0.6210341", "0.61976194", "0.61928695", "0.6185228", "0.6185047", "0.61841565", "0.6169472", "0.616631", "0.61641395", "0.6146792", "0.6132945", "0.60863936", "0.60761845", "0.60604024", "0.6058371", "0.6057357", "0.60469764", "0.60383207", "0.6035424", "0.60319334", "0.603176", "0.59804493", "0.59804493", "0.5974842", "0.5910501", "0.59067047", "0.58965194", "0.5894885", "0.5893558", "0.5887395", "0.58767706", "0.5869692", "0.5868387", "0.5861178", "0.5860588", "0.58583844", "0.58536917", "0.5839571", "0.583171", "0.5826699", "0.58243454", "0.5805691", "0.5801963", "0.58015096", "0.5795371", "0.579313", "0.578473", "0.578236", "0.57766885", "0.57383835", "0.57368785", "0.57226914", "0.57163024", "0.57125676", "0.57098883", "0.5696523", "0.56901735", "0.5689094", "0.56731015", "0.5667011", "0.5665415", "0.5661638", "0.5654138", "0.56436133", "0.5642143", "0.56411743", "0.5632812", "0.5629019", "0.56274664", "0.5619955", "0.5600115", "0.55977553", "0.5578527", "0.5574015", "0.55616945", "0.5559859" ]
0.58099884
65
The purpose of this code is to be able to quickly load any kind of data using pickle.
def Load_Data(savedfilename):
    import pickle
    try:
        with open(savedfilename, 'rb') as handle:
            loaded_data = pickle.load(handle)
        print('loaded successfully, file loaded as:\nloaded_data')
        return loaded_data
    except Exception:
        import numpy as np
        loaded_data = np.load(savedfilename)
        return loaded_data
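A minimal usage sketch for the loader above (added for illustration; the file names are hypothetical placeholders, not part of the original record):

data = Load_Data('results.pkl')     # a pickled file: pickle.load succeeds and the object is returned
array = Load_Data('results.npy')    # not a pickle: pickle.load raises, so the np.load fallback handles it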
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def loads(data):\n return cPickle.loads(data)", "def load_pickle_data(filename):\n path = \"../tmp/{}.pckl\".format(filename)\n if os.path.exists(path):\n print(\"LOADING PCKL FILE FROM {}\".format(path))\n f = open(path, 'rb')\n obj = pickle.load(f)\n f.close()\n return obj", "def load_data():\n with open('data.pickle', 'rb') as f:\n data = pickle.load(f)\n return data", "def load_synthetic_data():\n\n pickle_object = FM().data_file \n\n with pickle_object.open('rb') as data_file: \n return pickle.load(data_file)", "def _load_obj(name):\n with open('/bigdisk/pickles/' + name, 'r') as f:\n return pickle.load(f)", "def load_obj(name):\n with open('../../data/' + name + '.pkl', 'rb') as f:\n return pickle.load(f)", "def load_obj(name):\n with open('../../data/' + name + '.pkl', 'rb') as f:\n return pickle.load(f)", "def test_pickle_load(self):\n l = [1, 2, 3, 4, 5]\n self.plugin.save_data(l)\n\n l = self.plugin.load_data()\n self.assertIn(4, l)", "def pickle_loader(fileobj):\n if isinstance(fileobj, bytes):\n data = pickle.loads(fileobj, encoding=\"latin1\")\n elif isinstance(fileobj, six.string_types):\n with open(fileobj, 'rb') as f:\n data = pickle.load(f, encoding=\"latin1\")\n elif hasattr(fileobj, 'read'):\n data = pickle.load(fileobj, encoding=\"latin1\")\n else:\n raise ValueError('fileobj is not a filename or a file object')\n return data", "def load_obj(name):\r\n with open('../pickle/' + name + '.pkl', 'rb') as fout:\r\n return pickle.load(fout)\r\n # end with\r", "def loadStuff(path=None):\n\n if path == None:\n print(\"No path specified\")\n return\n\n try:\n pkl_file = open(path, 'rb')\n obj = cPickle.load(pkl_file)\n pkl_file.close()\n print('Data correctly loaded and returned')\n return obj\n\n except IOError as e:\n #print \"I/O error({0}):{1}\".format(e.errno, e.strerror)\n print('I/O error')\n except:\n print(\"Unexpected error\" % sys.exc_info()[0])\n raise", "def pickleLoad(filename):\n #Todo: Handle exceptions from pickle\n filehandler = open(\"obj/\" + filename + \".obj\", 'rb')\n object = pickle.load(filehandler)\n return object", "def loadObj(name):\n\n with open(name + '.pkl', 'rb') as f:\n return pickle.load(f)", "def load_pickle(path):\n with open(path, 'rb') as f:\n data = pickle.load(f)\n return data", "def loadPickle(pickle_file):\n print(\"Loading pickle data from file: \"+pickle_file)\n\n data = None\n try:\n with open(pickle_file, \"rb\") as fd:\n data = pickle.load(fd)\n except EOFError:\n pass\n except pickle.UnpicklingError as upe:\n print(\"Failed: Loading Pickle Data\")\n except IOError:\n data = {}\n\n return data", "def load(fname):\r\n with open(fname, 'rb') as f:\r\n data = pickle.load(f)\r\n return data", "def load_object(self, filename):\n with open(filename, 'rb') as inp: # Overwrites any existing file.\n data = pickle.load(inp)\n return data", "def load_data(self) -> None:", "def load_obj(path):\n with open(path, 'rb') as f:\n return pickle.load(f)", "def check_pickle() -> list:\n try:\n with open(\"data.pkl\", mode='r+b') as open_pickle:\n data = pickle.load(open_pickle)\n except FileNotFoundError as _:\n data = load_data()\n with open(\"data.pkl\", mode='w+b') as open_pickle:\n pickle.dump(data, open_pickle)\n return data", "def test__pickle_unpickle(self):\n pass", "def pickle_load(path):\n data = pickle.load(open(os.path.join(os.getcwd(), path), 'rb'))\n return data", "def load_data_pickle(PATH, dataset, filename):\n with open(PATH + '/' + dataset + \"_\" + filename + \".pkl\",\"rb\") as f:\n new_data = pickle.load(f)\n\n # 
print(filename, \"opened\")\n return new_data", "def load_pickle(filename):\n with open(filename, 'rb') as file:\n obj = pickle.load(file)\n return obj", "def __init__(self, name, loadfile=None, loadpath=''):\n \n self.name = name\n \n if loadfile==None:\n self.data = []\n else:\n with open(loadpath+loadfile) as currentfile:\n self.data = pickle.load(currentfile)", "def load_pkl_data(path):\n with open(path, 'rb') as fi:\n data = pickle.load(fi)\n return data", "def load_obj(path: str):\n with open(path, 'rb') as h:\n return pickle.load(h)", "def load_obj(saved_name):\n with open( saved_name + '.pkl', 'rb') as f:\n return pickle.load(f)", "def load_pickle(path):\n assert osp.exists(path)\n # gc.disable()\n with open(path, 'rb') as f:\n ret = pickle.load(f)\n # gc.enable()\n return ret", "def pickle_load(path):\n try:\n data = pickle.load(open(path, \"rb\"))\n return data\n except UnpicklingError:\n unix_path = path.replace(\".pkl\", \"_unix.pkl\")\n try:\n data = pickle.load(open(unix_path, \"rb\"))\n return data\n except FileNotFoundError:\n path = _to_unix(path)\n data = pickle.load(open(path, \"rb\"))\n return data", "def load_pickle(path):\n with open(path, 'rb') as handle:\n return pickle.load(handle)", "def load_pickle(path):\n assert osp.exists(path)\n with open(path, 'r') as f:\n ret = pickle.load(f)\n return ret", "def load(self):\n\n raise NotImplementedError", "def load_data(self):", "def load_pickle(filepath):\n logging.info('Loading object from pickle: {}'.format(filepath))\n with open(filepath, 'rb') as infile:\n return pickle.load(infile)", "def load_pickle(filename):\n\n with open(filename, 'rb') as file:\n if filename.split('.')[-1] == 'dill':\n obj = dill.load(file)\n else:\n obj = pickle.load(file)\n return obj", "def load_pickle(path):\n try:\n debug(\"trying to load pickle data\")\n with open(path, mode='rb') as file:\n debug(\"opened file %s for reading\", path)\n return pickle.load(file, encoding='utf-8')\n except (pickle.UnpicklingError, OSError) as err:\n debug(\"error in pickling from %s, error: %s\", path, err)\n return None", "def handle_pickle(data, name, mode):\n pickle_name = os.path.join(os.path.dirname(pickle_loc), name)\n if mode == \"w\":\n with open(pickle_name, \"wb\") as handle:\n pickle.dump(data, handle, protocol=pickle.HIGHEST_PROTOCOL)\n return None\n\n if mode == \"r\":\n with open(pickle_name, \"rb\") as handle:\n b = pickle.load(handle)\n return b", "def read_from_file(name):\n print 'reading structures from pickle'\n print '------------------------------'\n\n path = os.getcwd() + '/pickles/' + name + '.pkl'\n file = open(path, 'rb')\n new_obj = pickle.load(file)\n file.close()\n\n return new_obj", "def pkl_load(name, path = 'obj'):\n if '.p' not in name:\n name = name + '.pkl'\n path = os.path.join(path, name)\n try:\n obj = pickle.load(open(path, 'rb'))\n except FileNotFoundError:\n obj = None\n return obj", "def load(self, which):\n\t\tpath = os.path.join(self.storagedir, which)\n\t\tprint(\"Loading from\", path)\n\t\twith open(path, \"rb\") as handle:\n\t\t\tsetattr(self, which, _pickle.load(handle))", "def loadPickle(filepath):\n\tf = open(filepath, 'rb')\n\tobj = pickle.load(f)\n\tf.close()\n\treturn obj", "def load_data():\r\n print ('Loadng all the file one time......')\r\n if not os.path.exists('cifar.pkl'):\r\n set_data()\r\n with open('cifar.pkl', 'rb') as cifar_pickle:\r\n data = six.moves.cPickle.load(cifar_pickle)\r\n return data", "def pickle(self,data,filename):\n pickle.dump(data, open(filename, 'wb'))", "def 
load_data_pickle(path):\n with open(path, 'rb') as f:\n documents = pickle.load(f, encoding=\"bytes\")\n print(\"Loaded: {}\".format(path))\n\n return documents", "def load_pickle(filename):\n with open(filename, \"rb\") as f:\n obj = pickle.load(f)\n\n return obj", "def load(filename):\n with open(filename, 'rb') as f:\n return pickle.load(f)", "def load_object(fpath):\r\n with open(fpath, 'rb') as i:\r\n return pickle.load(i)", "def pickle_load(file_name: str) -> Any:\n with open(file_name, 'rb') as file:\n return pickle.load(file)", "def load_data():\r\n f = gzip.open('mnist.pkl.gz', 'rb')\r\n training_data, validation_data, test_data = pickle.load(f,encoding='bytes')\r\n f.close()\r\n return (training_data, validation_data, test_data)", "def load_object(path):\r\n with open(path,\"rb\") as f:\r\n object = pickle.load(f) \r\n return object", "def load_object(filename):\r\n with open(filename, 'rb') as input:\r\n obj = pickle.load(input)\r\n return obj", "def loadData(dataPathFile):\r\n if dataPathFile[-3:] == 'pkl':\r\n dataBaseDict = pickle.load(open(dataPathFile, 'rb'))\r\n return dataBaseDict\r\n else:\r\n raise Exception('File that is trying to be loaded is not a pickle file\\n')", "def load(self):\n f = self.open(\"rb\")\n try:\n import pickle\n\n return error.checked_call(pickle.load, f)\n finally:\n f.close()", "def load_from_pickle(path: str):\r\n if not config.silent:\r\n logger.info(f'Load from {path}')\r\n with open(path, 'rb') as f:\r\n return pickle.load(f)", "def load_data():\n f = gzip.open('../data/mnist.pkl.gz', 'rb')\n training_data, validation_data, test_data = cPickle.load(f)\n f.close()\n return (training_data, validation_data, test_data)", "def test_pickle_save(self):\n l = [1, 2, 3, 4, 5]\n self.plugin.save_data(l)", "def read_pickle(path):\n with open(path, \"rb\") as f:\n data = pickle.load(f)\n\n return data", "def _unpickle(filename):\n\n # Create full path for the file.\n file_path = _get_file_path(filename)\n\n print(\"Loading data: \" + file_path)\n\n with open(file_path, mode='rb') as file:\n # In Python 3.X it is important to set the encoding,\n # otherwise an exception is raised here.\n data = pickle.load(file, encoding='bytes')\n\n return data", "def read_pickle_object(pickle_file_name):\n with open(pickle_file_name, 'rb') as input:\n m = pickle.load(input)\n return m", "def pickleload(path):\n with open(path, 'rb') as file:\n loaded = pickle.load(file)\n return loaded", "def load_from_disk(name):\n shortname = _dumpify(_compress_name(name) + '.pkl')\n print 'load_from_disk(%s)' % shortname\n pkl_file = open(shortname, 'rb')\n object = pickle.load(pkl_file)\n pkl_file.close()\n return object", "def load_and_pickle_mnist():\n\n if os.path.exists(pickle_file):\n print(\"Pickle file found! 
Unpickling...\")\n with open(pickle_file, \"rb\") as pf:\n mnist = pickle.load(pf)\n else:\n mnist = read_data_sets(data_dir, one_hot=True)\n\n with open(pickle_file, \"wb\") as pf:\n pickle.dump(mnist, pf, pickle.HIGHEST_PROTOCOL)\n\n # Remove .gz files from the mnist download.\n for ptr in glob.glob(os.path.join(data_dir, \"*.gz\")):\n os.remove(ptr)\n\n return mnist", "def getData():\n with open('obj/documents.pkl', 'rb') as file:\n data = pickle.load(file)\n return data", "def pickle_from_file(fname):\n\ttry:\n\t\tfh = open(fname, 'r')\n\t\tdata = cPickle.load(fh)\n\t\tfh.close()\n\texcept:\n\t\t#raise\n\t\tprint \"Loading pickled data failed!\", sys.exc_info()[0]\n\t\tdata = None\n \n\treturn data", "def load(filename):\n with open(filename,'rb') as f:\n return pickle.load(self,f)", "def dump_pickle_data(obj, filename):\n path = \"../tmp/{}.pckl\".format(filename)\n f = open(path, 'wb')\n pickle.dump(obj, f)\n f.close()", "def load_data(file_name):\n with open(file_name, 'rb') as f:\n data = pickle.load(f)\n return data", "def load_object(filename):\n with open(filename, 'rb') as input_file: # Overwrites any existing file.\n obj = pickle.load(input_file)\n return obj", "def run_pickle(data):\n log.info(\"\\n\\n====\")\n log.info('Step 1: Demonstrate persistence with pickle')\n log.info('Write a pickle file with the product data')\n\n pickle.dump(data, open('../data/data.pkl', 'wb'))\n\n log.info('Step 2: Now read it back from the pickle file')\n read_data = pickle.load(open('../data/data.pkl', 'rb'))\n log.info('Step 3: Show that the write and read were successful')\n assert read_data == data\n log.info(\"and print the data\")\n pprint.pprint(read_data)", "def dump_pickle(path, data):\n with open(path, 'wb') as f:\n pickle.dump(data, f)", "def load_data_pickle(self, load_full=False):\n self.train = pd.read_pickle('../input/train_mod.pkl')\n self.test = pd.read_pickle('../input/test_mod.pkl')\n if load_full:\n self.train_full = pd.read_pickle('../input/train_full_mod.pkl')", "def __init__(self,path):\n self.path = path\n self.data = {}\n self.hasChanged = False\n #--Load\n if os.path.exists(self.path):\n ins = open(self.path)\n inData = compat.uncpickle(ins)\n self.data.update(inData)", "def pload(filename):\n return pickle.load(open(filename, 'rb'))", "def loadpickle(fln):\n if not os.path.exists(fln) and os.path.exists(fln + '.gz'):\n gzip = True\n fln += '.gz'\n else:\n try:\n with open(fln, 'rb') as fh:\n try: #Py3k\n return pickle.load(fh, encoding='latin1')\n except TypeError:\n return pickle.load(fh)\n except pickle.UnpicklingError: #maybe it's a gzip?\n gzip = True\n else:\n gzip = False\n if gzip:\n try:\n import zlib\n with open(fln, 'rb') as fh:\n stream = zlib.decompress(fh.read(), 16 + zlib.MAX_WBITS) \n try: #Py3k\n return pickle.loads(stream, encoding='latin1')\n except TypeError:\n return pickle.loads(stream)\n except MemoryError:\n import gzip\n with open(fln) as fh:\n gzh = gzip.GzipFile(fileobj=fh)\n try: #Py3k\n contents = pickle.load(gzh, encoding='latin1')\n except TypeError:\n contents = pickle.load(gzh)\n gzh.close()\n return contents", "def load_pickle(path):\n with open(path, 'rb') as f:\n data = cPickle.load(f)\n images = np.asarray([i/np.float32(255) for i in data['data']])\n labels = np.asarray(data['labels'], dtype='int32')\n X_train, X_test, y_train, y_test = train_test_split(images, labels, test_size=0.2)\n return X_train, y_train, X_test, y_test", "def load_data():\n f = gzip.open('../data/mnist.pkl.gz', mode='rb')\n\n # NOTE: I get errors when I don't 
use encoding='latin1' because of Python 2 vs Python 3 compatibility issues\n # training_data, validation_data, test_data = pickle.load(f, encoding='latin1')\n training_data, validation_data, test_data = pickle.load(f)\n\n f.close()\n\n return training_data, validation_data, test_data", "def load_pickle(path):\n with open(path, 'rb') as f:\n pickle_file = pickle.load(f)\n return pickle_file", "def load(filename):\n file = gzip.GzipFile(filename, 'rb')\n buffer = \"\"\n while True:\n data = file.read()\n if data == \"\":\n break\n buffer += data\n object = pickle.loads(buffer)\n file.close()\n return object", "def from_pickle(input_path):\n with open(input_path, 'rb') as f:\n unpickler = pickle.Unpickler(f)\n return unpickler.load()", "def load(filename):\n file = gzip.GzipFile(filename, 'rb')\n buffer = \"\"\n while 1:\n data = file.read()\n if data == \"\":\n break\n buffer += data\n object = pickle.loads(buffer)\n file.close()\n return object", "def load(filename):\n import pickle\n return pickle.load(open(filename, 'r'))", "def pickle_data(tdata):\n try:\n if isinstance(tdata, bytes): #Quick check if tdata is already bytes\n data = tdata\n else:\n data = pickle.dumps(tdata)\n except:\n data = False\n return data", "def read_pickle(filename):\n try:\n with gzip.open(filename, 'rb') as f:\n loaded_object = pickle.load(f)\n return loaded_object\n except OSError:\n with open(filename, 'rb') as f:\n loaded_object = pickle.load(f)\n return loaded_object", "def load_pickle(file):\n with open(file, 'rb') as fh:\n datum = pickle.load(fh)\n\n return datum", "def unpickle_data(tdata):\n try:\n if isinstance(tdata, bytes): #Quick check if tdata is already bytes\n data = pickle.loads(tdata)\n else:\n data = tdata\n except:\n data = False\n return data", "def load_object(filename):\n\n with gzip.GzipFile(filename, 'rb') as source: result = source.read()\n ret = pickle.loads(result)\n source.close()\n\n return ret", "def load(self):\n self.word2vec, self.img2sentence, self.word_freq, self.num_words, self.word2idx, self.idx2word = pickle.load(open(self.save_file, 'rb'))", "def read_pickle(path):\n with open(path, 'rb') as file:\n return pickle.load(file)", "def load(self):", "def pickle_data(filename, data):\n f = open(filename, \"wb\")\n pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)\n f.close()", "def _load(self, load_dict):\n try:\n self.v_protocol = load_dict.pop(PickleParameter.PROTOCOL)\n except KeyError:\n # For backwards compatibility\n dump = next(load_dict.values())\n self.v_protocol = PickleParameter._get_protocol(dump)\n for key in load_dict:\n val = load_dict[key]\n self._data[key] = pickle.loads(val)", "def load_pickle(path):\n with open(path, 'rb') as f:\n pickle_file = pickle.load(f)\n file_name = re.findall(r\"/?[^/]+\", path)[-1].strip(\"/\")\n print(f\"Loaded {file_name}.\")\n return pickle_file", "def load(cls,filename):\n obj = None\n f = open(filename,'r')\n try:\n obj = pickle.load(f)\n obj.filename = filename\n finally:\n f.close()\n return obj", "def pickle_data(file_name, data):\n outfile = open(file_name, \"wb\")\n pickle.dump(data, outfile)\n outfile.close()", "def read_pickle(file_name):\n with open(file_name, 'rb') as f:\n obj = pickle.load(f)\n return obj", "def load_pkl_file(p):\n pkl_file = open(p, 'rb')\n obj = pickle.load(pkl_file)\n pkl_file.close()\n return obj", "def dic_b64_and_pickle_loads(data):\n for i in data:\n data[i] = pickle.loads(base64.b64decode(data[i]))\n return data", "def load_data_loader_from_file(cls, filename):\n print(\"Loading data loader from 
file: {}\".format(filename))\n\n with open(filename, \"rb\") as file:\n return pickle.load(file)", "def load_data(self):\n raise NotImplementedError()" ]
[ "0.7677762", "0.75456476", "0.74007934", "0.7386416", "0.7347391", "0.7306865", "0.7306865", "0.7251167", "0.7192325", "0.71690726", "0.7146814", "0.7142319", "0.71218973", "0.70928425", "0.70679253", "0.7051632", "0.7041608", "0.7021551", "0.694763", "0.69178104", "0.6917255", "0.6892351", "0.6864153", "0.68639594", "0.6857534", "0.685111", "0.6849906", "0.68445706", "0.6840812", "0.6835283", "0.6832002", "0.68221813", "0.6807327", "0.6803961", "0.6792556", "0.67923635", "0.6774662", "0.67510575", "0.67491865", "0.67407125", "0.6740035", "0.67386514", "0.6737524", "0.6734238", "0.67337644", "0.6729532", "0.6728653", "0.67098206", "0.67012346", "0.6701152", "0.6699381", "0.6686561", "0.668539", "0.6680768", "0.66598135", "0.66556644", "0.66500705", "0.6619035", "0.66149896", "0.6612278", "0.6606469", "0.658695", "0.6584321", "0.6583348", "0.6571496", "0.6569693", "0.65522027", "0.6548368", "0.6546173", "0.65384114", "0.65317494", "0.6523172", "0.65165925", "0.64993334", "0.64968747", "0.6489364", "0.648664", "0.64807963", "0.6479717", "0.6474454", "0.64734864", "0.6469118", "0.6465098", "0.6462395", "0.6453129", "0.6452747", "0.64502144", "0.6443613", "0.6439131", "0.64119864", "0.64092743", "0.64030313", "0.64027494", "0.6397606", "0.639646", "0.6394446", "0.6390936", "0.63868576", "0.6379219", "0.637687" ]
0.6825503
31
Reads the entire contents of a file into a single string using the read() method.
def readfile(filename): infile = open(filename, "r") # open file for reading # Use Python's file read function to read the file contents filetext = infile.read() infile.close() # close the file return filetext # the text of the file, as a single string
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_file(file):\n with open(file, 'r') as f:\n file_string = f.read()\n return file_string", "def read(file):\n with open(file, 'r') as file:\n return file.read()", "def read_file(path): #TODO implementme, handling paths more intelligently\n f = open(path, \"r\")\n string = f.read()\n f.close()\n return string", "def read_file(self, file: Path) -> str:\n with open(file) as f:\n return f.read()", "def read_file(file):\n with open(file, \"r\") as fid:\n return fid.read()", "def read_file(file_name):\n with open(file_name, 'r') as f:\n return f.read()", "def read_file(file_name):\n with open(file_name, \"r\") as f:\n return f.read()", "def read_from_file(file_name):\n with open(file_name, \"rb\") as text_file:\n return text_file.read()", "def ReadFileIntoString(filepath):\n with open(filepath, 'r') as file_handle:\n contents = file_handle.read()\n return contents", "def read_file(file_path):\n with open(file_path, 'r') as infile:\n return infile.read()", "def read_file(filename: str, mode: str = \"r\") -> str:\n with open(filename, mode) as file:\n file_content = file.read()\n return file_content", "def read_file(filename):\n return open(filename).read()", "def _read_file(self) -> str:\n with open(self._file_name) as fp:\n return fp.read()", "def read_file(file_path, mode='r', encoding=\"utf-8\"):\n with codecs.open(file_path, mode, encoding=encoding) as fp:\n return fp.read().strip()", "def read(self, filename):\n\t\treturn codecs.open(filename, 'r', 'utf8').read()", "def readfile(path: Union[str, Path]) -> str:\n with open(path) as infile:\n return infile.read()", "def read_file(name_file):\n with open(name_file, 'r') as file:\n return file.read()", "def read_file(self, file_name: str)-> str:\n if not os.path.exists(file_name):\n raise IOError(\"The File {} doesn't exists!\".format(file_name))\n\n with open(file_name) as file:\n return file.read().strip()", "def read_file(filename):\n f = open(filename)\n contents = f.read()\n f.close()\n return contents", "def read_file(file):\n f = open(file, \"r\", encoding=\"utf8\")\n return f.read()", "def read(file_name):\n with io.open(os.path.join(os.path.dirname(__file__), file_name),\n encoding='utf-8') as f:\n return f.read()", "def ReadFile(self, filename):\n file = open(filename, 'rb')\n result = \"\"\n try:\n result = file.read()\n finally:\n file.close()\n return result", "def ReadFile(self, filename):\r\n file = open(filename, 'rb')\r\n result = \"\"\r\n try:\r\n result = file.read()\r\n finally:\r\n file.close()\r\n return result", "def _Read(filename):\n with open(filename, 'rb') as f:\n return f.read()", "def SimpleRead(fn):\n content = \"\"\n try:\n content = open(fn).read()\n except :\n print(\"Failed to read file: %s\\n\"%(fn))\n print sys.exc_info()[1]\n\n return content", "def read_file(filepath: str) -> str:\n with open(filepath, \"r\") as filep:\n return filep.read()", "def read_file(filename):\n with open(filename) as fp:\n return fp.read()", "def read(path):\n with open(path) as f:\n return f.read()", "def read_file(file) -> str:\n file = open(file, \"r\")\n my_string = file.read()\n return get_clean_text(my_string)", "def read(path, encoding=\"utf-8\"):\n try:\n with io.open(path, encoding=encoding) as f:\n return f.read()\n except Exception as e:\n logger.error(\"read: %s failed. 
Error: %s\", path, e)\n return \"\"", "def read_file(self, file_name):\n\n with open(file_name, 'r') as file_input:\n file_content = file_input.read()\n return file_content", "def read_text_file(str_name_file: str):\n content: str = ''\n with open(str_name_file, mode=\"r\", encoding='utf-8') as file:\n print(\"file being read: \" + str_name_file + \"\\n\")\n content = file.read()\n return content", "def read_full_file(filename, options=\"rb+\"):\n with open(filename, options) as f:\n text = f.read()\n return text", "def read_file():\n with open(FILE_NAME) as f:\n data = f.read()\n return data", "def read_file(path):\n try:\n with open(path, 'r') as text_file:\n return \"\".join(text_file.readlines()).strip()\n except IOError:\n exit(\"Error: file '%s' is not readable!\" % path)", "def read(path):\n with open(path) as f:\n contents = f.read()\n return contents", "def read(filename):\n\n path = os.path.join(os.path.dirname(__file__), filename)\n\n with open(path) as f:\n return f.read()", "def readfile(filename):\n with open(filename, encoding=\"utf-8\") as file:\n raw = file.read()\n return raw", "def read_file(filename):\n with codecs.open(filename, 'r', 'utf8') as f:\n return f.read()", "def read_file(filename):\n with codecs.open(filename, 'r', 'utf8') as f:\n return f.read()", "def read_file(file_path):\n contents = None\n # opens the file and by default it sends 'r' as mode, so, it ensures the file will be used only for reading,\n # and not writing, since it's not necessary\n file_to_read = open(file_path)\n if file_to_read.mode == 'r':\n # saves the text of the file into a variable\n contents = file_to_read.read()\n\n # close the file\n file_to_read.close()\n return contents", "def read_file(name):\n with open(name, 'r') as my_file:\n return my_file.read().encode('utf-8')", "def read_file(filename):\n open_kwargs = {}\n if sys.version_info.major == 3:\n open_kwargs = {'encoding': 'utf-8'}\n\n path = os.path.abspath(os.path.dirname(__file__))\n filepath = os.path.join(path, filename)\n with open(filepath, **open_kwargs) as filecontents:\n return filecontents.read()", "def _read_file(file_name):\n file_handle = file(file_name)\n try:\n return file_handle.read()\n finally:\n file_handle.close()", "def read_file(file_path):\n\n text = ''\n with open(file_path, 'r') as file:\n for line in file.readlines():\n text += line\n return text", "def file_read(path: str) -> str:\n if os.path.isfile(path):\n while True:\n try:\n with open(path, \"r\") as fptr:\n return fptr.read()\n except PermissionError:\n pass\n return \"\"", "def read_file(filename):\n path = os.path.abspath(os.path.dirname(__file__))\n filepath = os.path.join(path, filename)\n try:\n return open(filepath).read()\n except IOError:\n return ''", "def contents(file):\n with open(file) as f:\n return f.read()", "def read_file(filename):\n path = os.path.abspath(os.path.dirname(__file__))\n filepath = os.path.join(path, filename)\n try:\n return open(filepath).read()\n except:\n return ''", "def open_and_read_file(file_path):\n\n # Read the file, return text as a string titled \"contents\"\n contents = open(file_path).read()\n\n # Return contents of your file as one long string\n return contents", "def get_file_content(self, file_name: str):\n file_name = Path(__file__).absolute().parents[1].joinpath(file_name)\n try:\n with file_name.open('r') as file:\n intermediate = file.readlines()\n return ''.join(intermediate)\n except FileNotFoundError as message:\n self.logger.error(message)\n return ''", "def file2str(file):\n with 
open(file, \"r\") as textFile:\n return textFile.read()", "def read_file(file_path):\n\n file_string = ''\n\n with open(file_path, 'r', newline='') as file:\n for line in file:\n file_string = file_string + line.rstrip('\\n')\n\n return file_string", "def read_file(filename):\n if os.path.isfile(filename):\n with open(filename, 'r') as f:\n return f.read()", "def read(name):\n\n return open(name).read()", "def readContent(file):\n \n with open(file, \"r\", encoding = \"utf-8\") as f:\n return f.read()", "def readFromTextFile(self, file_name):\n with open(file_name, 'r') as file_obj:\n return file_obj.read()", "def read(filename):\n with open(os.path.join(os.path.dirname(__file__), filename)) as f:\n return f.read()", "def read(self):\n\t\tself.file.seek(0)\n\t\treturn self.file.read().strip()", "def read_from_file(path):\n with io.open(path, 'rb') as ios:\n return read(ios)", "def file_to_string(file_name):\n with open(file_name, 'r') as f:\n text = f.read()\n # delete original file\n os.remove(file_name)\n return text", "def ReadFile(path, mode='r'):\n with open(path, mode) as f:\n return f.read()", "def read_file(self, file):\n fd = open(file)\n data = fd.read()\n fd.close()\n return data", "def local_read(filename):\n full_filename = os.path.join(\n os.path.abspath(os.path.dirname(__file__)),\n filename)\n return codecs.open(full_filename, 'r', 'utf-8').read()", "def read_file(file_name, enc=\"latin-1\"):\n f = open(file_name, \"r\", encoding=enc)\n content = \"\".join(f.readlines())\n f.close()\n return content", "def read_file(file):\n try:\n with open(file, \"r\") as f:\n content = f.read().replace(\"\\n\", \"\")\n return content\n except:\n return f\"[ERROR]: could not open '{file}'\"", "def fs_read(file_path):\n try:\n with open(str(file_path), 'r') as f:\n return f.read()\n except UnicodeDecodeError:\n with open(str(file_path), 'r', encoding='latin-1') as f:\n return f.read()\n except IOError as e:\n raise e", "def _ReadFile(filepath):\n with open(filepath) as f:\n return f.read()", "def read_contents(path):\n with open(path, 'r') as stream:\n return stream.read()", "def read(cls, path):\n with cls.open(path, 'rt') as fd:\n return fd.read()", "def readText(fileName):\n fileText = \"\"\n with open(fileName,\"r\") as fileObject:\n fileText = fileObject.read()\n \n return fileText", "def read_file(file_path):\n try:\n input_file = open(file_path)\n text_content = input_file.read()\n input_file.close()\n return text_content\n except IOError:\n print (\"Can not read from file\")", "def readFromFile(self, path):\n log(logging.DEBUG, \"Read from file: \" + path)\n with open(path, \"r\") as f:\n return f.read()", "def read_file(*file_name: str) -> str:\n with open(os.path.join(HERE, *file_name)) as f:\n return f.read()", "def readFile(fileName):\n with open(fileName, 'r', encoding='utf-8') as f:\n text = f.read()\n return text", "def _read_one_line_file(name):\n with open(name, \"rb\") as file:\n data = file.read()\n return data.decode('utf-8').strip()", "def read_from_file(filename):\n\twith open(filename, 'r') as myfile:\n\t\ttext=myfile.read()\n\treturn text", "def _readfile(dirpath, filename):\n try:\n with codecs.open(os.path.join(dirpath, filename), \"r\", \"utf-8\") as f:\n return f.read()\n except IOError:\n return u\"\"", "def file_to_string(path_to_file):\n\t\twith open(path_to_file, 'r') as f:\n\t\t\tcontent = f.read()\n\t\treturn content", "def readfile(fname, mode='rb'):\n f = open(fname, mode)\n raw = f.read()\n f.close()\n return raw", "def ReadFile(f_path):\n data = ''\n\n 
if f_path:\n try:\n fh = open(f_path, 'r')\n try:\n data = fh.read()\n finally:\n fh.close()\n except IOError:\n return ''\n\n return data", "def read_file(filename):\n with codecs.open(os.path.join(here, filename), encoding='utf-8') as f:\n content = f.read()\n return content", "def read_file(file_path: str) -> str:\n try:\n with open(file=file_path, mode='r', encoding=\"utf8\") as f:\n return f.read()\n\n except FileNotFoundError:\n raise FileNotFoundError(f'No text file was found at location {file_path}')", "def open_and_read_file(file_path):\n\n # your code goes here\n text_file = open(file_path)\n text_string= text_file.read()\n text_file.close()\n return text_string", "def _file_read(self, file: str) -> str:\n with open(f\"tests/resources/{file}\", \"r\") as fs:\n result = \"\\n\".join(fs.read().splitlines())\n return result", "def read_file(file):\n f = open(file, 'r')\n print(f.read())", "def read_file(path):\n assert_is_string(path)\n f = open(path, \"r\")\n data = f.read()\n f.close()\n return data", "def _read_file(self, filePath):\n with open(filePath) as f:\n fileContent = f.read()\n f.close()\n return fileContent.strip()", "def read(self, name: str) -> str:\n path = self.get_path(name)\n if not os.path.exists(path):\n return \"\"\n\n with open(path, \"r\") as fh:\n return fh.read()", "def read_file(self, file_name):\n f = file(file_name, \"r\")\n temp = f.read()\n f.close()", "def get_file_text(file_name):\n\tf = open(file_name, 'r')\n\ttext = f.read()\n\treturn text", "def read(fn):\n with open(os.path.join(os.path.dirname(__file__), fn), encoding='utf-8') as f:\n return f.read()", "def fileGetContents(sFilename):\n with open(sFilename) as f:\n return f.read()", "def _read(fname):\n fpath = os.path.dirname(__file__)\n fpath = os.path.join(fpath, fname)\n with open(fpath, 'r') as file_:\n return file_.read()", "def open_and_read_file(file_path):\n text_data = open(file_path).read()\n # print text_data\n return text_data", "def read_file(filename=\"\"):\n with open(filename, 'r') as f:\n f_contents = f.read()\n print(f_contents, end='')", "def readFile(self, name):\n\t\ttry:\n\t\t\tf = open(name, 'r')\n\t\t\tlines = f.readlines()\n\t\t\tf.close()\n\t\texcept IOError:\n\t\t\treturn None\n\n\t\treturn join(lines, \"\")", "def read_raw(file_path):\n file = open(file_path, 'rb')\n content = file.read()\n file.close()\n return content", "def open_and_read_file(file_path):\n\n # Open file and read into memory\n text = open(file_path).read().rstrip()\n\n # Replace newlines with space\n #text = text.replace('\\n', ' ')\n\n return text", "def read_file(file_name):\n return open(os.path.join(os.path.dirname(os.path.dirname(__file__)), file_name)).read()" ]
[ "0.8052589", "0.7846758", "0.7786236", "0.7739382", "0.7731349", "0.7712797", "0.7707567", "0.75548965", "0.75135463", "0.7509113", "0.7501244", "0.74897665", "0.74849457", "0.7474526", "0.7474349", "0.74655485", "0.74514276", "0.74224454", "0.74033785", "0.7400379", "0.7393844", "0.7382184", "0.73817563", "0.7381136", "0.737748", "0.7353659", "0.7352959", "0.7324334", "0.731302", "0.7309944", "0.7308903", "0.7301866", "0.726604", "0.72618616", "0.72616565", "0.7251018", "0.72415125", "0.72168994", "0.71951205", "0.71951205", "0.7181609", "0.7173977", "0.71702844", "0.716496", "0.7142345", "0.7141149", "0.71305966", "0.7124154", "0.7118126", "0.71050453", "0.7098597", "0.7097354", "0.70932996", "0.70818776", "0.7076081", "0.7046534", "0.7036703", "0.7029802", "0.7027523", "0.70128286", "0.70114803", "0.6994683", "0.69922686", "0.69892496", "0.6986786", "0.69693685", "0.6967139", "0.6966177", "0.6963413", "0.69594777", "0.69549155", "0.6948035", "0.6946391", "0.6944704", "0.6941945", "0.69279563", "0.690627", "0.6903403", "0.69020814", "0.6885578", "0.68827075", "0.6876441", "0.6874646", "0.6872849", "0.68723434", "0.68695486", "0.68610257", "0.6860485", "0.6857365", "0.685341", "0.6838817", "0.68343985", "0.6833778", "0.68283594", "0.68037844", "0.6800257", "0.6800229", "0.67934823", "0.675954", "0.67383784" ]
0.72099614
38
This function reads the product database
def read_product(filename=None): if not filename: filename = settings.PRODUCT_FILENAME return pd.read_csv(filename, sep='|')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_products(self) -> dict:\n\t\tproducts = dict()\n\n\t\tdb = Database()\n\t\tdb.create_connection(self._file_path)\n\t\trows = db.get_products()\n\t\tdb.close_connection()\n\n\t\tfor row in rows:\n\t\t\tif row[0] not in products:\n\t\t\t\ttry:\n\t\t\t\t\tproducts[row[0]] = Product(row[0], row[1], row[2], row[3]) # code, price, lastupdate, currency\n\t\t\t\texcept Exception as e: \n\t\t\t\t\t# IF the database was not correct parsed, the item will be discarted, \n\t\t\t\t\t# the event will be logged in the log file and the program will continue\n\t\t\t\t\tlogging.error(str(datetime.now())+': ' + e)\n\t\t\t\t\tcontinue\n\n\t\treturn products", "def read_from_product_collection(self):\n try:\n result = db.collection_product.find()\n except Exception as e:\n raise e\n \n return result", "def get_data(self):\n products_list = []\n for category in CATEGORIES:\n json_data = self.url_to_json(category)\n pages_nb = self.retrieve_cat_pages_nb(json_data)\n for page in range(pages_nb):\n page_json_data = self.page_to_json(category, page+1)\n products = page_json_data[\"products\"]\n for p in products:\n params = {\n 'brands': \"\",\n 'product_name_fr': \"\",\n 'nutrition_grades': \"\",\n 'stores': \"\",\n 'url': \"\",\n 'categories': \"\"\n }\n for key in params:\n try:\n params[key] = p[key]\n except KeyError:\n continue\n if params['product_name_fr'] != \"\" and params['nutrition_grades'] != \"\" and params['url'] != \"\" and params['categories'] != \"\":\n product = Product(brand=params['brands'],\n name=params['product_name_fr'],\n nutrition_grade=params['nutrition_grades'],\n stores=params['stores'], url=params['url'],\n category=params['categories'])\n products_list.append(product)\n try:\n self.manager.save_all(self.clean_data(products_list))\n print(f\"\\n La base de donnรฉes |{DB_NAME}| a รฉtรฉ peuplรฉe \\n\")\n except:\n print(\"\\n Une erreur s'est produite lors \"\n \"du peuplement de la base de donnรฉes \\n\")", "def load_products():\n\n print \"Loading Products\"\n\n for i, row in enumerate(open(\"data/mock_product_data.csv\")):\n row = row.rstrip()\n title, price, inventory = row.split(\",\")\n\n product = Product(title=title,\n price=price,\n available_inventory=inventory)\n\n db.session.add(product)\n\n db.session.commit()", "def connect_db_and_load_data(cls):\n db.connect()\n db.create_tables([Product], safe=True)\n load_data(transform_data('./inventory.csv'))", "def get_products(self):\n con = dbcon()\n cur = con.cursor()\n cur.execute(\"SELECT * FROM products;\")\n res = cur.fetchall()\n if res:\n prdcts=[]\n for prodct_item in res:\n picked_prdct = {\n 'product_id':prodct_item[0],\n 'product_name':prodct_item[1],\n 'price':prodct_item[2],\n 'quantity':prodct_item[3]\n }\n prdcts.append(picked_prdct)\n return jsonify({\"Products\": prdcts}), 200\n return jsonify({\"message\":\"No products in store\"})", "def load_products():\n\n for i, row in enumerate(open(\"seed_data/category.product\")):\n row = row.rstrip()\n name = row.split(\"|\")\n product_category = ProductCategoryModel(name=name)\n db.session.add(product_category)\n\n for i, row in enumerate(open(\"seed_data/product.product\")):\n row = row.rstrip()\n name, short_description, long_description, product_category_id, img_path_xs, img_path_sm, img_path_md, img_path_lg = row.split(\"|\")\n product = ProductModel(name=name,\n short_description=short_description,\n long_description=long_description,\n product_category_id=product_category_id,\n img_path_xs=img_path_xs,\n img_path_sm=img_path_sm,\n 
img_path_md=img_path_md,\n img_path_lg=img_path_lg)\n db.session.add(product)\n\n for i, row in enumerate(open(\"seed_data/location.product\")):\n row = row.rstrip()\n name, description, address1, address2, city, state, zip_code, country, latitude, longitude, direction_url = row.split(\"|\")\n location = LocationModel(name=name,\n description=description,\n address1=address1,\n address2=address2,\n city=city,\n state=state,\n zip_code=zip_code,\n country=country,\n latitude=latitude,\n longitude=longitude,\n direction_url=direction_url)\n db.session.add(location)\n\n for i, row in enumerate(open(\"seed_data/location_product.product\")):\n row = row.rstrip()\n location_id, product_id, price, num_available = row.split(\"|\")\n location_product = LocationProductModel(location_id=location_id,\n product_id=product_id,\n price=price,\n num_available=num_available)\n db.session.add(location_product)\n\n db.session.commit()", "def read_stock(db, openfile):\n pass", "def product_db() -> List[Text]:\n\n return [\n \"credit\",\n \"forex\",\n \"debit\",\n \"atm\"\n ]", "def get_product_details(self):\n\n db.execute(\"SELECT * FROM Product WHERE id = %s\", (self.id,))\n product = db.fetch()\n\n self.name = product[1]\n self.brand = product[2]\n self.nutriscore_id = product[3]\n self.store = product[4]\n self.description = product[5]\n self.url = product[6]", "def get_cart_contents(db):", "def return_products():\n with MY_CONNECTION as connection:\n cursor = connection.cursor()\n cursor.execute(\n \"\"\"\n SELECT id_product, product_name, product_price, in_stock, description\n FROM Products\n \"\"\")\n return cursor.fetchall()", "def read_database():\n file = tables.open_file(glob.datafile)\n table_d = file.root.VelibData.dynamic\n table_s = file.root.VelibData.static\n n_rows = len(table_d)\n print \"Nrows in dynamic table:\", n_rows\n print \"N stations:\", len(table_d[0][\"last_update\"])\n print \"Time of most recent sampling:\", \\\n time.asctime(time.localtime(recover_time(table_d[-1][\"sample_time\"])))\n print \"Nbikes available at most recent sampling:\", \\\n table_d[n_rows-1][\"available_bikes\"]\n print \"Time of last_update at most recent sampling:\", \\\n time.asctime(\n time.localtime(recover_time(table_d[n_rows-1][\"last_update\"][0])))\n print \"Number arr\", table_s[0][\"number\"]\n file.close()", "def retrieve_product_infos(self):\n\n # PRODUCT NAME\n try:\n product_name = self.product['product_name'].capitalize()\n except KeyError:\n product_name = None\n\n # PRODUCT CODE\n try:\n product_code = self.product['code'].capitalize()\n except KeyError:\n product_code = None\n\n # URL\n try:\n product_url = self.product['url'].lower()\n except KeyError:\n product_url = None\n\n # IMAGE URL\n try:\n image_url = self.product['image_url'].lower()\n except KeyError:\n image_url = None\n\n # QUANTITY\n try:\n quantity = self.product['quantity'].capitalize()\n except KeyError:\n quantity = None\n\n # INGREDIENTS\n try:\n ingredients = self.product['ingredients_text_fr'].capitalize()\n except KeyError:\n ingredients = None\n\n # BRAND\n brands = []\n try:\n for brand in self.product['brands'].split(','):\n brand = brand.strip().capitalize()\n if (\n brand != ''\n and brand not in brands\n ):\n brands.append(brand)\n except KeyError:\n pass\n\n # STORES\n stores = []\n try:\n for store in self.product['stores'].split(','):\n store = store.strip().capitalize()\n if (\n store != ''\n and store not in stores\n ):\n stores.append(store)\n except KeyError:\n pass\n\n # COUNTRY\n try:\n countries = 
self.product['countries'].capitalize()\n except KeyError:\n countries = None\n if 'France' in countries:\n countries = 'France'\n else:\n countries = None\n\n # COMPARE TO CATEGORY\n try:\n compare_to = self.product['compared_to_category'].capitalize().split(':')[1]\n except KeyError:\n compare_to = None\n try:\n Categories.objects.get(\n name=compare_to\n )\n except Categories.DoesNotExist:\n compare_to = None\n except:\n importable = False\n\n # CATEGORIES HIERARCHY\n try:\n categories_hierarchy = [\n category.split(':')[1] for category in self.product['categories_hierarchy']\n ]\n except KeyError:\n categories_hierarchy = None\n\n # NUTRISCORE GRADE\n nutriscore_labels = [\n 'nutrition_grade_fr',\n 'nutriscore_grade'\n ]\n nutriscore = 'F'\n i = 0\n while (\n i < len(nutriscore_labels)\n and nutriscore == 'F'\n ):\n try:\n nutriscore = self.product[nutriscore_labels[i]].upper()\n except KeyError:\n i += 1\n\n product_infos = {\n 'product_name': product_name,\n 'product_code': product_code,\n 'product_url': product_url,\n 'image_url': image_url,\n 'quantity': quantity,\n 'ingredients': ingredients,\n 'brands': brands,\n 'stores': stores,\n 'countries': countries,\n 'compare_to': compare_to,\n 'categories_hierarchy': categories_hierarchy,\n 'nutriscore': nutriscore\n }\n\n nutriments = self.product['nutriments']\n for nutriment in self.list_nutriments:\n try:\n product_infos[nutriment] = float(nutriments[nutriment])\n except KeyError:\n product_infos[nutriment] = 0\n\n return product_infos", "def read_db(self):\n with open(self.filename, 'r') as database:\n data = json.load(database)\n self.data = data", "def read_data_from_file(file_name):\r\n list_of_product_objects = []\r\n try:\r\n with open(file_name, \"r\") as objF:\r\n for row in objF:\r\n list_of_product_objects.append(row.strip().split(\",\"))\r\n except IOError:\r\n print(\"\\tProduct database is empty.\\nAdd new products.\")\r\n with open(file_name, \"a\") as objF:\r\n lstrow = [\"Product\", \"Price\"]\r\n objF.write(str(lstrow[0]) + \",\" + str(lstrow[1]))\r\n\r\n return list_of_product_objects", "def read(self):\n self.connect()\n get_books = f\"select * from {self.book_table}\"\n try:\n self.cur.execute(get_books)\n self.con.commit()\n for i in self.cur:\n yield i\n except MySQLError as err:\n messagebox.showinfo(\"Failed to fetch files from database\")\n print(err)", "def read_db():\n with open(\"config.json\") as f:\n config = json.load(f)\n \n conn = psycopg2.connect(dbname='cage_sc_db', user='cage_db_user', \n password='legend', host='10.66.193.71')\n cursor = conn.cursor()\n\n # cmd = \"SELECT value_raw, timestamp FROM numeric_data WHERE endpoint_name='krstc_baseline' AND timestamp>'2019-09-27T00:00';\"\n \n # cmd = \"SELECT * FROM endpoint_id_map;\"\n \n # cmd = \"SELECT value_cal, timestamp FROM numeric_data WHERE endpoint_name='cage_coldPlate_temp' AND timestamp>'2019-09-03T00:02';\"\n \n # cmd = \"SELECT value_cal, timestamp FROM numeric_data WHERE endpoint_name='cage_pressure' AND timestamp>'2019-09-27T00:00';\"\n \n cmd = \"SELECT value_cal, timestamp FROM numeric_data WHERE endpoint_name='cage_ln_level' AND timestamp>'2019-09-27T00:00';\"\n \n # cmd = \"SELECT value_raw, timestamp FROM string_data WHERE endpoint_name='krstc_hv_status' AND timestamp>'2019-08-01';\"\n \n cursor.execute(cmd)\n\n # retrieve data. 
returns a list of tuples.\n record = cursor.fetchall()\n \n # print(type(record[0]))\n \n # dt = record[0][1]\n \n # print(dt)\n \n for rec in record:\n print(rec)", "def fill_data_product(self):\n self.product.fill_data_product(self.list_products, self.mycursor, self.my_database)", "def return_items(self):\n cur = self.cursor\n cur.execute(f\"SELECT * FROM {self.product_name}\")\n products = cur.fetchall()\n return products", "def get_all_2(conn) -> str:\n with conn.cursor() as cursor:\n cursor.execute(\"\"\"select products.id, \n products.name, \n products.price, \n products.image, \n products.category_id, \n product_categories.name\n from products\n inner join product_categories\n on products.category_id=product_categories.id\n where products.deleted=false order by id\"\"\")\n try:\n return cursor.fetchall()\n except TypeError:\n raise errors.StoreError", "def db_for_read(self, model, **hints):\n\t\tif model._meta.app_label == 'product':\n\t\t\treturn 'product_dbs'\n\t\treturn None", "def import_product_data(directory_name, product_file):\n\n start = time.time()\n\n mongo = MongoDBConnection()\n\n with mongo:\n LOGGER.info(\"Establishing MongoDB connection\")\n database = mongo.connection.storeDB\n\n LOGGER.info(\"Establishing databases\")\n products = database[\"products\"]\n initial_entries = database.products.count_documents({})\n\n #entry counts\n added_entries = 0\n\n with open(os.path.join(directory_name, product_file)) as csv_file:\n\n product_data = csv.reader(csv_file, delimiter=\",\")\n for entry in product_data:\n try:\n product_entry = {\"product_id\":entry[0],\n \"description\":entry[1],\n \"product_type\":entry[2],\n \"quantity_available\":entry[3]}\n products.insert_one(product_entry)\n added_entries += 1\n LOGGER.info(f\"Added {entry[0]} to product database\")\n except peewee.IntegrityError:\n LOGGER.info(f\"Error adding {entry[0]} to product database\")\n\n final_entries = database.products.count_documents({})\n\n return((initial_entries, added_entries, final_entries,\n (time.time() - start)))", "def DB_read(self, **kwargs):\n if os.path.isdir(self.str_DBpath):\n self.dp.qprint(\"Reading pman DB from disk...\\n\")\n self._ptree = C_stree.tree_load(\n pathDiskRoot = self.str_DBpath,\n loadJSON = True,\n loadPickle = False)\n self.dp.qprint(\"pman DB read from disk...\\n\")\n self.col2_print('Reading pman DB from disk:', 'OK')\n else:\n P = self._ptree\n # P.cd('/')\n # P.mkdir('proc')\n P.tree_save(\n startPath = '/',\n pathDiskRoot = self.str_DBpath,\n failOnDirExist = False,\n saveJSON = True,\n savePickle = False\n )\n self.col2_print('Reading pman DB from disk:',\n 'No DB found... 
creating empty default DB')\n self.dp.qprint(Colors.NO_COLOUR, end='')", "def read_stock_codes_from_db():\n\n print('connecting to database...')\n Stocks = get_db()['Stocks']\n print('reading...')\n\n stocks = Stocks.find()\n return stocks", "def product_tables(self): \r\n\r\n self.mycursor.execute('CREATE TABLE IF NOT EXISTS product(\\\r\n PROD_id BIGINT PRIMARY KEY,\\\r\n PROD_name VARCHAR(100) NOT NULL,\\\r\n PROD_grade CHAR(1) NOT NULL,\\\r\n PROD_url VARCHAR(150) NOT NULL UNIQUE)')", "def retrieve_from_db(self):\n pass", "def test_load_products(self):\n\n call_command('load_products')\n\n products_count = len(DjangoProductRepository().all())\n\n self.assertEqual(\n products_count + 1,\n self._get_num_lines_from_csv('products.csv'),\n )", "def initialize_database():\n db = Database(database_name)\n i, m, u, p = db.fetch_needed_data()\n\n return i, m, u, p", "def populate_database(self):\n self.insert_products()\n self.insert_categories()\n self.insert_products_categories()\n self.insert_stores()\n self.insert_products_stores()", "def set_store_details(self):\n query = db.select([self.tables.columns.ProductName,\n self.tables.columns.QuantityPerUnit,\n self.tables.columns.UnitPrice,\n self.tables.columns.UnitsInStock])\n print(query)\n ResultProxy = self.connection.execute(query)\n ResultSet = ResultProxy.fetchall()\n return ResultSet", "def load_products_data():\r\n data_path = join(current_path(), '..', 'data')\r\n product_files = glob(join(data_path, 'products', '*.xml'))\r\n products_list = []\r\n for product_filename in product_files:\r\n tree = etree.parse(product_filename)\r\n product_dict = dict_from_element(tree.getroot())\r\n if not re.match('^[0-9a-z.]+$', product_dict['index_name']):\r\n raise Exception('Invalid index name %s at %s, must match [a-z.]+' %\r\n (product_dict['index_name'] , basename(product_filename)))\r\n products_list.append(product_dict)\r\n return products_list", "def loadproducts(lid):\r\n db = get_db()\r\n\r\n b_id = session.get(\"user_id\")\r\n product_list = {}\r\n\r\n if lid == \"Products\":\r\n query = \"SELECT product_id, product_name FROM product WHERE for_business = ? AND quantity > 0\"\r\n warehouses = db.execute(query, (b_id,)).fetchall()\r\n for products in warehouses:\r\n product_list[products[0]] = products[1]\r\n else:\r\n query = \"SELECT prod_id FROM warehouse where loc_id = ? AND b_id = ?\"\r\n warehouses = db.execute(query, (lid, b_id,)).fetchall()\r\n for products in warehouses:\r\n product_name = db.execute(\r\n \"SELECT product_name FROM product WHERE product_id = ? 
AND for_business = ?\",\r\n (products[\"prod_id\"], b_id,),\r\n ).fetchone()\r\n product_list[products[\"prod_id\"]] = product_name[\"product_name\"]\r\n\r\n return jsonify(product_list)", "def peek_database(persistency_dir: Path, device_id: str):\n database_path = persistency_dir.joinpath(device_id, \"caching\", \"astarte.db\")\n properties = (\n sqlite3.connect(database_path).cursor().execute(\"SELECT * FROM properties\").fetchall()\n )\n parsed_properties = []\n for interface, major, path, value in properties:\n parsed_properties += [(interface, major, path, pickle.loads(value))]\n return parsed_properties", "def readDB():\n if not os.path.exists(filenameDB):\n return { }\n \n with open(filenameDB, \"r\") as csvfile:\n rows = csv.reader(csvfile)\n if rows:\n db = { }\n for r in rows:\n if len(r)==2 and isinstance(r[0],str) and isinstance(r[1],str):\n db[r[1]] = r[0]\n return db\n return { }", "def read_db_energies( self ):\n for row in self.db.select():\n db_energy = row.get(\"energy\")\n if ( not db_energy is None ):\n self.db_energies.append(db_energy)", "def __init__(self, database):\r\n\r\n self.num_of_items = 100 # Has to be a 1 followed by 0's\r\n self.num_of_customers = 200\r\n self.num_of_employees = 50\r\n self.GENERATE_DATA = True\r\n self.print_customers = False\r\n self.print_items = False\r\n self.print_counters = False\r\n self.print_employees = False\r\n self.database_name = database.database_name\r\n\r\n # Holds all products\r\n self.ID_DICT = {}\r\n\r\n self.customer_purchases = []\r\n self.all_customers = {}\r\n\r\n if self.GENERATE_DATA:\r\n print(\"\\n-------------| DATA GENERATION |-------------\\n\")\r\n self.conn = sqlite3.connect(self.database_name)\r\n self.c = self.conn.cursor()\r\n print(\"> Successfully connected to\", self.database_name)\r\n self.generateProducts()\r\n print(\"> Generated and stored\", self.num_of_items*6, \"items successfully\")\r\n self.generateEmployees()\r\n print(\"> Generated and stored\", self.num_of_employees, \"employees successfully\")\r\n self.generateCustomers()\r\n print(\"> Generated and stored\", self.num_of_customers, \"customers successfully\")\r\n self.c.close()\r\n self.conn.close()\r\n print(\">\", self.database_name, \"is now closed\")\r\n print(\"\\n---------------------------------------------\\n\")", "def fill_products(self):\n cursor = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)\n categories = dict()\n for page in range(1, 2):\n result = requests.get(\n 'https://fr.openfoodfacts.org/cgi/search.pl?page_size=1000&page={}&action=process&json=1'.format(\n page)).json()\n for element in result['products']:\n try:\n cursor.execute(\n \"INSERT INTO product (name, store, nutrition_grade, url) VALUES (%s, %s, %s, %s) RETURNING \"\n \"id, name\",\n (element[\"product_name\"], element[\"stores\"], element[\"nutrition_grade_fr\"], element[\"url\"]))\n # un except pour รฉviter les erreurs de clรฉs\n query_result = cursor.fetchone()\n for category in element[\"categories_tags\"]:\n try:\n cursor.execute(\"INSERT INTO product_category(product_id, category_id) VALUES (%s, %s)\",\n (query_result[0], self.categories[category]))\n except KeyError:\n print(\"Categorie insertion failed\")\n\n print(element[\"product_name\"])\n except KeyError:\n print(f'product insertion failed:')\n\n self.conn.commit()\n cursor.close()", "def generateProducts(self):\r\n\r\n # Creates items in each category\r\n for i in range(self.num_of_items):\r\n self.ID_DICT[i+self.num_of_items] = random.randint(1, 10)\r\n 
self.ID_DICT[i+self.num_of_items*2] = random.randint(1, 10)\r\n self.ID_DICT[i+self.num_of_items*3] = random.randint(1, 10)\r\n self.ID_DICT[i+self.num_of_items*4] = random.randint(1, 10)\r\n self.ID_DICT[i+self.num_of_items*5] = random.randint(1, 10)\r\n self.ID_DICT[i+self.num_of_items*6] = random.randint(1, 10)\r\n\r\n\r\n # Sort for easy selection\r\n sorted(self.ID_DICT)\r\n\r\n for product in self.ID_DICT.keys():\r\n temp_int = self.ID_DICT[product]\r\n self.c.execute(\"INSERT INTO Products (ProductID, Price) VALUES (?, ?)\", (product, self.ID_DICT[product]))\r\n self.conn.commit()\r\n\r\n if self.print_items:\r\n print(\"\\nAll items in store:\")\r\n print(self.ID_DICT)\r\n print()", "def config_db():", "def read_catalog():\n categories = session.query(Category).all()\n items = session.query(CatalogItem).order_by(CatalogItem.id.desc())\n quantity = items.count()\n return categories, items, quantity", "def loadProducts():\n dump = os.path.dirname(os.path.abspath(__file__)) + \"/dump.json\"\n data = open(dump, 'r')\n for deserialized_object in serializers.deserialize(\"json\", data):\n deserialized_object.save()", "def test_see_products_for_rent_handler(self):\n\n tables = ['customers', 'products', 'rentals']\n mongo_drop_table = mdb.DropData(tables)\n result = mongo_drop_table.drop_table()\n print(result)\n try:\n current_dir = os.getcwd()\n directory_name = current_dir + '\\\\lesson5\\\\data\\\\'\n file_name_dict = {'products': 'products.csv', 'customers': 'customers.csv',\n 'rentals': 'rentals.csv'}\n for key, value in file_name_dict.items():\n tmp_file = directory_name + value\n mongo_insert = mdb.ImportData(key, tmp_file)\n result = mongo_insert.import_data()\n print(result)\n except FileNotFoundError as e:\n logger.error('exception %s', e, exc_info=True)\n result = 'exception {}'.format(e)\n my_products_list = main.see_products_for_rent_handler()\n self.assertEqual(len(my_products_list), 10)", "def test_show_available(self):\n database.import_data('csvs', 'product_data.csv', 'customer_data.csv', 'rentals_data.csv')\n actual_available = database.show_available_products()\n expected_available = {'prd001': {'description': 'TV', 'product_type': 'livingroom',\n 'quantity_available': '3'},\n 'prd002': {'description': 'Couch', 'product_type': 'livingroom',\n 'quantity_available': '1'}}\n self.assertEqual(actual_available, expected_available)\n database.delete_database()\n\n database.import_data('csvs', 'produc_data.csv', 'customer_data.csv', 'rentals_data.csv')\n database.delete_database()", "def _database(self):\n ...", "def get_data_from_database(self, database_type):\n if database_type == \"render\":\n try:\n connection = sqlite3.connect(self.filepath_render_database)\n pointer=connection.cursor()\n\n pointer.execute(\"select * from render_information\")\n\n conntent= pointer.fetchall()\n connection.commit()\n print(conntent)\n return conntent\n except:\n print(\"was not able to read data\")\n return False\n if database_type == \"object\":\n try:\n connection = sqlite3.connect(self.filepath_object_database)\n pointer=connection.cursor()\n\n pointer.execute(\"select * from object_information\")\n\n conntent= pointer.fetchall()\n connection.commit()\n print(conntent)\n return conntent\n except:\n print(\"was not able to read data from object database\")\n return False \n pass\n\n if database_type == \"output\":\n try:\n connection = sqlite3.connect(self.filepath_output_database)\n pointer=connection.cursor()\n\n pointer.execute(\"select * from output_information\")\n\n conntent= 
pointer.fetchall()\n connection.commit()\n print(conntent)\n return conntent\n except:\n print(\"was not able to read data from output database\")\n return False \n pass", "def read_sql(self):\n pass", "def api_all():\r\n\tconn = sqlite3.connect('Shopify_products.db')\r\n\tconn.row_factory = dict_factory\r\n\tcur = conn.cursor()\r\n\tall_products = cur.execute('SELECT * FROM products WHERE inventory_count>0;').fetchall()\r\n\treturn jsonify(all_products)", "def get_all_products(self):\n\t\tpass", "def fetchall(self, databaseName):\n pass", "def get_all_data(self, site: str) -> List[Tuple[int, str, str, float]]:\n try:\n self._cursor.execute(f\"SELECT productId,productIdStr,imageUrl,dateAdded FROM {site}\")\n except sqlite3.OperationalError:\n raise sqlite3.OperationalError(f\"Table '{site}' does not exist. You can create it by the `create_table_safe` method.\")\n return self._cursor.fetchall()", "def read_database(app):\n app.status.cursorToHourglass()\n app.central.closeAllSubWindows()\n app.database().scan()\n app.status.cursorToNormal() \n app.refresh()", "def read():\n with open(DBNAME) as f:\n foo = pickle.loads(f.read())\n print foo", "def read_csv_and_insert_product_sql(self, a_columns):\n \n csv_reader = csv.DictReader(open('%s/tbl_products.csv' %(self._root_dir)))\n \n nb_rows = 0\n \n lookup_dict = Lookup(LCSVRoddExtractor.PRODUCT_MAPPER)\n \n # for each line of data create an insert line\n\n insert_line = \"INSERT INTO %s.%s (%s) VALUES (%s)\"\n \n columns = self._create_sql_columns(a_columns)\n \n #INSERT products_2_distribution (roddID,disID) SELECT p.roddID, p2d.disID FROM products p, products_2_distribution p2d ,distribution_type d WHERE d.name = 'ARCHIVE' and p.internalID = 'EO:EUM:SW:MULT:035'\n insert_in_prod2dis_p1 = \"INSERT RODD.products_2_distribution (roddID,disID) SELECT p.roddID, p2d.disID FROM products p, products_2_distribution p2d, distribution_type d WHERE d.name = '%s'\"\n \n insert_in_prod2dis_p2 = \" and p.internalID = '%s'\"\n\n prod2dis_insert_list = []\n\n for row in csv_reader:\n cpt_keys = 0\n values = \"\"\n has_changed = False\n \n val = \"\"\n \n if row.get('EUMETCast') == 'Y':\n val = 'EUMETCAST'\n \n if row.get('GTS') == 'Y':\n val = 'GTS'\n \n if row.get('MSGDirect') == 'Y':\n val = 'MSGDIRECT'\n \n if val == \"\":\n val = 'ARCHIVE'\n \n prod2dis_insert_list.append(insert_in_prod2dis_p1 % (val))\n internalID = ''\n for elem in a_columns:\n \n #get list of matching keys\n key = lookup_dict.get_key(elem)\n \n if not key:\n raise Exception(\"Error: %s as no matching keys in %s\" %(elem, LCSVRoddExtractor.PRODUCT_MAPPER))\n \n val = row.get(key[0], None)\n \n # memorize the internalID if necessary\n if elem == \"internalID\":\n internalID = val\n \n # and elem == \"resources_1\"\n if nb_rows == 200 and (\"%\" in val):\n print(\"This is the break\")\n \n has_changed, val = self._transform_product_table_data(elem, val)\n \n # if no transformations performed apply the standard rule taht considers the value as a string\n if has_changed:\n val = val if val else \"null\"\n else:\n val = \"%s\" % ( \"'%s'\" % (val) if val else \"NULL\")\n \n # add in values\n if cpt_keys == 0:\n values += \"%s\" % ( val )\n else:\n values += \", %s\" % ( val )\n \n \n cpt_keys += 1\n \n insert = insert_line % (\"RODD\", \"products\", columns, values)\n \n #print('[r%d]:insert = %s\\n' %(nb_rows, insert) )\n #file.write(\"%s;\\n\" %(insert))\n self._conn.execute(\"%s;\" %(insert))\n \n for req in prod2dis_insert_list:\n s = req + insert_in_prod2dis_p2 % (internalID)\n 
self._conn.execute(s)\n \n nb_rows += 1", "def get_product_info(self, product_id: str) -> Dict:\n product_info_request = \"SELECT * FROM product WHERE id = %s\"\n return self.query(product_info_request, (product_id,))[0]", "def return_product(product_id):\n with MY_CONNECTION as connection:\n cursor = connection.cursor()\n cursor.execute(\n \"\"\"\n SELECT id_product, product_name, product_price, in_stock, description\n FROM Products\n WHERE id_product=?\n \"\"\",\n (product_id,))\n return cursor.fetchone()", "def process_products_file(products_df: pd.DataFrame):\n products = []\n file_size = len(products_df)\n logging.info(f\"Processing PRODUCTS.csv file... {file_size} rows\")\n for index, row in products_df.iterrows():\n if not pd.isnull(row[\"NAME\"]):\n product = Product(sku=row[\"SKU\"], store=\"Richart's\")\n product.brand = row[\"BRAND\"]\n product.barcodes = row[\"BARCODES\"]\n product.description = cleanhtml(row[\"DESCRIPTION\"])\n product.category = row[\"CATEGORY\"]\n product.image_url = row[\"IMAGE_URL\"]\n product.package = row[\"BUY_UNIT\"]\n product.name = row[\"NAME\"]\n products.append(product)\n else:\n logging.warning(f\"PRODUCTS | Product without name! will not be included - index: {index}\")\n\n bulk_size = len(products)\n if bulk_size == BATCH_SIZE:\n try:\n session.bulk_save_objects(products)\n session.commit()\n logging.info(f\"Inserted {bulk_size} rows to PRODUCTS table\")\n products = []\n except Exception as e:\n logging.exception(f\"PRODUCTS error. msg: {e}\")\n session.rollback()", "def show_all_products():\n\n data = cur.execute(\"\"\"SELECT productid, productname, unitcost, stock FROM catalogue\"\"\").fetchall()\n\n print(tabulate(data, headers=[\"Product ID\", \"Name\", \"Cost\", \"Stock\"]))", "def populate_database(replace=False):\n print(\"Creating database from RDF cache\")\n if replace or not os.path.exists(db_name):\n if os.path.exists(rdf_dir):\n print('Exracting data from cataglog')\n gids = get_gids()\n cat = get_catalog(gids)\n print(\"Converting data into wide form\")\n cat_wide = get_catalog_wide(cat)\n print(\"Saving data to database\")\n save_catalog_to_db(cat_wide)\n else:\n print(\"No RDF cache. Run download-cache first.\")\n else:\n print(\"Database exists. 
To overwrite set '--replace True'\")", "def load_expenditures():\n\n Expenditure.query.delete()\n\n with open(expenditure_file) as f:\n for _ in range(1):\n next(f)\n \n for row in f:\n row = row.rstrip()\n expenditure_data = row.split(\",\")\n print(expenditure_data)\n\n id = expenditure_data[0]\n category_id = expenditure_data[1]\n price = expenditure_data[2]\n date_of_expenditure = expenditure_data[3]\n expenditure_userid = expenditure_data[4]\n where_bought = expenditure_data[5]\n description = expenditure_data[6]\n\n expenditure = Expenditure(\n id = id,\n category_id = category_id,\n price = price,\n date_of_expenditure = get_datetime(date_of_expenditure),\n expenditure_userid = expenditure_userid,\n where_bought = where_bought,\n description = description\n )\n\n db.session.add(expenditure)\n\n db.session.commit()", "def openProduct(self, product_path):\r\n\r\n product = None\r\n variables = self.readVariables(product)\r\n attributes = self.readAttributes(product)\r\n return product, variables, attributes", "def read_database(db_path, db_file, *args):\n\n db_filepath = os.path.join(db_path, db_file)\n\n # list to store loaded data\n data_imported = []\n conn = sqlite3.connect(db_filepath)\n\n for data_name in args:\n\n\n info = f'Reading {data_name} from database................'\n print(info, end=\"\")\n data_name_in_db = conn.execute(\n f\"\"\"SELECT name FROM sqlite_master WHERE type='table' \n AND name='{data_name}'; \"\"\").fetchall()\n if data_name_in_db:\n df = pd.read_sql(f\"select * from {data_name}\", con=conn)\n substitute_names(df)\n # revert single column DataFrame to Series\n if 'index' in df.columns:\n df.set_index('index', inplace=True)\n df = df.squeeze('columns')\n data_imported.append(df)\n print('ok')\n else:\n data_imported.append(None)\n print('no data')\n conn.close()\n return data_imported #if len(data_imported)>1 else data_imported[0]", "def get_data(db_dir, command, args = None):\n with lite.connect((db_dir)) as conn:\n try:\n cursor = conn.cursor()\n if args:\n cursor.execute(command,args)\n else:\n cursor.execute(command)\n data = cursor.fetchall()\n #print '[sql management] got all of the data requested according to:\\n--- %s ---\\n the data: %s'%(command, data)\n return data\n except:\n return None", "def load_DB(self):\n\t\tprint 'Loadind Data Base...'\n\t\tstream = open(self.DB_file)\n\t\tself.DB = cPickle.load(stream)\n\t\tstream.close()\n\t\tprint \"Number of documents in the Data Base: \", self.DB.nb_doc_total\n\t\tprint 'Loading completed'\n\t\treturn", "def get_products(self, data, category):\r\n for product_information in data['products']:\r\n name = product_information.get('product_name', None)\r\n # in order to remove linebreak from product name\r\n # print(\"WITH LINEBREAK : \", repr(name))\r\n if name:\r\n name = name.replace('\\n', '')\r\n # print(\"WITHOUT LINEBREAK : \", repr(name))\r\n category = Categories.objects.get(name=category)\r\n nutriscore = product_information.get('nutrition_grades', None)\r\n link = product_information.get('url', None)\r\n image = product_information.get('image_url', None)\r\n nutrition_image = product_information.get\\\r\n ('image_nutrition_url', None)\r\n if category is None \\\r\n or name is None \\\r\n or len(name) > 75 \\\r\n or nutriscore is None \\\r\n or link is None \\\r\n or image is None \\\r\n or nutrition_image is None:\r\n continue\r\n else:\r\n try:\r\n product, created = Products.objects.get_or_create(\r\n name=str(name),\r\n category=category,\r\n nutriscore=nutriscore,\r\n link=link,\r\n 
image=image,\r\n nutrition_image=nutrition_image,\r\n )\r\n if created:\r\n product.save()\r\n print(product.name)\r\n\r\n except Products.DoesNotExist:\r\n raise CommandError(\"Products %s could not been reached\"\r\n % name)\r\n except IntegrityError:\r\n continue", "def read_create_fields(self):\n return [\"product\"]", "def read(self, database ='project'):\n\t\tfile = open(self.file_name, \"r\")\n\n\t\ti = 1\n\t\tseptics = []\n\t\tfor line in file:\n\t\t\tif i > 2:\n\t\t\t\tval = line.split()\n\t\t\t\tself.check_cols(val, 13, 'septic')\n\n\t\t\t\tsep = {\n\t\t\t\t\t'name': val[0].lower(),\n\t\t\t\t\t'q_rate': val[1],\n\t\t\t\t\t'bod': val[2],\n\t\t\t\t\t'tss': val[3],\n\t\t\t\t\t'nh4_n': val[4],\n\t\t\t\t\t'no3_n': val[5],\n\t\t\t\t\t'no2_n': val[6],\n\t\t\t\t\t'org_n': val[7],\n\t\t\t\t\t'min_p': val[8],\n\t\t\t\t\t'org_p': val[9],\n\t\t\t\t\t'fcoli': val[10],\n\t\t\t\t\t'description': val[12] if val[12] != 'null' else None # 12 index because extra column\n\t\t\t\t}\n\t\t\t\tseptics.append(sep)\n\t\t\ti += 1\n\n\t\tif database == 'project':\n\t\t\tdb_lib.bulk_insert(project_base.db, project_parmdb.Septic_sep, septics)\n\t\telse:\n\t\t\tdb_lib.bulk_insert(datasets_base.db, datasets_parmdb.Septic_sep, septics)", "def product_list(id):\r\n\r\n db = get_db()\r\n product_list = db.execute(\r\n \"SELECT product_id, product_name, quantity FROM product WHERE for_business = ? AND quantity > 0\",\r\n (id,),\r\n ).fetchall()\r\n return product_list", "def get_details(self,p_id):\n workbook = load_workbook(\"products.xlsx\")\n products = workbook.active\n #loop for finding specified product\n for row in range(2,products.max_row+1):\n if products[row][0].value == p_id:\n self.id = p_id\n self.row = row\n self.quantity= products[row][2].value\n self.name = products[row][1].value\n self.price = products[row][3].value\n break\n else:\n self.id = 0\n print(\"no Such Id exits!_ \")", "def initialize():\n \n db.connect()\n db.create_tables([Product], safe=True)", "def get_product_2(conn, product_id: int) -> str:\n with conn.cursor() as cursor:\n cursor.execute(\"\"\"select id, name, price, image, category_id from products\n where id = {0}\"\"\".format(product_id))\n try:\n return cursor.fetchone()\n except TypeError:\n raise errors.StoreError", "def get_products(self, adi):\r\n obj = None\r\n if self.from_copy:\r\n with open(self.products_copy.format(adi), encoding='utf-8') as f:\r\n obj = json.load(f)\r\n return obj\r\n else:\r\n return self.rf.get_products(self.urls[adi])", "def select(self):\n connection = sqlite3.connect(DB_FILE)\n cursor = connection.cursor()\n cursor.execute(\"SELECT * FROM foodbank\")\n return cursor.fetchall()", "def show_available_products(*args):\n logger.info(f\"Preparing dict of available prodcuts...\")\n available_products = {}\n\n with MONGO:\n mdb = eval(Settings.connect_string)\n products = mdb[\"product\"]\n for doc in products.find():\n del doc[\"_id\"]\n if int(doc[\"quantity_available\"]) > 0:\n product_id = doc[\"product_id\"]\n del doc[\"product_id\"]\n available_products[product_id] = doc\n\n return available_products", "def extract_product(product):\r\n product = \"%\" + product + \"%\"\r\n cursor.execute(\"USE openfoodfacts;\")\r\n cursor.execute(\"\"\"SELECT Food.id, Food.name, categories_id, nutri_score, url, stores \\\r\n FROM Food \\\r\n INNER JOIN Categories ON Food.categories_id LIKE Categories.name\\\r\n WHERE Food.name LIKE %s;\"\"\", (product))\r\n product = cursor.fetchone()\r\n product_class = cl.Food(product)\r\n return product_class", "def 
get_products(self, filename: str):\n try:\n file = open(filename, \"r\")\n products = json.load(file)\n except FileNotFoundError as err:\n logging.error(f\"[ERROR] File {filename} not found. Traceback: {err}\")\n return False\n else:\n return products", "def list_all_products(*args):\n logger.info(f\"Perparing dict of all products...\")\n all_products_dict = {}\n\n with MONGO:\n mdb = eval(Settings.connect_string)\n products = mdb[\"product\"]\n all_products = products.find({})\n for product in all_products:\n product_id = product[\"product_id\"]\n del product[\"_id\"]\n del product[\"product_id\"]\n all_products_dict[product_id] = product\n return all_products_dict", "def load_product_image_catalog(self, sql_context,\n table_name=settings.IMAGE_TABLE_NAME):\n return (sql_context.read.format('jdbc')\n .options(url=\"jdbc:\" + settings.DB_URL,\n dbtable=table_name,\n driver=\"org.postgresql.Driver\")\n .load())", "def read_data_from_file(file_name):\r\n file = open(file_name, 'r')\r\n for line in file:\r\n data = line.split(\",\")\r\n newProduct = Product(data[0].strip(), data[1].strip())\r\n lstOfProductObjects.append(newProduct)\r\n file.close()\r\n return lstOfProductObjects", "def read(id):\n db = core.connect()\n return db[id]", "def test_see_all_different_products_handler(self):\n\n tables = ['customers', 'products', 'rentals']\n mongo_drop_table = mdb.DropData(tables)\n result = mongo_drop_table.drop_table()\n print(result)\n try:\n current_dir = os.getcwd()\n directory_name = current_dir + '\\\\lesson5\\\\data\\\\'\n file_name_dict = {'products': 'products.csv', 'customers': 'customers.csv',\n 'rentals': 'rentals.csv'}\n for key, value in file_name_dict.items():\n tmp_file = directory_name + value\n mongo_insert = mdb.ImportData(key, tmp_file)\n result = mongo_insert.import_data()\n print(result)\n except FileNotFoundError as e:\n logger.error('exception %s', e, exc_info=True)\n result = 'exception {}'.format(e)\n my_products_list = main.see_all_different_products_handler()\n self.assertEqual(len(my_products_list), 10)", "def get_all_product():\r\n\r\n with mysql.db_session() as session:\r\n product = session.query(Product).all()\r\n\r\n if not product:\r\n return response.create_not_found_response()\r\n\r\n response_data = [each.to_dict() for each in product]\r\n\r\n return response.Response(message=response_data)", "def populateDB(db, packages):\n conn = sqlite3.connect(db)\n cur = conn.cursor()\n print \"opened db successfully\"\n for vul_id, package in packages.items():\n for info in package:\n cur.execute('''INSERT INTO packages(name, version, release, vulnerabilityId, OS_name, OS_version)\n\t\t\t VALUES(?,?,?,?,?,?);''', (info['name'], info['version'], info['release'], vul_id, info['os_name'], info['os_version']))\n \n conn.commit()\n conn.close()", "def readProvidersFromDatabase(self, database):\n\t\ttry:\n\t\t\tdb = sqlite3.connect(database)\n\t\t\tc = db.cursor()\n\t\t\tc.execute(\"SELECT * FROM Providers\")\n\t\t\tfetched = c.fetchall()\n\t\t\tfor p in fetched:\n\t\t\t\tself.prvList.append(format3.Provider(p[1], p[2], p[3], petlib.pack.decode(p[4])))\n\t\texcept Exception, e:\n\t\t\tprint \"[%s] > Error during reading from the database: %s\" % (self.name, str(e))", "def read_db():\n # read config file\n config = configparser.ConfigParser()\n config.read_file(open(\"options.cfg\"))\n\n return config['DEFAULT']['DatabaseFilename']", "def get_stock_data():\n if not os.path.exists('./catalog/stock_data'):\n os.mkdir('./catalog/stock_data')\n \n inventory_data = {}\n 
inventory_file = './catalog/stock_data/inventory-bro.txt'\n \n download_data = True\n if os.path.exists(inventory_file):\n # Check that inventory file is no more than 1 day old\n filestat = os.stat(inventory_file)\n tm = datetime.datetime.fromtimestamp(filestat.st_mtime)\n today = datetime.datetime.now()\n dt = today - tm\n if dt.days < 1:\n download_data = False\n \n if download_data:\n # Get inventory data from ftp site\n from ftplib import FTP_TLS\n print 'Downloading inventory-bro.txt ....'\n ftps = FTP_TLS('ftp.appareldownload.com')\n ftps.login('Br0d3r', 'Br0d3r2oll')\n ftps.prot_p()\n #ftps.retrlines('LIST')\n ftps.retrbinary('RETR inventory-bro.txt', open(inventory_file, 'wb').write)\n ftps.quit()\n \n print \"Parse inventory-bro.txt ... \"\n first_row = None\n for row in csv.reader(open(inventory_file, 'rb')):\n itemRef = row[4].lower()\n if itemRef == 'style number':\n # save first row to be used as column header\n first_row = row\n continue\n \n source_attribs = [{'attribute_type': 'source', 'attribute_value': 'broderbros'}]\n \n inventory_data.setdefault(itemRef, [])\n \n color = row[8].lower()\n size = row[10].lower()\n \n # Warehouses starts at column 13\n for i in range(13, len(first_row)):\n wh_name = first_row[i]\n options = [\n {'option_type': 'color', 'option_value': color, 'attributes': []},\n {'option_type': 'size', 'option_value': size, 'attributes': []},\n {'option_type': 'warehouse', 'option_value': wh_name, 'attributes': source_attribs, 'shared': True},\n {'option_type': 'vendor', 'option_value': 'broderbros', 'attributes': source_attribs, 'shared': True},\n ]\n inventory_data[itemRef].append({'options': options, 'inventory': row[i]})\n \n # Pricing data\n pricing_tarfile = \"./catalog/stock_data/bro-AllStyles_R06.tar.gz\"\n download_data = True\n if os.path.exists(pricing_tarfile):\n # Check that file is no more than 1 day old\n filestat = os.stat(pricing_tarfile)\n tm = datetime.datetime.fromtimestamp(filestat.st_mtime)\n today = datetime.datetime.now()\n dt = today - tm\n if dt.days < 1:\n download_data = False\n \n if download_data:\n print 'Downloading items.csv for price data ....'\n br = utils.create_browser(1, 2)\n br.open(\"https://www.broderbros.com/cgi-bin/online/webbro/bro-index.w\")\n try:\n # Fill login form\n br.select_form(name = 'frmLogin')\n frm = br.form\n \n ctrl = frm.find_control('userName')\n ctrl.value = USERNAME\n ctrl = frm.find_control('password')\n ctrl.value = PASSWORD\n \n # Submit login form\n if TESTRUN: print 'Submit Login Form'\n \n br.select_form(name = 'frmLogin')\n br.submit()\n except:\n print \"Login form does not exist, please check URL, downloaded html or site is down\"\n return None\n try:\n tar_url = \"https://www.broderbros.com/cgi-bin/download/webshr/prod-info-view.w?f=bro-AllStyles_R06.tar.gz\"\n br.retrieve(tar_url, pricing_tarfile)\n except:\n print \"Error when downloading pricing file\"\n return None\n \n try:\n tar = tarfile.open(pricing_tarfile)\n for member in tar.getmembers():\n member.name = member.name.split('/')[-1] # strip directory from filename\n tar.extractall('catalog/stock_data/bro-AllStyles_R06')\n tar.close()\n except:\n print \"Error when extracting items.csv\"\n return None\n \n f_object = open('./catalog/stock_data/bro-AllStyles_R06/items_R06.csv', 'rb')\n #~ f_object = open('items_R06.csv', 'rb')\n \n print \"Parse items_R06.csv ... 
\"\n for row in csv.reader(f_object):\n itemRef = row[7].lower()\n if itemRef == 'style code':\n continue\n \n size = row[8].lower()\n color = row[11].lower()\n price = row[18]\n \n item_data = inventory_data.get(itemRef)\n if not item_data:\n continue\n # Find data with same size and color\n for var_dict in item_data:\n options = var_dict['options']\n opt_dict = {}\n for opt in options:\n opt_type = opt['option_type']\n opt_value = opt['option_value']\n if opt_type == 'size':\n opt_dict['size'] = opt_value\n elif opt_type == 'color':\n opt_dict['color'] = opt_value\n if opt_dict['size'] == size and opt_dict['color'] == color:\n var_dict['price'] = [{'price_type': 'retail_price', 'price': price}]\n \n f_object.close()\n \n try:\n shutil.rmtree(\"./catalog/stock_data/bro-AllStyles_R06\")\n #~ os.remove(\"./catalog/stock_data/bro-AllStyles_R06.tar.gz\")\n except:\n pass\n \n return inventory_data", "def handle(self, *args, **options):\n\n # pylint: disable=line-too-long\n\n # Connection to the database\n database = DbInteract()\n\n for categories in self.PRODUCTS_CATEGORIES:\n\n for category in categories[\"name\"]:\n # Registration of each category in a SQL table\n category_reference = database.insert_category(category, categories[\"alternative\"])\n\n # API call to get all food products for each category\n result = requests.get(\"https://fr.openfoodfacts.org/cgi/search.pl?action=process&tagtype_0=categories&tag_contains_0=contains&tag_0=\" + category + \"&sort_by=unique_scans_n&page_size=1000&axis_x=energy&axis_y=products_n&action=display&json=1\")\n products_details = result.json()\n\n for product in products_details[\"products\"]:\n\n attributes = [self.NAME, self.NUTRIGRADE, self.URL, self.IMAGE, self.NUTRITION_IMAGE, self.DESCRIPTION, self.STORE, self.BRAND]\n if not self._does_product_contains_given_attributes(product, attributes):\n continue # to the following product\n\n else:\n name = product[self.NAME]\n description = product[self.DESCRIPTION]\n url = product[self.URL]\n image = product[self.IMAGE]\n nutrition_image = product[self.NUTRITION_IMAGE]\n # Just to monitore\n print(url)\n\n nutrigrade = database.insert_nutrigrade(product[self.NUTRIGRADE])\n\n store = database.insert_store(product[self.STORE])\n\n brand = database.insert_brand(product[self.BRAND])\n\n database.insert_product(\n name,\n description,\n url,\n image,\n nutrition_image,\n category_reference,\n brand,\n store,\n nutrigrade,\n )\n\n self.stdout.write(self.style.SUCCESS('Successfully populate database'))", "def query_product_details(query_key, query_type):\n\n if query_type == \"search\":\n app.logconsole.info(\"Searching for products having topic: \" + str(query_key))\n products = query_db('select id,name from books where topic=\"' + str(query_key) + '\"')\n app.logfile.info('select id,name from books where topic=' + str(query_key))\n return jsonify(products)\n\n elif query_type == \"lookup\":\n app.logconsole.info(\"Looking up product having id: \" + str(query_key))\n product = query_db('select * from books where id=' + str(query_key))\n app.logfile.info('select * from books where id=' + str(query_key))\n return jsonify(product)", "def loadDB(self,dbfilename):\n \n db=[]\n with open(dbfilename,'r',encoding='ISO-8859-1') as dbfilename:\n dbreader= csv.reader(dbfilename,delimiter=self.sDelimiter )\n for lFields in dbreader:\n db.append(lFields)\n\n return db", "def _product_spec_by_id(self, product_id):\n if not isinstance(product_id, int):\n raise TypeError(\"Product ID should be integer\")\n\n for 
product_group in self.db:\n for db_id in self.db[product_group]:\n if product_id == db_id:\n return self.db[product_group][product_id]\n\n raise ProductDatabaseError('Wrong product ID. Not found in DB')", "def __print_from_database(self, product: dict, procedure_result: list):\n\n # procedure_result[1] = p_product_id\n # procedure_result[2] = p_exist_substitutes\n # procedure_result[3] = p_researched_subsitutes\n\n print('Produit déjà présent dans la base de données.')\n # if product doesn't have substitutes in database\n if not procedure_result[2] and not procedure_result[3]:\n # get substitutes of the current product\n # from the openfoodfacts API\n substitutes = self.api_operator.get_substitutes(\n product['categories_tags'][-1],\n product.get('nutrition_grades', 'e'))\n self.database_manager.save_substitutes_sql_database(\n procedure_result[1], substitutes)\n operateur_result = []\n self.database_manager.fill_list_with_product_and_substitutes(\n procedure_result[1], operateur_result)\n self.printer(operateur_result)\n self.ask_with_input('Ok ? (y) ', -1, ('y',))", "def write_bestbuy_product_data(engine, api = 'http://api.remix.bestbuy.com/v1/products?format=json&pageSize=100&apiKey=q3yfbu6smh6bzydeqbjv9kas'):\n # delete the unnecesssary records\n products_data = preproc.getData_API(api, 'products')\n del products_data['videoChapters']\n del products_data['videoLanguages']\n products_data = products_data.where(pd.notnull(products_data), None)\n print products_data\n print products_data.columns\n # products_data.to_sql('bestbuy_products_data', con = engine, index = False, if_exists = 'replace')", "def get_items_from_category(save_db=False):\n query_result = pd.read_sql_query(\"\"\"SELECT c1.id, c1.name, c1.parent_id, c1.url\n FROM categories c1 LEFT OUTER JOIN categories c2\n ON c1.id = c2.parent_id\n WHERE c2.parent_id IS NULL\n LIMIT 1400 OFFSET 1300\"\"\", conn)\n for i in query_result.itertuples():\n name = i.name[:-10].strip()\n cat_url = i.url\n cat_id = i.id\n quantity = i.name[-10:].strip()\n \n for i in range(100):\n url = cat_url + f'&page={i+1}'\n print(url)\n soup = get_url(url)\n \n result = []\n \"\"\" item: div 'product-item' > div 'content'\n img: img 'product-imgage'\n title: p 'title'\n price: span 'price-regular'\n sale-tag: span 'sale-tag'\n final-price: span 'final-price'\n \"\"\"\n try:\n div_container = soup.find_all('div', {'class': 'product-item'})\n except Exception as err:\n print('ERROR BY DIV FINDALL: ', err)\n if div_container:\n for div in div_container:\n # it = {'item_id':'','name':'', 'brand':'', 'url':'', 'img_url':'', 'price':'', 'sale-tag':'', 'final-price':''}\n item_id = None\n item_path = div['data-category']\n item_name = div.a['title']\n brand = div['data-brand']\n item_url = div.a['href']\n img_url = div.img['src']\n regular_price = div.find('span', {'class': 'price-regular'}).text\n sale_tag = div.find('span', {'class': 'final-price'}).text[-5:-1]\n final_price = div.find('span', {'class': 'final-price'}).text[:-5].strip()\n\n item = Items(item_id, item_path, cat_id, item_name, brand, item_url,\n img_url, regular_price, sale_tag, final_price)\n if save_db:\n item.save_into_db()\n print(f'SAVE {item_name} INTO DTB')\n result.append(item)\n else:\n break", "def get_db():\n with open(db_file) as f:\n db = json.load(f)\n return db", "def loadDatabase ():\n database = []\n # Open a file\n path = \"lyd/\"\n dirs = os.listdir( path )\n \n # This prints all of the files and directories\n for file in dirs:\n if file == \".DS_Store\": #Mac file\n 
continue\n songdict = {}\n print (file)\n Zxx = STFTsignal.getSTFTofFile(path + file) #STFT of the file\n #mean, eigen and weights are stored in dictionary songdict\n songdict[\"mean\"], songdict[\"eigen\"], songdict[\"weights\"] = PCA(Zxx)\n songdict[\"name\"] = file\n database.append (songdict) \n return database", "def start_database():\r\n con = sqlite.connect(core.config.DBFILE)\r\n \r\n if con:\r\n cur = con.cursor()\r\n\r\n # Execute the SELECT statement:\r\n cur.execute(\"select * from monrito_systems\")\r\n\r\n # Retrieve all rows as a sequence and print that sequence:\r\n log.debug(cur.fetchall())\r\n return True\r\n else:\r\n return False", "def scrapeProducts(conn):\n query = \"\"\"SELECT c.name, f1.item, f1.product_name, f1.manufacturer,\n f1.supplier, c.id\n FROM form1_row AS f1\n INNER JOIN country AS c\n ON f1.country=c.id\"\"\"\n c = conn.cursor()\n c.execute(query)\n results = []\n for row in c:\n result={}\n result['country'] = row[0]\n result['formulation'] = row[1].replace('*', '')\n result['product'] = row[2]\n result['manufacturer'] = row[3]\n result['supplier'] = row[4] or None\n result['country_id'] = row[5]\n results.append(result)\n return results", "def test_see_products_for_rent(self):\n\n # clean up\n DropDataUnitTesting.drop_data()\n tables = ['customers', 'products', 'rentals']\n mongo_drop_table = mdb.DropData(tables)\n result = mongo_drop_table.drop_table()\n print(result)\n # create new unittest data\n ImportUnitTestData.import_data_handler()\n with TestDatabase.mongo:\n norton_db = TestDatabase.mongo.connection.UnitTestNortonDB\n unittest_products_list = []\n try:\n products = norton_db['products']\n products_collection = products.find()\n for document in products_collection:\n unittest_products_list.append('{0}, qty available({1})'.\n format(document['description'],\n document['quantity_available']))\n except OperationFailure as e:\n logger.error('mongo retrieve table error %s', e, exc_info=True)\n unittest_products_list = ['mongo retrieve table error {}'.format(e)]\n if len(unittest_products_list) == 0:\n unittest_products_list.append('no products found')\n # create new data\n try:\n current_dir = os.getcwd()\n directory_name = current_dir + '\\\\lesson5\\\\data\\\\'\n file_name_dict = {'products': 'products.csv', 'customers': 'customers.csv',\n 'rentals': 'rentals.csv'}\n for key, value in file_name_dict.items():\n tmp_file = directory_name + value\n mongo_insert = mdb.ImportData(key, tmp_file)\n results = mongo_insert.import_data()\n print(results)\n except FileNotFoundError as e:\n logger.error('exception %s', e, exc_info=True)\n result = 'exception {}'.format(e)\n print(result)\n test_products = mdb.ShowProductsAndCustomers()\n my_test_list = test_products.see_products_for_rent()\n self.assertEqual(my_test_list, unittest_products_list)", "def load_products_data(connection, csvfile):\n insert_sql = 'insert into products (id, description, genres) ' \\\n 'values (%s, %s, %s)'\n load_data(connection, insert_sql, get_data_from_file(csvfile))", "def __init__(self):\n self.conn = psycopg2.connect(dbname=DB, user=DB_USER, password=DB_PW, host=HOST, port=PORT)\n self.categories = self.fill_category()\n self.fill_products()" ]
[ "0.7133278", "0.7089956", "0.6694715", "0.66771376", "0.6661934", "0.6568496", "0.65388924", "0.64580315", "0.64211625", "0.63623816", "0.63278025", "0.631014", "0.628087", "0.62696344", "0.62645537", "0.6238003", "0.6236366", "0.6181283", "0.6149317", "0.61312556", "0.6116168", "0.6100561", "0.60865474", "0.60373616", "0.6023134", "0.6007201", "0.5997458", "0.5996659", "0.5966875", "0.59306854", "0.5870467", "0.58555126", "0.5849609", "0.58469146", "0.5831527", "0.58254844", "0.5787111", "0.57712334", "0.57556665", "0.5738541", "0.5737296", "0.57323897", "0.5729088", "0.57268775", "0.5720335", "0.57075536", "0.57032746", "0.56957114", "0.56683975", "0.56602395", "0.5659832", "0.5639442", "0.563567", "0.5635438", "0.56254303", "0.56252843", "0.56247944", "0.56243664", "0.5623362", "0.562127", "0.5620208", "0.5615781", "0.56132495", "0.5609948", "0.56076723", "0.5604475", "0.56024873", "0.5599708", "0.5594219", "0.55897987", "0.55840325", "0.5578912", "0.55781376", "0.5576318", "0.5576255", "0.5574148", "0.55671823", "0.5561532", "0.55543715", "0.55387884", "0.5534959", "0.55276406", "0.552682", "0.5526116", "0.55256504", "0.5523912", "0.5516487", "0.551356", "0.5510907", "0.55083", "0.5507751", "0.55068076", "0.5502259", "0.5501651", "0.5500433", "0.54843336", "0.54738814", "0.5471885", "0.5471184", "0.5468188" ]
0.63234115
11
This function reads the transaction data
def read_transactions(filename=None): if not filename: filename = settings.TRANSACTION_FILENAME return pd.read_csv(filename, sep='|', parse_dates=[4])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ReadTransaction(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def _read_data(self):", "def readOneData(self):\n\t\tpass", "def read(self):", "def read_data(self):\n raise NotImplementedError", "def ReadTransaction(self, request, timeout, metadata=None, with_call=False, protocol_options=None):\n raise NotImplementedError()", "def _read_transaction(self, tx_fun, **kwargs):\n # Wrapper for neo4j.Session.read_transaction\n with self._driver.session() as session:\n result = session.read_transaction(tx_fun, **kwargs)\n return result", "def load_data():\n dataFile = open('transactions.json', 'r')\n data = json.load(dataFile)\n transactions = data['transactions']#TID\n items = data['items']#item sets\n return items, transactions", "def ReadTransaction(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def ReadTransaction(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def read_trade() -> dict:\n try:\n ddb_table.get_item(Key = data_to_get)\n except Exception as e:\n _LOGGER.error('Problem updating dynamodb trade data. {0}'.format(e))\n raise Exception('Problem updating dynamodb trade data. {0}'.format(e))", "def getTxnFromData(self, oid, back):\n h = self._read_data_header(back, oid)\n return h.tid", "def read():\n # TODO", "def readData(self, key, context):\n\n print(\"key: {}\".format(key))\n address = _make_benchcontract_address(\"key_{}\".format(key))\n print(\"address: {}\".format(address))\n data = context.get_state(\n [address],\n timeout=self.timeout)\n print(\"readData obtained {} --> {} from state\".format(key, data[\"data\"]))\n return 0", "def read_raw_data(self):\n # Must be set by the user\n raise Exception(\"not implemented\")", "def read(self):\n pass", "def readtrans(self, addr):\n self._start_transaction(write=False, address=addr)\n self.address.next = addr\n self.read.next = True\n to = 0\n while self.waitrequest and to < self.timeout:\n yield self.clock.posedge\n self.read.next = False\n self._end_transaction(self.readdata)", "def read(self, sacc_data: sacc.Sacc) -> None:", "def read(self, sacc_data: sacc.Sacc) -> None:", "def read(self, sacc_data: sacc.Sacc) -> None:", "async def test_tx_metadata(self):\n data_hex = pkg_resources.resource_string(__name__, os.path.join('data', 'tx_metadata_01.txt'))\n data_bin = binascii.unhexlify(data_hex)\n reader = x.MemoryReaderWriter(bytearray(data_bin))\n ar = xmrb.Archive(reader, False, xmr.hf_versions(9))\n\n msg = xmr.PendingTransaction()\n await ar.root()\n await ar.message(msg)\n\n self.assertEqual(msg.tx_key,\n binascii.unhexlify(b'a0a50810dbc38101a79525823428b500ac936dfea613c73b4864f7260ff26a0b'))\n self.assertEqual(msg.change_dts.amount, 99972803971000)\n self.assertEqual(msg.fee, 9119110000)\n self.assertEqual(msg.construction_data.use_rct, 1)\n self.assertEqual(len(msg.construction_data.extra), 44)\n self.assertEqual(len(msg.construction_data.sources), 1)\n self.assertEqual(msg.construction_data.sources[0].amount, 100000000000000)\n self.assertEqual(msg.construction_data.sources[0].mask,\n binascii.unhexlify(b'2dea8778cf4e89a7f32b5659d674d44795a370a00f79ee9b2ea37c1fcb005c0d'))\n self.assertEqual(len(msg.construction_data.sources[0].outputs), 7)\n self.assertEqual(msg.construction_data.sources[0].outputs[6][0], 1727)\n 
self.assertEqual(msg.construction_data.sources[0].outputs[6][1].mask,\n binascii.unhexlify(b'2eeec82a970bfa54c35b0b740f6fb0585de14818e3c6dceed75c76fe69e3e449'))\n\n self.assertEqual(len(msg.construction_data.dests), 1)\n self.assertEqual(len(msg.construction_data.splitted_dsts), 2)\n self.assertEqual(msg.construction_data.splitted_dsts[0].amount, 18076919000)\n self.assertEqual(msg.construction_data.splitted_dsts[1].amount, 99972803971000)\n self.assertEqual(len(msg.construction_data.subaddr_indices), 1)\n\n writer = x.MemoryReaderWriter()\n ar2 = xmrb.Archive(writer, True, xmr.hf_versions(9))\n await ar2.root()\n await ar2.message(msg)\n self.assertEqual(data_bin, bytearray(writer.get_buffer()))\n\n msg.construction_data.use_bulletproofs = False\n writer = x.MemoryReaderWriter()\n ar2 = xmrb.Archive(writer, True, xmr.hf_versions(9))\n await ar2.root()\n await ar2.message(msg)", "def readMuchData(self, len, start, context):\n\n print(\"start: {}\".format(start))\n print(\"end: {}\".format(start + len))\n\n sum = 0\n\n for i in range(start, start + len):\n try:\n key = \"key_{}\".format(i)\n address = _make_benchcontract_address(key)\n print(\"address: {}\".format(address))\n except:\n print(\"Some error\")\n\n try:\n data = context.get_state(\n [address],\n timeout=self.timeout)\n print(\"Data: {}\".format(data))\n value = int(data[\"data\"])\n print(\"Obtained {} --> {} from state\".format(key, value))\n # sum = sum + value\n except:\n print(\"No entry found for {}\".format(key))\n\n print(\"total sum: {}\".format(sum))", "def __readData(file_path):\n with zipfile.ZipFile(file_path) as f:\n tmp_path = f.namelist()[0] # 压缩文件里只有一个 'text8' 文件\n print '\\nReading %s/%s' % (file_path, tmp_path)\n content = f.read(tmp_path) # 读取文件内容\n print 'Finish Reading %s/%s' % (file_path, tmp_path)\n\n print '\\nTransferring data format ...'\n content = content.lower() # 保证全部字符转为小写\n content = [Download.char2Id(i) for i in content] # 将字符全部转为 id\n print 'Finish transferring'\n return content", "def read_prepare(self):\n # PROTECTED REGION ID(AsyncTabata.prepare_read) ENABLED START #\n return self._prepare\n # PROTECTED REGION END # // AsyncTabata.prepare_read", "def read(self):\n try:\n datos = self.base.readData()\n for i in range(len(datos)):\n self.verDatos.insert('', i+1, text = i+1, values = (datos[i][0], datos[i][1], datos[i][2]))\n except:\n showerror(\"Error\", exc_info()[1])", "def transaction(self, transaction):\n # Allow for a list of blocks..\n transaction = utils.request_type(transaction)\n\n res = r.get(self.url + self.tx_info + str(transaction))\n return self.execute(res)", "def read(self):\n raise NotImplementedError", "def test_get_transaction_details_request(self):\n self.trans_details.get_transaction_details(\n trans_id = 123456,\n )", "async def process_get_txn(self, int txn) -> str:\n\n return json.dumps(json.loads(await ledger.build_get_txn_request(self.did, txn))['result']['data'])", "def read_data(self) -> List[BaseRecord]:\n pass", "def _read_data(self, path: str) -> T:\n raise NotImplementedError", "def test_read_with_no_locks(self):\n\n transaction = Transaction(\"T1\", TransactionType.READ_WRITE, 1)\n instruction = Instruction(\"R(T1, x2)\")\n\n self.assertEquals(self.data_manager.read(transaction, instruction), \"20\")", "def readData(self):\n if (self.model == 'GDS'):\n self.write(':ACQ'+str(ch)+':MEM?\\n')\n elif (self.model == 'TDS'):\n self.write('CURVe?\\n')\n\n # Check for the initial '#'; if not present, 
raise error.\n if (self.read(1) != '#'):\n raise Exception, \"Expected header not present\"\n\n # Read the data length indicator\n dataSize = int(self.read(int(self.read(1))))\n\n # extra steps for GDS\n if (self.model == 'GDS'):\n # subtract the 8 bytes we will read.\n dataSize -= 8\n # Read the sampling period\n hstep = struct.unpack('>f', self.read(4))[0]\n # also, fix hoff so it corresponds with that for TDS\n # FIXME: check with the scope at some point.\n hoff = hoff - float(dataSize/4) * hstep\n # Read 4 bytes to advance to the actual data: first byte\n # contains the channel and the three are not used,\n # according to the GDS800 manual.\n self.read(4)\n \n # Read data; TDS expects a 1-byte data, GDS expects 2-byte one.\n if (self.model == 'TDS'):\n data = list(struct.unpack('>'+str(dataSize)+'b',\n self.read(dataSize)))\n # TDS has a trailing '\\n' that should be drained.\n self.read(1)\n elif (self.model == 'GDS'):\n data = list(struct.unpack('>'+str(dataSize/2)+'h',\n self.read(dataSize)))\n\n return data", "def read_data(self, cond=None):\n data, last, _id, user, dest, valid = self.tdata.read_data(cond)\n return data, last, _id, user, dest, valid", "def read_serial_data(self):\n qdata = list(get_all_from_queue(self.data_q))\n if len(qdata) > 0:\n data = self.data+''.join(qdata)\n while data.find(\"Id: \")!=-1:\n msgStart = data.find(\"Id: \")\n msgEnd = data.find(\"\\n\",msgStart)\n if msgEnd == -1:\n break\n\n packet = data[msgStart:msgEnd-1]\n # print \"msg: [%s]\" % packet\n msgId = int(packet[4:8],16)\n # print \"msgId: %d [%x]\" % (msgId, msgId)\n msgData = map(lambda x: int(x,16) ,packet[16:].split(\" \"))\n # print \"data: \", msgData\n self.update_data(msgId, msgData)\n\n data = data[msgEnd:]\n self.data = data", "def test_wallets_get_transaction_list(self):\n pass", "def getTxDataIn(self):\n \n return self.tx_data_in", "def test_get_transaction_list_request(self):\n self.trans_details.get_transaction_list(\n batch_id = 123456,\n )", "def _read_data(self, txtfile):\n data_string = open(txtfile,'r').read()\n return data_string", "def read(self) -> bytes | None:", "def read(self) -> bytes | None:", "def read_data(self, cond=None):\n ready = make_condition(cond)\n val = 1 if ready is None else ready\n\n _connect_ready(self.tready._get_module(), self.tready, val)\n\n data = self.tdata\n valid = self.tvalid\n last = self.tlast\n _id = self.tid\n user = self.tuser\n dest = self.tdest\n\n return data, last, _id, user, dest, valid", "def readData(self, dataDict):\n pass", "def get_trade_data(self):\n trade_list = [] # Create an empty list for storing non-cross trade information\n cross_trade_list = [] # Create an empty list for storing cross trade information\n bin_data = gzip.open(os.path.join(self.ITCH_data_path), 'rb') # Load data\n bin_data.read(2) # Pass the initial 2 control characters\n msg_type = bin_data.read(1) # Read the first message type\n # Loop through all messages\n while msg_type:\n assert(msg_type in [b'S', b'R', b'H', b'Y', b'L', b'V', b'W', b'K', b'J', b'h', b'A', \\\n b'F', b'E', b'C', b'X', b'D', b'U', b'P', b'Q', b'B', b'I']) # Insure msg_type is valid\n if msg_type == b'S':\n bin_data.read(13)\n elif msg_type == b'R':\n bin_data.read(40)\n elif msg_type == b'H':\n bin_data.read(26)\n elif msg_type == b'Y':\n bin_data.read(21)\n elif msg_type == b'L':\n bin_data.read(27)\n elif msg_type == b'V':\n bin_data.read(36)\n elif msg_type == b'W':\n bin_data.read(13)\n elif msg_type == b'K':\n bin_data.read(29)\n elif msg_type == b'J':\n bin_data.read(36)\n elif 
msg_type == b'h':\n bin_data.read(22)\n elif msg_type == b'A':\n bin_data.read(37)\n elif msg_type == b'F':\n bin_data.read(41)\n elif msg_type == b'E':\n bin_data.read(32)\n elif msg_type == b'C':\n bin_data.read(37)\n elif msg_type == b'X':\n bin_data.read(24)\n elif msg_type == b'D':\n bin_data.read(20)\n elif msg_type == b'U':\n bin_data.read(36)\n elif msg_type == b'P': # Process and store information of non-cross trades\n msg = bin_data.read(43)\n record = self.trade_message(msg)\n trade_list.append(record) \n bin_data.read(2)\n elif msg_type == b'Q': # Process and store information of cross trades\n msg = bin_data.read(39)\n record = self.cross_trade_message(msg)\n cross_trade_list.append(record) \n bin_data.read(2)\n elif msg_type == b'B':\n bin_data.read(20)\n elif msg_type == b'I':\n bin_data.read(51)\n msg_type = bin_data.read(1)\n # Convert list to DataFrame\n trade_df = pd.DataFrame(trade_list, columns = ['time', 'end_time_hour', 'type', 'shares', 'stock', 'price'])\n cross_trade_df = pd.DataFrame(cross_trade_list, columns = ['time', 'end_time_hour', 'type', 'shares', 'stock', 'price'])\n self.trade_df = trade_df\n self.cross_trade_df = cross_trade_df\n return self.trade_df, self.cross_trade_df", "def read_data(self, loc):\n pass", "def testReadDataFile(self):\n try:\n blockNameList = []\n myReader = ParseCifSimple(self.__pathPdbxDataFile, False, 0, 255, \"?\", self.__logFileName)\n blockNameList = myReader.GetBlockNames(blockNameList)\n #\n for blockName in blockNameList:\n block = myReader.GetBlock(blockName)\n tableNameList = []\n tableNameList = block.GetTableNames(tableNameList)\n for tableName in tableNameList:\n table = block.GetTable(tableName)\n columnNameList = table.GetColumnNames()\n logger.debug(\"Table %s colunms %r\", tableName, columnNameList)\n numRows = table.GetNumRows()\n rowList = []\n for iRow in range(0, numRows):\n row = table.GetRow(iRow)\n rowList.append(row)\n logger.debug(\"table %s row length %d\", tableName, len(rowList))\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n self.fail()", "def test_read_data(dbh):\n assert len(dbh.trading_history) == 0\n dbh.read_data()\n assert len(dbh.trading_history) > 0\n dbh.trading_history = []\n assert len(dbh.trading_history) == 0\n dbh.read_data(currentdir+'/../test/test_data/trading_log.json')\n assert len(dbh.trading_history) > 0", "def getDataTransaction(self, down, up):\n if down.isDataTransaction():\n return down\n else:\n return up", "def get_data(self):\n\n self.read_expression()\n self.read_tfs()\n self.read_metadata()\n self.set_gold_standard_and_priors()", "def Read(self):\n try:\n file_object = self._zip_file.open(self._stream_name, mode='r')\n except KeyError as exception:\n raise IOError(\n 'Unable to open stream with error: {0!s}'.format(exception))\n\n try:\n entry_data = file_object.read(self._TABLE_ENTRY_SIZE)\n while entry_data:\n table_entry = self._TABLE_ENTRY.parse(entry_data)\n\n self._timestamps.append(table_entry.timestamp)\n entry_data = file_object.read(self._TABLE_ENTRY_SIZE)\n\n except construct.FieldError as exception:\n raise IOError(\n 'Unable to read table entry with error: {0!s}'.format(exception))\n\n finally:\n file_object.close()", "def _moneta_form_get_tx_from_data(self, data):\n # reference, trans_id, fingerprint = data.get('x_invoice_num'), data.get('x_trans_id'), data.get('x_MD5_Hash')\n reference, trans_id, fingerprint = data.get('MNT_TRANSACTION_ID'), data.get('MNT_OPERATION_ID'), data.get('MNT_SIGNATURE')\n if not reference or not trans_id or 
not fingerprint:\n error_msg = 'moneta: received data with missing reference (%s) or trans_id (%s) or fingerprint (%s)' % (reference, trans_id, fingerprint)\n _logger.error(error_msg)\n raise ValidationError(error_msg)\n tx = self.search([('reference', '=', reference)])\n if not tx or len(tx) > 1:\n error_msg = 'moneta: received data for reference %s' % (reference)\n if not tx:\n error_msg += '; no order found'\n else:\n error_msg += '; multiple order found'\n _logger.error(error_msg)\n raise ValidationError(error_msg)\n return tx[0]", "def _readData(self):\n # Debug. This fn should be called only after checking canRead()\n if not self._canRead():\n raise Exception(\"Trying to read more data than there is.\")\n\n data = self.buffer[:self._expectedByteCount]\n self.buffer = self.buffer[self._expectedByteCount:]\n\n return data", "def read_transactions(path: str) -> pd.DataFrame:\n\n # Load transactions from file\n if path.endswith(\".xlsx\"):\n if str(environ.get(\"DEBUG_MODE\", \"false\")).lower() != \"true\":\n warnings.filterwarnings(\n \"ignore\", category=UserWarning, module=\"openpyxl\"\n )\n transactions = pd.read_excel(path)\n elif path.endswith(\".csv\"):\n transactions = pd.read_csv(path)\n\n return transactions", "def raw_get_transaction(cls, txid):\n r = requests.get(cls.MAIN_TX_API.format(txid), timeout=DEFAULT_TIMEOUT)\n r.raise_for_status() # pragma: no cover\n return r.json()", "def read(self, *args, **kwargs):\n pass", "async def test_get_genomic_tx_data(test_db, genomic_tx_data):\n resp = await test_db.get_genomic_tx_data(\"NM_004333.4\", (2145, 2145))\n assert resp == {\n \"gene\": \"BRAF\",\n \"strand\": \"-\",\n \"tx_pos_range\": (2053, 2188),\n \"alt_pos_range\": (140739811, 140739946),\n \"alt_aln_method\": \"splign\",\n \"tx_exon_id\": 780496,\n \"alt_exon_id\": 6619852,\n \"tx_ac\": \"NM_004333.4\",\n \"alt_ac\": \"NC_000007.14\",\n \"pos_change\": (92, 43),\n \"alt_pos_change_range\": (140739854, 140739854)\n }", "def _moneta_form_get_tx_from_data(self, data):\n # reference, trans_id, fingerprint = data.get('x_invoice_num'), data.get('x_trans_id'), data.get('x_MD5_Hash')\n reference, trans_id, fingerprint = data.get('MNT_TRANSACTION_ID'), data.get('MNT_OPERATION_ID'), data.get('MNT_SIGNATURE')\n if not reference: \n # убрано для тестирования\n # or not trans_id or not fingerprint:\n error_msg = 'Moneta.ru: received data with missing reference (%s) or trans_id (%s) or fingerprint (%s)' % (reference, trans_id, fingerprint)\n _logger.error(error_msg)\n raise ValidationError(error_msg)\n tx = self.search([('reference', '=', reference)])\n if not tx or len(tx) > 1:\n error_msg = 'Moneta.ru: получена информация по оплате счета %s' % (reference)\n if not tx:\n error_msg += '; такой счет не найден'\n else:\n error_msg += '; таких счетов несколько'\n _logger.error(error_msg)\n raise ValidationError(error_msg)\n return tx[0]", "def _parse_tx_infos(self, gtf_path):\n if os.path.exists('_tx_cache.bin'):\n with open('_tx_cache.bin', 'rb') as f:\n return pickle.load(f)\n result = []\n with gzip.open(gtf_path, 'rt') as f:\n for i, line in enumerate(f):\n if i % 1000 == 0:\n print('processed {}'.format(i), file=sys.stderr)\n if line.startswith('#'):\n continue\n if line.split('\\t', 3)[2] != 'transcript':\n continue\n record = GTFFeature.parse(line)\n if record.feature != 'transcript':\n continue\n result.append(\n TranscriptInfo(record.attrs['gene_id'],\n record.attrs['transcript_id'],\n 
record.attrs['transcript_type'],\n record.seqname,\n record.start,\n record.end))\n with open('_tx_cache.bin', 'wb') as g:\n pickle.dump(result, g)\n print(len(result), file=sys.stderr)\n return result", "def _wbt_read_data(self):\n\t\treaData = True\n\t\twbtIncomingData = False\n\t\twbtData = \"\"\n\t\twhile reaData:\n\t\t\twbtIncomingMsg = self._webots.stdout.readline().rstrip(\"\\n\").split()\n\t\t\tif \"COMM_OUT\" in wbtIncomingMsg: wbtIncomingData = True\n\t\t\telif \"END\" in wbtIncomingMsg: reaData = False\n\t\t\telif wbtIncomingData: wbtData += \" \".join(wbtIncomingMsg[1:])+\"\\n\"\n\t\t\tprint \"\\t\\t\\t\\tWebots: :\"+\" \".join(wbtIncomingMsg[1:])\n\t\treturn wbtData", "def _load_transactions(self):\r\n\t\tlogger.debug(\"Enter\")\r\n\t\ttry:\r\n\t\t\twith open(self._state_file, 'rb') as tmp:\r\n\t\t\t\tlogger.debug(\"There is a file.\")\r\n\t\t\t\ttmp_dict = pickle.load(tmp)\r\n\t\t\t\tlogger.debug(\"Dictionary loaded from file: %s\" % tmp_dict)\r\n\t\texcept IOError as e: # File doesn't exists\r\n\t\t\tlogger.debug(\"Exit - No file. Error message: %s\" % e)\r\n\t\t\ttmp_dict = {}\r\n\t\t\t\r\n\t\treturn tmp_dict", "def read(self) -> int:\n ...", "def _get_data(self):\n with open(self.filename, 'r') as fid:\n # we are not interested in the first line\n fid.readline()\n # second line\n line = fid.readline().strip()\n # the temperature is written in milli-degrees in the form\n # t=23456, but preceeded by a large HEX data dump in the form\n # 2c 00 4b 46 ff ff 0e 10 17 t=21875\n index = line.find('t=') + 2\n temperature = int(line[index:index + 6]) / 1e3\n time_now = self.get_timestamp()\n\n logging.debug(\n 'w1_temp: {0}, datetime: {1}, logger_id: {2}'.format(\n temperature,\n time_now,\n self.logger_id))\n\n ins = self.table(value=temperature,\n logger_id=self.logger_id,\n datetime=time_now)\n\n self.session.add(ins)\n self.session.commit()", "def _retrieve_transaction_table_input(self, execution_arn: str) -> Dict:\n response = self.client.get_execution_history(executionArn=execution_arn,maxResults=1000)\n events = response[\"events\"]\n record_purchase_entered_events = [\n event\n for event in events\n if event[\"type\"] == \"TaskStateEntered\" and event[\"stateEnteredEventDetails\"][\"name\"] == \"InsertPurchase\"\n ]\n\n record_refund_entered_events = [\n event\n for event in events\n if event[\"type\"] == \"TaskStateEntered\" and event[\"stateEnteredEventDetails\"][\"name\"] == \"InsertRefund\"\n ]\n\n record_error_entered_events = [\n event\n for event in events\n if event[\"type\"] == \"TaskStateEntered\" and event[\"stateEnteredEventDetails\"][\"name\"] == \"InsertError\"\n ]\n \n self.assertTrue(\n record_purchase_entered_events,\n \"Cannot find InsertPurchase TaskStateEntered event\",\n )\n self.assertTrue(\n record_refund_entered_events,\n \"Cannot find InsertPurchase TaskStateEntered event\",\n )\n self.assertTrue(\n record_error_entered_events,\n \"Cannot find InsertPurchase TaskStateEntered event\",\n )\n purchase_table_input=[] #PurchaseTable inputs\n refund_table_input=[] # RefundTable inputs\n error_table_input=[] # ErrorTable inputs\n for transaction in record_purchase_entered_events:\n transaction_input = json.loads(transaction[\"stateEnteredEventDetails\"][\"input\"])\n\n purchase_table_input.append(transaction_input)\n self.inserted_purchase_record_id.append(transaction_input[\"TransactionId\"]) # save this ID for cleaning up PurchaseTable\n\n for transaction in record_refund_entered_events:\n transaction_input = 
json.loads(transaction[\"stateEnteredEventDetails\"][\"input\"])\n\n refund_table_input.append(transaction_input)\n self.inserted_refund_record_id.append(transaction_input[\"TransactionId\"]) # save this ID for cleaning up RefundTable\n\n for transaction in record_error_entered_events:\n transaction_input = json.loads(transaction[\"stateEnteredEventDetails\"][\"input\"])\n\n error_table_input.append(transaction_input)\n self.inserted_error_record_id.append(transaction_input[\"TransactionId\"]) # save this ID for cleaning up ErrorTable\n\n return purchase_table_input, refund_table_input, error_table_input", "def get_data(self):", "def read(self) -> bytes:\n pass", "def _get_data(self, pd=False):\n r = requests.post(self._url, self._header)\n compressed_data = ZipFile(StringIO(r.content))\n data = {name: compressed_data.read(name)\n for name in compressed_data.namelist()}\n data = data[data.keys()[0]]\n if pd:\n data = pandas.read_csv(StringIO(data))\n return data\n data = data.split('\\n')\n data = [datum for datum in data if datum]\n return data", "def get_transaction_detail(payload):\n response = requests.post(url, data=payload)\n return response.json()", "def get_transaction(self, excludes_list):\n response = client.get(self.url, \"transactions\", {\"exclude_hash\": excludes_list})\n if response.status == 200:\n print(\"Transaction successfully received\")\n return Transaction.parse(response.data)\n elif response.status == 404:\n # print(\"no request to be received\")\n return None\n else:\n print(\"Unknown error while requesting transaction\")\n return None", "def get_data(self, path, owner='*'):\n sql = sa.select([history.c.data, history.c.content_type]).select_from(sa.join(history, active)).where(active.c.path == path)\n result = self.engine.execute(sql).first()\n if result:\n data, ctype = result\n if ctype == 'application/msgpack':\n import msgpack\n return msgpack.unpackb(data, encoding='utf8')\n else:\n return data, ctype", "def read_data(self):\n temperature_data = RS485.read_temperature(self.data_path)\n humidity_data = RS485.read_humidity(self.data_path)\n moisture_data = RH_010_GN.read_moisture(self.data_path)\n o2_data = LB_856.read_o2(self.data_path)\n co2_data = LB_856.read_co2(self.data_path)\n\n self.data = [temperature_data, humidity_data, moisture_data, o2_data, co2_data]", "def get_df_transactions():\n\n _, res = DBX.files_download(c.io.FILE_TRANSACTIONS)\n return pd.read_excel(io.BytesIO(res.content), index_col=0)", "def transaction_data(self):\n return list(map(lambda transaction:transaction.to_json(), self.transaction_map.values()))", "def read():\n # checks if existing alarms exist and places them in a list for faster data manipulation\n event_log(\"reading event database....\",\"\")\n data_file = open(read_json(\"Event_database\"), \"r+\")\n temp_list = []\n if os.stat(read_json(\"Event_database\")).st_size > 0:\n for z in data_file:#reads each line of file\n temp = \"\"\n for element in z:\n if element == \",\":#looks for comma as its used for seperating data in file\n temp_list.append(temp)\n temp = \"\"\n else:\n temp = temp + element\n Events_list.append(temp_list.copy())\n if math.floor(time.time()) - (convert_to_epoch(temp_list[1])) < 0:#determines if event is not expired\n events.enter(-(math.floor(time.time()) - (convert_to_epoch(temp_list[1]))), 1, expired_alarm)\n else: # already expired\n expired_alarm()\n temp_list.clear()\n data_file.close()", "def getStockData():\n pass", "def read_file(self):\n self.write({\"datas\": self.choose_file})\n 
self._cr.commit()\n import_file = BytesIO(base64.decodestring(self.datas))\n file_read = StringIO(import_file.read().decode())\n reader = csv.DictReader(file_read, delimiter=\",\")\n return reader", "def loadData(self,ins):\n self.isDeleted = False\n #--Read subrecords\n bytesRead = 0\n while bytesRead < self.size:\n (name,size) = ins.unpackSubHeader('BOOK')\n srData = ins.read(size,'BOOK.'+name)\n bytesRead += 8+size\n if name == 'NAME': self.id = cstrip(srData)\n elif name == 'MODL': self.model = cstrip(srData)\n elif name == 'FNAM': self.title = cstrip(srData)\n elif name == 'BKDT':\n (self.weight,self.value,self.isScroll,self.teaches,self.enchantPoints\n ) = struct.unpack('f4i',srData)\n elif name == 'SCRI': self.script = cstrip(srData)\n elif name == 'ITEX': self.icon = cstrip(srData)\n elif name == 'TEXT': self.text = cstrip(srData)\n elif name == 'ENAM': self.enchant = cstrip(srData)\n #--Deleted?\n elif name == 'DELE': self.isDeleted = True\n #--Bad record?\n else: \n raise Tes3Error(self.inName,_('Extraneous subrecord (%s) in %s record.') \n % (name,self.name))", "def Read(self, *args, **kwargs):\n pass", "def s3_read_data(self):\n\n self.k.open()\n self.k.read()", "def loadData(self,ins):\n #--Read subrecords\n bytesRead = 0\n objectId = None\n while bytesRead < self.size:\n (name,size) = ins.unpackSubHeader(self.name)\n #print name,size\n bytesRead += 8+size\n subData = ins.read(size, self.name+'.'+name)\n #--Id?\n if name == 'NAME':\n self.id = cstrip(subData)\n #--Flags\n elif name == 'DATA':\n flags = struct.unpack('i',subData)[0]\n if self.name == 'LEVC':\n self.calcFromAllLevels = (flags & 1) == 1\n else:\n self.calcForEachItem = (flags & 1) == 1\n self.calcFromAllLevels = (flags & 2) == 2\n #--Chance None\n elif name == 'NNAM':\n self.chanceNone = struct.unpack('B',subData)[0]\n #--Count\n elif name == 'INDX':\n self.count = struct.unpack('i',subData)[0]\n #--Creature/Item Id?\n elif name == 'CNAM' or name == 'INAM':\n objectId = cstrip(subData)\n #--PC Level\n elif name == 'INTV':\n pcLevel = struct.unpack('h',subData)[0]\n self.entries.append((pcLevel,objectId))\n objectId = None\n #--Deleted?\n elif name == 'DELE': \n self.isDeleted = True\n #--Else\n else: raise Tes3UnknownSubRecord(self.inName,name,self.name)\n #--No id?\n if not self.id:\n raise Tes3Error(self.inName,_('No id for %s record.') % (self.name,))\n #--Bad count?\n if self.count != len(self.entries):\n self.count = len(self.entries)\n self.setChanged()", "def in_transaction(self):\n # We likely just changed data - give it a second to catch up\n time.sleep(0.1) # I think I keep reading journal watermark too soon without this\n \n # Get relevant data\n water_mark = pos.read_journal_watermark()\n self.log.info(f\"Watermark: [{water_mark}]\")\n balance = pos.read_balance()['Total']\n self.log.info(f\"Balance: [{balance}]\")\n \n # Decide if we need more checks based on watermark\n if water_mark == \"TRANSACTION IN PROGRESS\":\n self.log.info(\"In Transaction: In Transaction Watermark found\")\n return True\n elif water_mark == \"TRANSACTION COMPLETE\" or water_mark == \"TRANSACTION VOIDED\":\n self.log.info(\"Not in Transaction: Transaction Complete/Voided watermarks found\")\n return False\n else:\n # No watermark - decide based on balance\n if balance == \"$0.00\":\n self.log.info(\"Not in Transaction: $0 balance with no watermark\")\n return False\n else:\n self.log.info(\"In Transaction: Non-$0 balance with no watermark\")\n return True", "async def test_txn_get(self):\n 
self.stream.preset_response(transaction=Mocks.make_txns('1')[0])\n\n response = await self.get_assert_200('/transactions/1')\n self.stream.assert_valid_request_sent(transaction_id='1')\n\n self.assertNotIn('head', response)\n self.assert_has_valid_link(response, '/transactions/1')\n self.assertIn('data', response)\n self.assert_txns_well_formed(response['data'], '1')", "def read_all(self):\r\n pass", "def get_data():\n pass", "def loadData(self,ins):\n #--Read subrecords\n bytesRead = 0\n while bytesRead < self.size:\n (name,size) = ins.unpackSubHeader('GLOB')\n srData = ins.read(size,'GLOB.'+name)\n bytesRead += 8+size\n if name == 'NAME': self.id = cstrip(srData)\n elif name == 'FNAM': self.type = srData\n elif name == 'FLTV': self.value = struct.unpack('f',srData)\n #--Deleted?\n elif name == 'DELE': self.isDeleted = True\n #--Bad record?\n else: raise Tes3UnknownSubRecord(self.inName,name,self.name)", "def read(self, **kwargs):\n pass", "def read(self):\n\t\treturn self.input_file.read(1)", "def getData(self):\n self.ser.write(b'g')\n readString = self.ser.readline()\n print(readString)\n readString = readString.decode(\"utf-8\")\n splittedString = readString.split('\\t')\n for i, num in enumerate(splittedString):\n try:\n splittedString[i] = int(float(num))\n except ValueError:\n pass\n self.accString.set('Accleration\\nX: %.5f\\nY: %.5f\\nZ: %.5f' %\n (splittedString[0], splittedString[1],\n splittedString[2]))\n self.logFile.write(readString)\n self.comJob = root.after(10, self.getData)", "def ReadData( fName = '/tmp/chartdata' ):\n blocks = common.ReadDataFromFile( fName )\n\n return blocks", "def read(self) -> int:", "def Read(self):\n try:\n file_object = self._zip_file.open(self._stream_name, mode='r')\n except KeyError as exception:\n raise IOError(\n 'Unable to open stream with error: {0!s}'.format(exception))\n\n try:\n entry_data = file_object.read(self._TABLE_ENTRY_SIZE)\n while entry_data:\n table_entry = self._TABLE_ENTRY.parse(entry_data)\n\n self._offsets.append(table_entry.offset)\n entry_data = file_object.read(self._TABLE_ENTRY_SIZE)\n\n except construct.FieldError as exception:\n raise IOError(\n 'Unable to read table entry with error: {0!s}'.format(exception))\n\n finally:\n file_object.close()", "def read(self):\n pass", "def get_data(self):\n try:\n data_string = self.ser.readline().decode()\n except UnicodeDecodeError as e:\n return self.get_data()\n \n if not data_string: # check for empty string\n return self.get_data()\n \n if data_string[0] == '|' and data_string[-1] == '\\n' and\\\n self.reset_confirmed:\n # if the data_string is valid, process it\n try:\n data_string = data_string.strip() \n data = data_string.split(',')\n assert len(data) == 7, \"Bad data Length\" \n data = [float(val) for val in data[1:]]\n data[0] /= 1000\n if self.ser.in_waiting: self.ser.reset_input_buffer()\n return data\n except (AssertionError, ValueError) as e:\n print(\"Error:\", type(e), e)\n if self.ser.in_waiting: self.ser.reset_input_buffer()\n return self.get_data()\n\n\n elif data_string[0] == '+' and data_string[-1] == '\\n' and\\\n self.reset_confirmed:\n # if the data_string is a valid time stamp, process it\n # self.system_timestamp = \"\\nSystem start time is: \"\\\n # \"%s\" % strftime(\"%Y/%m/%d %H:%M:%S\", localtime())\n self.arduino_timestamp = data_string.strip()\n print(self.arduino_timestamp)\n return self.get_data()\n \n elif data_string[0] == '/' and data_string[-1] == '\\n':\n # if string begins with / then it is a debug message and should\n # just be 
returned\n if \"setup finished\" in data_string.lower(): \n self.reset_confirmed = True\n print(data_string.strip())\n return self.get_data()\n else:\n # if the data_string is invalid try again\n return self.get_data()", "def read_content(self):\n pass", "def readAll():\n readTemperature()\n readAirHumidity()\n readSoilHumidity()\n print(\"Success! Temperature and humidity inserted into database.\\n\")\n # DEBUG: Uncomment here for debbuging\n # print(\"Temperatura: \" + read_temperature)\n # print(\"Umidade: \" + read_humidity)", "def tcga_read_data():\n print(\"Downloading data ...\")\n try:\n os.mkdir(cache_directory)\n except FileExistsError:\n pass\n \n brca_path= os.path.join(cache_directory, expression_matrix_name + \".tsv.gz\")\n brca_clin_path = os.path.join(cache_directory, phenotype_name + \".tsv.gz\")\n try:\n brca = pd.read_csv(brca_path, sep=\"\\t\",index_col=0)\n brca_clin = pd.read_csv(brca_clin_path, sep=\"\\t\",index_col=0)\n return brca, brca_clin\n except:\n pass\n\n # Download TCGA data\n tcga_path = os.path.join(cache_directory, tcga_name + \".tar.gz\")\n download_file_if_not_present(tcga_url, tcga_path)\n print(\"Tar-file inplace, extracting tables.\")\n\n # Decompress data into tables\n tf = tarfile.open(tcga_path)\n tf.extract(expression_name, cache_directory)\n tf.extract(clinical_name, cache_directory)\n\n # def get_expression_data(self, path, file):\n df = pd.read_csv(os.path.join(cache_directory, expression_name), sep=\"\\t\")\n df.dropna(axis=0, how='any', inplace=True)\n df.set_index('Entrez_Gene_Id', inplace=True)\n # df.drop(columns=['Unnamed: 0', 'Entrez_Gene_Id'], inplace=True)\n # df.drop(columns=['Entrez_Gene_Id'], inplace=True)\n df.drop(columns=['Hugo_Symbol'], inplace=True)\n brca = df.reindex(sorted(df.columns), axis=1)\n\n # get_clinical_data(brca_clin_path,\"data_clinical_sample.txt\")\n df = pd.read_csv(os.path.join(cache_directory, clinical_name), sep=\"\\t\").T\n df.columns = df.loc[\"Sample Identifier\"]\n df.drop(columns=[\"A unique sample identifier.\",\"STRING\",\"1\",\"SAMPLE_ID\"], inplace=True,errors='ignore')\n if 'TCGA-BH-A1ES-01' in df.columns:\n df.drop(columns=['TCGA-BH-A1ES-01'], inplace=True)\n df.drop(index=[\"Unnamed: 0\",\"#Patient Identifier\",\"Sample Identifier\",\"Other Sample ID\"], inplace=True,errors='ignore')\n brca_clin = df.reindex(sorted(df.columns), axis=1)\n\n brca_clin, brca_clin = tcga_tn_preprocess(brca, brca_clin)\n\n # Put the extracted matrixes to the file cashe, so we do not have to do this again if procedure is repeated.\n brca.to_csv(brca_path, sep=\"\\t\")\n brca_clin.to_csv(brca_clin_path, sep=\"\\t\")\n return brca, brca_clin", "def part_read_contract(cid):\r\n take_pass = request.values.get('take_passwd', '')\r\n with engine.with_session() as ss:\r\n cur_contract = ss.query(LxContract).get(cid)\r\n if not sha256_crypt.verify(take_pass, cur_contract.take_passwd):\r\n return jsonify({'success': False, 'errorMsg': constants.ERROR_CODE[\r\n 'WRONG_PASS_WORD']})\r\n now_version = cur_contract.version\r\n now_contract_file = cur_contract.__getattribute__(\r\n 'contract_v' + str(now_version))\r\n # print now_contract_file.fpath\r\n contract_content = file_biz.get_contract_content(\r\n now_contract_file.fpath)\r\n return jsonify({'success': True, 'data': contract_content})", "def get_data ( self ):\n return self._data_pntr.ReadAsArray()", "def get_data ( self ):\n return self._data_pntr.ReadAsArray()", "def get_transaction(self, i):\n\t\treturn self.transactions[i]", "def get_transaction(self, i):\n\t\treturn 
self.transactions[i]" ]
[ "0.70743924", "0.69511247", "0.63940114", "0.637214", "0.6344855", "0.6343588", "0.62778604", "0.62692773", "0.62457216", "0.62457216", "0.61528605", "0.6124101", "0.611082", "0.6027867", "0.6005437", "0.5953495", "0.59025943", "0.5887277", "0.5887277", "0.5887277", "0.5878449", "0.5876129", "0.5862847", "0.5810319", "0.5751171", "0.5692086", "0.5690625", "0.56339544", "0.5624202", "0.5620236", "0.56047404", "0.56007", "0.55868655", "0.5583567", "0.5581002", "0.5572818", "0.556811", "0.5557138", "0.55189675", "0.55072206", "0.55072206", "0.55068", "0.5498609", "0.54941154", "0.54682547", "0.5451904", "0.544884", "0.5440837", "0.5440603", "0.544005", "0.5417733", "0.5393172", "0.5388274", "0.5383649", "0.53721356", "0.53603506", "0.53537875", "0.5346502", "0.5346005", "0.53323", "0.5331559", "0.53270835", "0.53267884", "0.53266567", "0.5314355", "0.5306654", "0.5304274", "0.530136", "0.5297635", "0.52969736", "0.5295792", "0.529475", "0.5288073", "0.5286853", "0.52862126", "0.5285449", "0.52789253", "0.5277577", "0.5270449", "0.5269044", "0.52682185", "0.52674764", "0.5266665", "0.52533627", "0.5249658", "0.52484965", "0.5248354", "0.52455604", "0.5244141", "0.523046", "0.5227522", "0.52202195", "0.52107626", "0.52081424", "0.5206862", "0.51880443", "0.5177913", "0.5177913", "0.51753616", "0.51753616" ]
0.56860703
27
This function reads the meteo for a given day
def read_meteo_day(filename=None):
    if not filename:
        filename = settings.METEO_DAY_FILENAME

    return pd.read_csv(filename, sep=';', parse_dates=[0])
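# Usage sketch, assuming a `settings` module that defines METEO_DAY_FILENAME
# and pandas imported as `pd`; the first CSV column is parsed as dates:
#
#   df = read_meteo_day()                 # read the default daily meteo file
#   df = read_meteo_day('meteo_day.csv')  # 'meteo_day.csv' is a hypothetical path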
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def findReadData(day,scope,chan,shot):\n return readData(conf.dataDir + \"%d_01_2013_osc%d/C%dosc%d-%05d.txt\" % (day, scope, chan, scope, shot),\n conf.timeDelay[scope,chan],\n conf.ampMult[scope,chan])", "def get_weather_on_date(date, meteo_day, store_id):\n return meteo_day[(meteo_day['STO_EAN'] == store_id) & (meteo_day['DATE_KEY'] == date)]", "def get_meteo_files(self, meteo_dir, meteo_interval):\n meteopatterns = []\n meteofiles = []\n\n orig_dir = os.getcwd()\n meteo_interval = meteo_interval[0].lower()\n\n mon_dict = {'1': 'jan', '2': 'feb', '3': 'mar', '4': 'apr',\n '5': 'may', '6': 'jun', '7': 'jul', '8': 'aug',\n '9': 'sep', '10': 'oct', '11': 'nov', '12': 'dec'}\n\n with open(self.fullpath, 'r') as trajfile:\n contents = trajfile.readlines()\n\n for line in contents[1:]:\n if 'OMEGA' in line:\n break\n\n parts = line.split()[1:4]\n\n year = \"{:02}\".format(int(parts[0]))\n month = mon_dict[parts[1]]\n day = _day2filenum(meteo_interval, parts[2])\n\n filestring = '*' + month + '*' + year + '*' + day\n\n meteopatterns.append(filestring)\n\n try:\n os.chdir(meteo_dir)\n\n _, _, files = next(os.walk('.'))\n\n for pattern in meteopatterns:\n for each_file in files:\n if fnmatch.fnmatch(each_file, pattern):\n meteofiles.append(each_file)\n break\n\n finally:\n os.chdir(orig_dir)\n\n if len(meteofiles) == 0:\n raise OSError('No meteorology files found.')\n print(len(meteofiles))\n\n self.meteorology_files = meteofiles", "def readData(self):\n dayToday = self.currentDay()\n \n loopDbInput = True\n \n while loopDbInput == True: #While there is an error\n try:\n self.c.execute(\"SELECT * FROM Enigma WHERE Datum = \" + str(dayToday)) #Select all the data in the record for the current day\n data = self.c.fetchall() #Store the selected data in this variable\n except:\n print('Error reading database. 
Please choose another database.') #Inform the user that there is an error connecting to the database \n self.reset() #Prompt the user to establish a new database connection\n else:\n loopDbInput = False #Otherwise continue with the program\n return data #And return the daily settings ", "def one_emotion_specific_day(self, emotion, day):\n return self._one_emotion(self.many_emotions_specific_day(day), emotion)", "def meteo(station='caqc0177'):\r\n long=getLongForecast(station)\r\n return dict(\r\n title= long[0] + \" - \" + station,\r\n message=datetime.now(),\r\n year=datetime.now().year,\r\n longTerm=long[1],\r\n shortTerm=getShortForecast(station)\r\n )", "async def daily(self, ctx: commands.Context):\n self.check_if_exist(ctx.guild)\n\n if ctx.invoked_subcommand == None:\n await ctx.reply(\"Options: `channel`, `timezone`, `ping`\")", "def _first_good_date(self, day):\n count = 0\n while True:\n try:\n self.data.loc[day - timedelta(count)]\n return day - timedelta(count)\n except KeyError:\n count += 1", "def day(self):\n data = await self.get_data(LIGHT)\n return data['day']", "def read_meteo_week(filename=None):\n if not filename:\n filename = settings.METEO_WEEK_FILENAME\n\n # This function is the date parser for the week\n parser = lambda x: pd.datetime.strptime(x, '%Y%W')\n\n return pd.read_csv(filename, sep=';', parse_dates=[4], date_parser=parser)", "def get_temperature(self, day):\r\n return self.temperatures[day]", "def get_outdoor_data(temp_dir,site):\n if site == 'berk':\n files_od = glob(join(temp_dir,'outdoor','20*.xlsx'))\n elif site == 'bus':\n files_od = glob(join(temp_dir,'outdoor','Busara*.csv'))\n else:\n raise NameError(site)\n\n dfs = []\n for f in files_od:\n if site == 'berk':\n this_df = pd.read_excel(f,sheet_name=0,usecols='B:D',index_col=0,parse_dates=True, header=1)\n elif site == 'bus':\n this_df = pd.read_csv(f,usecols=[0,1,2],index_col=0,parse_dates=True,header=2)\n \n # drop missing values that prevented conversion to float type\n if this_df.iloc[:,0].dtype != np.float64:\n this_df = this_df[this_df.iloc[:,0] != ' ']\n this_df = this_df.astype(np.float64)\n\n # correct for weird timezones in berkeley datalogger\n this_df = correct_tz(this_df,site)\n \n this_df.columns = ['T','RH']\n this_df.index.name = 'time'\n\n # convert to celsius\n this_df['T'] = (this_df['T'] - 32) * 5/9\n dfs.append(this_df)\n \n df_od = pd.concat(dfs)\n\n # drop duplicated measurements\n df_od = df_od[~df_od.index.duplicated(keep='last')].sort_index()\n \n # separate out into daily min,mean,max\n groups = df_od.groupby(df_od.index.date)\n dfs_od = {'all':df_od,\n 'min': groups.min(),\n 'mean': groups.mean(),\n 'max': groups.max()}\n \n for i in ['min','mean','max']:\n # remove first and last day to ignore days where we did not get full recording\n dfs_od[i] = dfs_od[i].iloc[1:-1,:]\n \n # name index so that we can merge onto multiIndex'd dataframe\n dfs_od[i].index.name = 'date'\n \n return dfs_od", "def get_zakopane_daily_weather():\n zakopane = FiveDaysWeatherForecast(location.get(\"zakopane\", \"\"))\n zakopane_weather_detail = zakopane.get_weather_details()\n zakopane_daily_weather_detail = []\n for data in zakopane_weather_detail:\n zakopane_daily_weather_detail.append(data)\n return zakopane_daily_weather_detail", "def get_day():\n return handle_invalid_inputs(question_4, days)", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n info_temp = infodata.split('\\n')\n \n # ... 
lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n info_temp = infodata.split('\\n')\n \n # ... lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n info_temp = infodata.split('\\n')\n \n # ... lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n info_temp = infodata.split('\\n')\n \n # ... 
lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n info_temp = infodata.split('\\n')\n \n # ... lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... 
lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def read_weather_data():\n # Check if UTC to gmt+1 conversion is being handled correctly\n weather = pd.read_csv('//datc//opschaler//weather_data//knmi_10_min_raw_data//output//df_combined_uncleaned.csv',\n delimiter='\\t', comment='#',\n parse_dates=['datetime'])\n weather = weather.set_index(['datetime'])\n return weather", "def get_dynamic_data(self, today, settings): # DOY=None, year_doy=None\n\n name_key = 'name_fmt'\n loc_key = 'dir_loc'\n dt_key = 'dt_fmt'\n clim_key = 'climatology'\n doy = today.timetuple().tm_yday\n\n print('settings', settings)\n\n if settings[clim_key]:\n # for climatology then we expect a DOY format\n if settings[dt_key] == 'doy':\n dynamic_key = '{:03d}'.format(doy)\n else:\n print('{} is set to climatology but date format from config_dict is {}'.format(settings[name_key],\n settings[dt_key]))\n sys.exit(0)\n elif settings[dt_key] == 'YYYYdoy':\n dynamic_key = '{}{:03d}'.format(today.year, doy)\n else:\n print('Hey user, the format of the dt_fmt configuration you gave: {} is not supported at '\n 'this time'.format(settings[dt_key]))\n sys.exit(0)\n\n fpath = os.path.join(settings[loc_key], settings[name_key].format(dynamic_key))\n return fpath", "def day(d):\n\t\tx = db.cquery(\"day\",d)\n\t\tprint \"Total:\", x[0]\n\t\tf = raw_input(\"[L]ist [N]ew overview or [B]ack to home \").lower()\n\t\tif f == \"l\":\n\t\t\tfor i in x[1]:\n\t\t\t\tprint ui.statsid(), i[0], i[1], \" \", ui.statstimein(), i[2], ui.statstimeout(), i[3]\n\t\t\traw_input(\"[Enter] to go back to search\")\n\t\t\thome_stats()\n\t\telif f == \"n\":\n\t\t\thome_stats()\n\t\telif f == \"b\":\n\t\t\thome()\n\t\telse:\n\t\t\tpass", "def load_data(city, month, day):\n df= pd.read_csv(CITY_DATA[city])\n df['Start Time']= pd.to_datetime(df['Start Time'])\n df['DOW'] = df['Start Time'].dt.weekday\n df['month'] = df['Start Time'].dt.month\n\n if month != 'all':\n months = ['january','february', 'march', 'april', 'may', 'june']\n month = months.index(month) + 1\n df = df[df['month'] == month]\n\n if day != 'all':\n df= df[df['DOW'] == day]\n\n return df", "def read_weo(path: Path, date:str) -> sd.StandardData:\n\tcolumns = sd.RequiredColumns(\n\t\tregion_name_column = 'Country',\n\t\tregion_code_column = 'ISO',\n\t\tcode_column = 'WEO Subject Code',\n\t\tname_column = 'Subject Descriptor',\n\t\tnote_column = 'Country/Series-specific Notes',\n\t\tscale_column = 'Scale',\n\t\tunits_column = 'Units',\n\t\tdescription_column = 'Subject Notes',\n\t\ttag_column = None\n\t)\n\n\tyear, month = date.split('-')\n\t# datasets are saved according to how many previous issues have been published the same year.\n\tmonth = '01' if int(month) < 5 else '02'\n\tdate = timetools.Timestamp(date+'-01')\n\treport = sd.StandardReport(\n\t\tname = 'World Economic Outlook',\n\t\tagency = 'International Monetary Fund',\n\t\turl = f'https://www.imf.org/external/pubs/ft/weo/{year}/{month:>02d}/weodata/download.aspx',\n\t\tdate = date\n\t)\n\n\treturn read_standard_table(path, columns)", "def produce_daily_summary(file, day): \n\n print(day)\n #prints day of report\n the_file = open(file)\n #opens file for use in function\n for line in the_file:\n #iterates over file to separate lines\n line = line.rstrip()\n #removes extra characters at end of lines\n words = 
line.split('|')\n #splits the lines of string for data variable assignment\n\n melon = words[0]\n count = words[1]\n amount = words[2]\n #assigns valiables to words at specified index\n\n print(f\"Delivered {count} {melon}s for total of ${amount}.\")\n #prints \"f\" string with variables printed in\n \n the_file.close()\n #closes file", "def get_observations_24hours(path=''):\n conn = None\n try:\n params = utils.config(path)\n conn = psycopg2.connect(**params)\n cur = conn.cursor()\n cur.execute(\"SELECT timestamp FROM ilm_ilm WHERE timestamp > now() - interval '1 day' ORDER BY timestamp\")\n print(\"Kandeid: \", cur.rowcount)\n row = cur.fetchone()\n\n while row is not None:\n # print(row)\n d = row[0]\n print(utils.utc2eesti_aeg(d))\n row = cur.fetchone()\n\n cur.close()\n except (Exception, psycopg2.DatabaseError) as error:\n print(error)\n finally:\n if conn is not None:\n conn.close()", "def scrape(self, day):\n url = '{base_url}/{month_day}'.format(\n base_url=self.url,\n month_day=day.strftime('%B_%-d')\n )\n\n response = requests.get(url)\n\n return day, response", "def read_day():\n\twhile True:\n\t\t_day = input(\"Introduceti ziua: \")\n\t\ttry:\n\t\t\t_day = int(_day)\n\t\t\tif (not is_in_range(_day, 0, VALID_DAY)):\n\t\t\t\tprint(\"Ziua invalida.\")\n\t\t\telse:\n\t\t\t\tbreak\n\t\texcept ValueError:\n\t\t\tprint(\"Ziua invalida, introduceti un intreg.\")\n\treturn (_day)", "def load_daily_data():\n return pd.read_csv(os.path.join('data', 'raw', 'full_grouped.csv'))", "def getTodaysWeather(self, keyword, temp):\n\n\t\t# Variables\n\t\tweather = {} \n\t\tfio = self.helper.getFio(keyword, temp) # Getting fio object\n\t\t\n\t\t# Getting todays weather data and populating the dictionary\n\t\tif fio.has_daily() is True and fio.has_hourly() is True:\n\t\t daily = FIODaily.FIODaily(fio)\n\t\t hourly = FIOHourly.FIOHourly(fio)\n\t\t for day in xrange(0, 1):\n\t\t\t\tfor item in daily.get_day(day).keys():\n\t\t\t\t\tif item == \"temperatureMin\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"summary\":\n\t\t\t\t\t\tweather[item] = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"temperatureMax\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"windSpeed\":\n\t\t\t\t\t\twindSpeed = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"windBearing\":\n\t\t\t\t\t\twindBearing = unicode(daily.get_day(day)[item])\n\t\t\t\t\t\twindBearing = self.helper.convertWindBearing(windBearing)\n\t\t\t\t\tif item == \"sunsetTime\":\n\t\t\t\t\t\tweather[item] = self.helper.getDateForWeather(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"sunriseTime\":\n\t\t\t\t\t\tweather[item] = self.helper.getDateForWeather(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"precipProbability\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\tweather[\"wind\"] = windBearing + \" \" + windSpeed + \" mph\"\n\t\t\t\tfor item in hourly.get_hour(day).keys():\n\t\t\t\t\tif item == \"summary\":\n\t\t\t\t\t\tweather[\"current\"] = unicode(hourly.get_hour(0)[item])\n\t\t\t\t\tif item == \"temperature\":\n\t\t\t\t\t\tweather[item] = str(hourly.get_hour(0)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"icon\":\n\t\t\t\t\t\tweather[item] = unicode(hourly.get_hour(0)[item])\n\t\t\t\t\tif item == \"cloudCover\":\n\t\t\t\t\t\tweather[item] = str(hourly.get_hour(0)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\tweather[\"town\"] = 
self.helper.getCoords(keyword)[2]\n\t\telse:\n\t\t\treturn 'No Todays data'\n\n\t\treturn weather", "def extra_tasks_for_today(self):\n localtz = tzlocal()\n datetime_today = datetime.fromtimestamp(rospy.get_rostime().to_sec(), tz=localtz)\n day_today = datetime_today.strftime(\"%A\")\n date_today = datetime_today.date()\n rospy.loginfo('Looking for daily tasks for %s, %s' % (day_today, date_today))\n \n eight_forty_five= time(8,45, tzinfo=localtz)\n eleven_thirty= time(11,30, tzinfo=localtz)\n fourteen_thirty=time(14,30, tzinfo=localtz)\n seventeen_fifteen= time(17,15, tzinfo=localtz)\n past_bedtime = time(23,59, tzinfo=localtz)\n \n # day_end = seventeen_fifteen\n day_end = past_bedtime\n\n\n\n metric_wps=['WayPoint13', 'WayPoint18', 'WayPoint9','WayPoint11','WayPoint5','WayPoint3'] \n object_learn_wps=['WayPoint13', 'WayPoint18', 'WayPoint9', 'WayPoint11'] \n object_search_wps=['WayPoint1', 'WayPoint2', 'WayPoint3']\n door_wps=['WayPoint7', 'WayPoint4']\n \n morning_start = eight_forty_five\n morning_duration = delta_between(eleven_thirty, morning_start)\n \n lunch_start = eleven_thirty\n lunch_duration = delta_between(fourteen_thirty, lunch_start)\n\n afternoon_start = fourteen_thirty\n afternoon_duration = delta_between(day_end, afternoon_start)\n\n tasks = []\n \n #door checks at fixed times (to evaluate system ability to do stuff at corret times)\n task=create_door_check_task(door_wps[0])\n start_time=datetime.combine(date_today, time(10,30, tzinfo=localtz))\n end_time = start_time+timedelta(seconds=30)\n task.start_after=rospy.Time(unix_time(start_time))\n task.end_before=rospy.Time(unix_time(end_time))\n tasks.append(task)\n \n task=create_door_check_task(door_wps[0])\n start_time=datetime.combine(date_today, time(13,30, tzinfo=localtz))\n end_time = start_time+timedelta(seconds=30)\n task.start_after=rospy.Time(unix_time(start_time))\n task.end_before=rospy.Time(unix_time(end_time))\n tasks.append(task)\n \n task=create_door_check_task(door_wps[0])\n start_time=datetime.combine(date_today, time(16,30, tzinfo=localtz))\n end_time = start_time+timedelta(seconds=30)\n task.start_after=rospy.Time(unix_time(start_time))\n task.end_before=rospy.Time(unix_time(end_time))\n tasks.append(task)\n \n \n #random tasks\n for i in range(4):\n #morning\n task=create_metric_map_task(random.choice(metric_wps))\n self.set_random_task_time(task, date_today, morning_start, morning_duration)\n tasks.append(task)\n \n task=create_door_check_task(random.choice(door_wps))\n self.set_random_task_time(task, date_today, morning_start, morning_duration)\n tasks.append(task)\n \n if i<3:\n task=create_object_learn_task(random.choice(object_learn_wps))\n self.set_random_task_time(task, date_today, morning_start, morning_duration)\n tasks.append(task)\n \n task=create_object_search_task(random.choice(object_search_wps))\n self.set_random_task_time(task, date_today, morning_start, morning_duration)\n tasks.append(task)\n \n #lunch (less tasks because we want the robot mostly learning people tracks)\n if i<1:\n task=create_metric_map_task(random.choice(metric_wps))\n self.set_random_task_time(task, date_today, lunch_start, lunch_duration)\n tasks.append(task)\n \n task=create_door_check_task(random.choice(door_wps))\n self.set_random_task_time(task, date_today, lunch_start, lunch_duration)\n tasks.append(task)\n \n task=create_object_learn_task(random.choice(object_learn_wps))\n self.set_random_task_time(task, date_today, lunch_start, lunch_duration)\n tasks.append(task)\n \n 
task=create_object_search_task(random.choice(object_search_wps))\n self.set_random_task_time(task, date_today, lunch_start, lunch_duration)\n tasks.append(task)\n \n \n #afternoon\n task=create_metric_map_task(random.choice(metric_wps))\n self.set_random_task_time(task, date_today, afternoon_start, afternoon_duration)\n tasks.append(task)\n \n task=create_door_check_task(random.choice(door_wps))\n self.set_random_task_time(task, date_today, afternoon_start, afternoon_duration)\n tasks.append(task)\n \n if i<3:\n task=create_object_learn_task(random.choice(object_learn_wps))\n self.set_random_task_time(task, date_today, afternoon_start, afternoon_duration)\n tasks.append(task)\n \n task=create_object_search_task(random.choice(object_search_wps))\n self.set_random_task_time(task, date_today, afternoon_start, afternoon_duration)\n tasks.append(task)\n return tasks", "def load_data(city, month, day):\n df = pd.read_csv(CITY_DATA[city])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n\n df['month'] = df['Start Time'].dt.month\n df['day'] = df['Start Time'].dt.weekday_name\n df['hour'] = df['Start Time'].dt.hour\n\n \n if month != 'all':\n # use the index of the months list to get the corresponding int\n months = ['january', 'february', 'march', 'april', 'may', 'june']\n month = months.index(month) + 1\n\n df = df[df['month'] == month]\n\n if day != 'all':\n df = df[df['day'] == day.title()]\n\n return df", "def do_last_reading(self, device):\n if hasattr(self, device):\n print_info('*' * 80)\n print(\"{}:\".format(device.upper()))\n\n rec = None\n if device == 'weather':\n rec = self.db.current.find_one({'type': 'weather'})\n elif device == 'environment':\n rec = self.db.current.find_one({'type': 'environment'})\n\n pprint(rec)\n print_info('*' * 80)", "def load_data(city, month, day):\n \n if city == 'chicago':\n filename = 'chicago.csv'\n elif city == 'new York':\n filename = 'new_york_city.csv'\n elif city == 'washington':\n filename = 'washington.csv'\n else:\n return -1\n \n df = pd.read_csv(filename)\n \n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['End Time'] = pd.to_datetime(df['End Time'])\n \n if month != 'all':\n df = df[df['Start Time'].dt.month == month]\n\n if day != 'all':\n df = df[df['Start Time'].dt.weekday == day]\n \n return df", "def time_search(year, month, day):\n entries = []\n cur = g.db.execute(\n \"\"\"\n SELECT entries.location FROM entries\n WHERE CAST(strftime('%Y',entries.published)AS INT) = {year}\n AND CAST(strftime('%m',entries.published)AS INT) = {month}\n AND CAST(strftime('%d',entries.published)AS INT) = {day}\n ORDER BY entries.published DESC\n \"\"\".format(year=int(year), month=int(month), day=int(day)))\n\n for (row,) in cur.fetchall():\n if os.path.exists(row+\".md\"):\n entries.append(file_parser(row+\".md\"))\n return render_template('blog_entries.html', entries=entries)", "def get_daily_obj(user='1'):\n daily_id = ''\n submissions = [x for x in sub if x.stickied]\n for submission in submissions:\n if 'daily' in submission.title.lower():\n # we will take the post id and write it to a file\n # then we will have the total number of files\n daily_id = submission\n if user == '1':\n return reddit1.submission(id=daily_id)\n elif user == '2':\n return reddit2.submission(id=daily_id)", "def Out_day(cal, y, m, d):\r\n flag = 0\r\n for i in cal:\r\n if y == i['year'] and m == i['month'] and d == i['day']:\r\n print \"\\n-------------------------------\\nAt \", y, m, d, \\\r\n \"\\n\", i['weather'], i['temperature'], ' wimd', i['wind'], 
\\\r\n \"\\n--------------------------------\"\r\n flag = 1\r\n if flag == 0:\r\n print \"Day not found :( Didn\\'t need people being told that the\"", "def generate_day_settings(date):\n global settings\n try:\n settings = load_default_for_date(date=date)\n save_day_settings(dict=settings, date=date)\n logging.info(\"Settings for %s generated.\", date)\n except:\n logging.exception(\"Unable to generate settings for %s\", date)\n # this is bad... kick off beeper and stop here!\n beep()\n raise", "def print_daily_forecast(update, context, day='today'):\n city = context.user_data['city']\n provider = context.user_data['provider']\n data = context.bot_data['forecast_data']\n for d in data: \n if d['city'] == city and d['provider'] == provider:\n forecast = d['forecast'][day]\n\n message = f\"Прогноз погоды на {'сегодня' if day == 'today' else 'завтра'} ({(datetime.date.today() if day == 'today' else datetime.date.today() + datetime.timedelta(days=1)).strftime('%A, %e %B')}):\\n\"\n\n for f in forecast:\n if f['time'] in [\"9:00\", \"15:00\", \"21:00\"]:\n message += f\"\"\"\n*{f['time']}* {f['temperature']} {f['description']} {f['emoji']}\n{'Осадки: ' + f['precipitation'] + ' мм' if provider == 'gismeteo' else 'Вероятность осадков: ' + f['precipitation_chance'] + '%'}\nВетер: {f['windspeed'] + ' м/c'}\n\"\"\"\n context.bot.send_message(chat_id=update.effective_chat.id, text=message, parse_mode='markdown')", "def load_data(city, month, day):\n df = pd.read_csv(CITY_DATA[city])\n df[\"Start Time\"] = pd.to_datetime(df[\"Start Time\"])\n df[\"month\"] = df[\"Start Time\"].dt.month\n df[\"day_of_week\"] = df[\"Start Time\"].dt.weekday\n\n if month != \"all\":\n month = months[month]\n df = df[df[\"month\"] == month]\n\n if day != \"all\":\n df = df[df[\"day_of_week\"] == days.index(day)]\n return df", "def Tobs_given_day(date):\n\n results = session.query(Measurement.date,Measurement.tobs).\\\nfilter(Measurement.date.between(One_yrs_ago,current_time)).\\\nfilter(func.strftime(\"%Y-%m-%d\",Measurement.date)==date).all()\n \n results1=[results[i][1] for i in range(len(results))]\n results={results[0][0]:results1}\n print(f\"Route /api/v1.0/precipitation/<date> with <date>={date} is being visited\")\n return jsonify(results)", "def two_emotions_specific_day(self, day):\n emotions = self.many_emotions_specific_day(day)\n return self._two_emotions(emotions)", "def load_data(city, month, day):\n\n df = pd.read_csv(CITY_DATA[city])\n\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['month'] = df['Start Time'].dt.month\n df['weekday'] = df['Start Time'].dt.weekday_name\n df['hour'] = df['Start Time'].dt.hour\n\n if month != 'all':\n month = months.index(month) + 1\n df = df[df['month'] == month]\n\n if day != 'all':\n df = df[df['weekday'] == day.title()]\n\n return df", "def poland_cases_by_date(day: int, month: int, year: int = 2020) -> int:\r\n \r\n # Your code goes here (remove pass)\r\n return confirmed_cases.loc[confirmed_cases[\"Country/Region\"]==\"Poland\"][f\"{month}/{day}/20\"].values[0]", "def moon_set(self, date=None):\n self._set_site_date(date)\n moonset = self.site.next_setting(self.moon)\n moonset = self.date_to_local(moonset.datetime())\n ## if moonset > self.sunrise():\n ## moonset = None\n return moonset", "def agregated_meal_data(request, mode = 'day', template_name = 'index.html'):\n meals = []\n today = get_today()\n diet = get_object_or_404(Diet, user=request.user)\n day_range = 1 if mode is None or mode == 
'day' else 7\n \n if diet.start_date: # there's a diet and the starting day has been choosen\n days = [ diet.current_day_plan(today + datetime.timedelta(days=i)) for i in range(day_range) if diet.current_day_plan(today + datetime.timedelta(days=i)) is not None ]\n else: # there's a diet, but the starting day haven't been choosen\n days = diet.dayplan_set.filter(sequence_no__range=(1,day_range))\n\n for d in days: meals.extend(d.meal_set.all())\n shooping_list, shopping_list_other = agregate_and_preprocess_meal_data(meals)\n\n return direct_to_template(request, 'shopping/%s' % template_name, locals())", "def load_data(city, month, day):\n \n# Using pandas accessor to find month, day, hour from the Start Time column in the source data\n print(\"A moment please while I find the data....\\n\")\n df = pd.read_csv(CITY_DATA[city])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['day_of_week'] = df['Start Time'].dt.weekday_name\n df['month'] = df['Start Time'].dt.month\n df['hour'] = df['Start Time'].dt.hour\n\n return df", "def getDailyWeather(self, keyword, temp):\n\n\t\t# Variables\n\t\tdaily_weather = []\n\t\tweather = {}\n\t\tfio = self.helper.getFio(keyword, temp) # Getting fio object\n\n\t\t# Getting 4-day forecast, storing each day's data in a dictionary and\n\t\t# storing each dictionary in an array\n\t\tif fio.has_daily() is True:\n\t\t\tdaily = FIODaily.FIODaily(fio)\n\t\t\tfor day in xrange(0, 4):\n\t\t\t\tfor item in daily.get_day(day).keys():\n\t\t\t\t\tif item == \"summary\":\n\t\t\t\t\t\tweather[item] = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"icon\":\n\t\t\t\t\t\tweather[item] = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"temperatureMax\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\t\n\t\t\t\t\tif item == \"temperatureMin\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"precipProbability\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\t\tif item == \"time\":\n\t\t\t\t\t\tweather[item] = self.helper.getDateForWeather(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"cloudCover\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\tdaily_weather.append(weather)\n\t\t\t\tweather = {}\n\t\telse:\n\t\t\treturn 'No Daily data'\n\t\treturn daily_weather", "def GetDays(self,FromDay,ToDay,SubID):\n\n data = pd.read_csv(self.OneDResultPath + str(SubID) +'.txt',\n header =None,delimiter = r'\\s+')\n data.columns=[\"day\" , \"hour\", \"xs\", \"q\", \"h\", \"wl\"]\n days = list(set(data['day']))\n days.sort()\n\n if FromDay not in days:\n Alt1 = FromDay\n\n stop = 0\n # search for the FromDay in the days column\n while stop == 0:\n # for i in range(0,10):\n try:\n np.where(data['day'] == Alt1)[0][0] #loc =\n stop = 1\n except:\n Alt1 = Alt1 - 1\n # print(Alt1)\n if Alt1 <= 0 :\n stop = 1\n continue\n\n Alt2 = FromDay\n # FromDay =\n # search for closest later days\n stop = 0\n while stop == 0:\n # for i in range(0,10):\n try:\n np.where(data['day'] == Alt2)[0][0] #loc =\n stop = 1\n except:\n Alt2 = Alt2 + 1\n # print(Alt2)\n if Alt2 >= data.loc[len(data)-1,'day']:\n stop = 1\n continue\n\n text = \"\"\"\"\n the FromDay you entered does not exist in the data, and the closest day earlier than your input day is\n \"\"\" + str(Alt1) + \"\"\" and the closest later day is \"\"\" + str(Alt2)\n print(text)\n\n if abs(Alt1 - FromDay) > abs(Alt2 - FromDay):\n Alt1 = Alt2\n else:\n 
print(\"FromDay you entered does exist in the data \")\n Alt1 = False\n\n\n\n if ToDay not in days:\n Alt3 = ToDay\n\n stop = 0\n # search for the FromDay in the days column\n while stop == 0:\n # for i in range(0,10):\n try:\n np.where(data['day'] == Alt3)[0][0] # loc =\n stop = 1\n except:\n Alt3 = Alt3 - 1\n # print(Alt1)\n if Alt3 <= 0 :\n stop = 1\n continue\n\n Alt4 = ToDay\n # FromDay =\n # search for closest later days\n stop = 0\n while stop == 0:\n # for i in range(0,10):\n try:\n np.where(data['day'] == Alt4)[0][0] #loc =\n stop = 1\n except:\n Alt4 = Alt4 + 1\n # print(Alt2)\n if Alt4 >= data.loc[len(data)-1,'day']:\n stop = 1\n continue\n # Alt3 = [Alt3, Alt4]\n text = \"\"\"\"\n the Today you entered does not exist in the data, and the closest day earlier than your input day is\n \"\"\" + str(Alt3) + \"\"\" and the closest later day is \"\"\" + str(Alt4)\n print(text)\n\n if abs(Alt3 - ToDay) > abs(Alt4 - ToDay):\n Alt3 = Alt4\n\n else:\n print(\"ToDay you entered does exist in the data \")\n Alt3 = False\n\n\n return Alt1, Alt3", "def get_monthday_song():\n current_day = day_of_month()\n print(f\"MONTHDAY:{current_day}\")\n if (current_day == 1):\n return random.choice([ \\\n \"One of Us\", \\\n \"One - Harry Nilsson\", \\\n \"One More Night \", \\\n \"One - Metallica\", \\\n \"The One - Backstreet Boys\", \\\n \"1 Step Forward, 3 Steps Back\"])\n elif (current_day == 2):\n return random.choice([\"It Takes Two\", \"Two Black Cadillacs\"])\n elif (current_day == 3):\n return random.choice([\"Three Little Birds\"])\n elif (current_day == 4):\n return random.choice([\"Four Mintues\", \"Four Five Seconds\"])\n elif (current_day == 5):\n return random.choice([\"Mambo Number 5\", \"I Got 5 on It\", \"Five More Minutes\"])\n elif (current_day == 6):\n return random.choice([\"Six Feet Under\"])\n elif (current_day == 7):\n return random.choice([ \\\n \"7 Years\", \\\n \"Seven Bridges Road\", \\\n \"Seven Seas of Rhye\", \\\n \"Seven Nation Army\", \\\n \"7 things\"])\n elif (current_day == 8):\n return random.choice([\"Eight Days a Week\", \"Eight Miles High\", \"8 out of 10\"])\n elif (current_day == 9):\n return random.choice([\"Love Potion Number 9\"])\n elif (current_day == 13):\n return random.choice([\n \"13 - LANY\", \\\n \"Floor 13 - Machine Gun Kelly\", \\\n \"13 - DDG\", \\\n \"Thirteen - Johnny Cash\"])\n elif (current_day == 14):\n return random.choice([\"14,400 Minutes\", \"March 15th\", \"14 Miles From Home\"])\n elif (current_day == 15):\n return random.choice([\"Fifteen - Taylor Swift\"])\n elif (current_day == 16):\n return random.choice([\"Summer Sixteen\", \"Sixteen Tons\", \"Sixteen\"])\n elif (current_day == 17):\n return random.choice([\"Edge of Seventeen\"])\n elif (current_day == 21):\n return random.choice([\"Twenty One - Khalid\"])\n elif (current_day == 22):\n return random.choice([\"22 - Taylor Swift\", \"Twenty Two and Some Change\", \"Twentytwo - Thea\"])\n elif (current_day == 23):\n return random.choice([\"23 Sam Hunt\", \"23 Mike Will\", \"23 Chayce Beckham\"])\n elif (current_day == 24):\n return random.choice([\"24 - Kanye\"])\n else:\n return \"\"", "def load_data(city, month, day):\n df=pd.read_csv(CITY_DATA[city])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['month'] = df['Start Time'].dt.month\n df['day_of_week'] = df['Start Time'].dt.weekday_name\n if month != 'none':\n months = ['january', 'february', 'march', 'april', 'may', 'june']\n month = months.index(month) +1\n df = df[df['month'] == month]\n\n\n if day != 'none':\n df = 
df[df['day_of_week'] == day.title()]\n return df", "def get_comments_for_one_day(self, y,m,d):\n in_date = date(y,m,d)\n\n start = self.utc_to_unix_time(in_date - timedelta(1)) \n end = self.utc_to_unix_time(in_date) \n return self.get_comments_between(start,end)", "def load_data(city, month, day):\n df = pd.read_csv(CITY_DATA[city])\n \n\n #Converting time\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['month'] = df['Start Time'].dt.month\n df['day_of_week'] = df['Start Time'].dt.weekday_name\n \n if month != 'all':\n month = MONTHS.index(month) + 1\n\n df = df[df['month'] == month]\n \n if day != 'all':\n df = df[df['day_of_week'] == day.title()]\n \n return df", "def get_user_data(self,day=today()):\n try:\n data = self.user.userdata_set.get(date=day)\n return data\n except UserData.DoesNotExist:\n # get the latest data\n approximated_weight = approximate_user_data_for_date(self.user.userdata_set.all(),\"weight\",day)\n if not approximated_weight: approximated_weight = 0\n return UserData(user=self.user, weight=approximated_weight, waist=0, date=day)", "async def dailytomorrow(self, ctx):\n if ctx.invoked_subcommand is None:\n await ctx.send_help(ctx.command)", "def load_data(city, month, day):\n df=pd.read_csv(CITY_DATA[city])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['month']=df['Start Time'].dt.month\n df['day_of_week']=df['Start Time'].dt.weekday_name\n if month!= 'all':\n months=['january','february','march','april','may','june']\n month= months.index(month)+1\n df=df[df['month']==month]\n if day!= 'all':\n df=df[df['day_of_week'] == day.title()]\n\n return df", "def load_data(city, month, day):\n df = pd.read_csv(city)\n df['month'] = pd.to_datetime(df['Start Time']).dt.month\n df['day_of_week'] = pd.to_datetime(df['Start Time']).dt.dayofweek\n if month != 'all':\n df = df[df['month'] == month]\n if day != 'all':\n df = df[df['day_of_week'] == day]\n\n return df", "def load_data(city, month, day):\n df = pd.read_csv(CITY_DATA[city])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['Month'] = df['Start Time'].dt.month\n df['Day_of_week'] = df['Start Time'].dt.weekday_name\n\n if month != 'all':\n month = months.index(month) + 1\n df = df[df['Month'] == month]\n if day != 'all':\n df = df[df['Day_of_week'] == day.title()]\n\n return df", "def for_day(self, day_date, partner_name):\n\n plant_for_day = None\n taxon = None\n\n while not taxon:\n candidate_plant = self._pick_candidate_plant(\n day_date, partner_name)\n if candidate_plant:\n # Make sure this plant still exists in the main database.\n try:\n taxon = Taxon.objects.get(\n scientific_name=candidate_plant.scientific_name)\n plant_for_day = candidate_plant\n except ObjectDoesNotExist:\n # Disable this plant in the Plant of the Day list,\n # so it cannot be picked again.\n candidate_plant.include = False\n candidate_plant.save()\n else:\n break\n\n if plant_for_day:\n plant_for_day.last_seen = day_date\n plant_for_day.save()\n\n return plant_for_day", "def load_data(city, month, day):\n df = pd.read_csv(city)\n df['day_of_week'] = pd.to_datetime(df['Start Time']).dt.dayofweek\n df['month'] = pd.to_datetime(df['Start Time']).dt.month\n if day != 'all':\n df = df[df['day_of_week'] == day]\n if month != 'all':\n df = df[df['month'] == month]\n df.drop('day_of_week', axis=1, inplace=True)\n df.drop('month', axis=1, inplace=True)\n return df", "def get_day_of_data(self, column_name, day):\n self.check_for_column(column_name)\n dt = pd.Timestamp(day)\n column = self.data[column_name]\n return 
column[column.index.date == dt.date()]", "def grabDaily(self):\n raise NotImplemented(\"method should be redefined in a subclass\")", "def fetch_sundata(self, date: datetime) -> Sundata:\n pass", "def load_data(city, month, day):\n # here i load the datak\n df=pd.read_csv(CITY_DATA[city])\n \n df['Start Time']=pd.to_datetime(df['Start Time'])\n \n df['month']=df['Start Time'].dt.month\n df['day_of_week']=df['Start Time'].dt.weekday_name\n df['hour']=df['Start Time'].dt.hour\n \n #filter by month\n if month!='all':\n month =months.index(month)+1\n df=df[df['month']==month]\n \n #filter by day of week\n if day!='all':\n df=df[df['day_of_week']==day.title()]\n \n return df", "def read_data_file(year=None, day=None, part_num=1, testing=False):\n now = datetime.datetime.today()\n if not day:\n day = now.day\n if not year:\n year = now.year\n fname = f\"day{'%02d' % day}_{part_num}\"\n if testing:\n fname += \"_test\"\n fname += \".txt\"\n path = os.path.join(\n os.path.dirname(os.path.realpath(__file__)), f\"../data/y{year}\", fname\n )\n with open(path, \"r\") as f:\n return f.read()", "def get_data(day, unixstart=None, unixend=None):\n global subnet\n df_data = pd.DataFrame([])\n while len(df_data.index)<=0:\n try:\n df_data = pd.read_feather(f'/home/pi/studies/ardmore/homeserver/h{subnet}_{day}.feather')\n except Exception as e:\n # print(f\"Error grid_server.get_data:{e}\")\n pass\n if unixstart!=None:\n df_data = df_data[(df_data['unixtime']>=unixstart)&(df_data['unixtime']<=unixend)]\n float_cols = [x for x in df_data.columns if not x.startswith('timezone')]\n df_data = df_data[float_cols].astype(float)\n return df_data", "def load_data(city, month, day):\n if city == 'new york city':\n df=pd.read_csv(\"./new_york_city.csv\")\n else: \n df=pd.read_csv(\"./\" + city + \".csv\")\n df['Start Time']=pd.to_datetime(df['Start Time'])\n df['month'] = df['Start Time'].dt.month\n df['day'] = df['Start Time'].dt.dayofweek\n df['hour'] =df['Start Time'].dt.hour\n if month !='all':\n df=df[df['month']==months_list[month]]\n if day != 'all':\n df=df[df['day']==days_list[day]]\n \n return df", "def get_weather_details(self, days: int = None):\n forecast = super().get_weather_forecast(self.BASE_URL)\n headers = [\n \"date\",\n \"min_temp\",\n \"max_temp\",\n \"phrase\",\n \"probability\",\n \"wind_speed\"]\n if days is None:\n days = 5\n for number in range(days):\n data = []\n date = forecast[\"DailyForecasts\"][number]['Date']\n date = date[:10]\n data.append(date)\n min_temp = round((int(\n (forecast[\"DailyForecasts\"][number][\"Temperature\"]\n [\"Minimum\"][\"Value\"])) - 32) / 1.8)\n data.append(min_temp)\n max_temp = round((int(\n (forecast[\"DailyForecasts\"][number][\"Temperature\"]\n [\"Maximum\"][\"Value\"])) - 32) / 1.8)\n data.append(max_temp)\n phrase = forecast[\"DailyForecasts\"][number][\"Day\"][\"LongPhrase\"]\n data.append(phrase)\n probability = (forecast[\"DailyForecasts\"][number][\"Day\"]\n [\"RainProbability\"])\n data.append(probability)\n wind_speed = round(int(\n (forecast[\"DailyForecasts\"][number][\"Day\"][\"Wind\"][\"Speed\"]\n [\"Value\"]) / 1.6), 1)\n data.append(wind_speed)\n yield dict(zip(headers, data))", "def melon_count_summary(day_number, path):\n print(\"Day\", day_number)\n the_file = open(path)\n for line in the_file:\n line = line.rstrip()\n words = line.split('|')\n print(\"Delivered {} {}s for total of ${}\".format(words[1], words[0], words[2]))\n the_file.close()", "def parse(day):\n data = Input(day).read().split('\\n')\n return [el for el in data if el 
!= '']", "def load_data(city, month, day):\n df = pd.read_csv(CITY_DATA[city])\n\n \n df['Start Time'] = pd.to_datetime(df['Start Time'])\n # to_datetime command is used to convert(change) date into date format\n df['End Time'] = pd.to_datetime(df['End Time'])\n if month != 'all':\n months = ['january', 'february', 'march', 'april', 'may', 'june']\n #used to find index of month.\n month = months.index(month) + 1 \n\n df = df[df['Start Time'].dt.month == month]\n \n #filter data by day.\n if day != 'all': \n df = df[df['Start Time'].dt.weekday_name == day.title()]\n #print 5 rows.\n print(df.head())\n return df", "def read_metar_ZA(metar_url, date_as_ISO_text=False):\n\n \n metar_list = [] # The list of dictionaries that will be returned, containing METAR data\n \n # Regular expressions to extract the wind\n re_wind_no_gust = re.compile(r'(?P<direction>[0-9]{3,3})(?P<spd>[0-9]{2,2})KT') # 10005KT\n re_wind_gust = re.compile(r'(?P<direction>[0-9]{3,3})(?P<spd>[0-9]{2,2})G(?P<gust>[0-9]{2,2})KT') # 10005G15KT\n re_wind_variable = re.compile(r'(?P<direction>VRB)(?P<spd>[0-9]{2,2})KT') # VRB05KT\n re_no_data = re.compile(r'No Data For (?P<missing>[A-Z,a-z]{4,4})', re.IGNORECASE) # No data for FAGC\n re_temp = re.compile(r' (?P<temp>[M]?[0-9]{2,2})+/(?P<dewpt>[M]?[0-9]{2,2}) ') #temp in format 20/12 or 20/M02 or M03/M10 etc. \n re_qnh = re.compile(r'Q(?P<qnh>[0-9]{3,4})')\n \n \n # Retrieve the webpage containing METAR data\n try:\n r = requests.get(metar_url, verify=False)\n except:\n current_app.logger.error(f\"Error retrieving METAR - failed at REQUESTS call\")\n return None\n \n \n # If error retrieving page, return None\n if r.status_code != 200: \n current_app.logger.error(f\"Error retrieving METAR: URL = {metar_url}: {r.status_code} - {r.reason}\")\n return None\n \n # Setup Beautiful Soup, and extract all the \"PRE\" tags - these are where the METAR data is stored\n soup = BeautifulSoup(r.text, 'html.parser')\n mets = soup.find_all('pre')\n \n #Connect to DB\n sess = sqa_session()\n \n # Loop through the individual METAR\n for met in mets:\n \n # Get just the text. Sould be: similar to: 'View DecodedMETAR FAOR 100530Z 19015KT CAVOK 15/M03 Q1020 NOSIG='\n met_string = str(met.text)\n \n is_speci = False # Is this a SPECI and not a METAR - default to False\n is_correction = False #Is this METAR a correction of an earlier (i.e. 'METAR COR xxxxxxxxx')\n \n # Determine if this is a METAR, a SPECI, or a line to be ignored\n s = met_string.find('METAR') # Is it a METAR?\n \n # If text not found, this is not a METAR - is it a SPECI?\n if s < 0:\n s = met_string.find('SPECI') # Is it a SPECI\n\n if s >= 0: # It is a speci\n is_speci = True\n \n else: # It's not a SPECI either, so continue to the next element\n continue\n\n s += 5 # 5 is the length of the text METAR and SPECI - we want to remove this.\n # Remove METAR/SPECI text - we should now have the raw METAR/SPECI only (eg. 
'FAOR 100530Z 19015KT CAVOK 15/M03 Q1020 NOSIG=')\n met_string = met_string[s:].strip()\n \n # If this METAR is a Correction, then flag and remove the 'COR ' (eg: METAR COR FAHS 011200Z AUTO 30009KT 34/02 Q1017=\n if met_string[:4] == 'COR ':\n is_correction = True\n met_string = met_string[4:]\n \n # Extract aerodrome name\n aerodrome = met_string[:4]\n # Get aerodrome NavPoint - contains coordinates\n aero_point = sess.query(NavPoint).filter(NavPoint.ICAO_Code == aerodrome).first()\n \n # If aerdrome not found, this is a non-aerodrome station - ignore it (May implement later)\n if not aero_point:\n continue\n \n # Get the date and time\n day = int(met_string[5:7])\n hr = int(met_string[7:9])\n mn = int(met_string[9:11])\n \n met_date = calc_metar_taf_date(day, hr, mn)\n \n #Get the winds\n wind_variable = False # Wind defaults to not light and variable\n wind_gust = 0 # Gust defaults to 0\n no_wind = False #Is there no wind data avail (i.e. /////KT)\n \n \n #Check whether there is now wind specified (i.e. /////KT)\n if met_string.find('///KT') > 0:\n no_wind = True\n wind_dir = 0\n wind_spd = 0\n else:\n \n # Use regular expression to try to extract non-gusting wind (eg. 10010KT)\n tmp = re_wind_no_gust.search(met_string)\n if tmp:\n try:\n wind_dir = tmp.group('direction')\n wind_spd = tmp.group('spd')\n except:\n current_app.logger.error(f\"Error passing METAR winds: {met_string}\")\n \n # Use regular expression to try to extract gusting wind (eg. 10010G15KT)\n elif re_wind_gust.search(met_string):\n tmp = re_wind_gust.search(met_string)\n try:\n wind_dir = tmp.group('direction')\n wind_spd = tmp.group('spd')\n wind_gust = tmp.group('gust')\n except:\n current_app.logger.error(f\"Error passing METAR wind GUSTING: {met_string}\")\n \n # Use regular expression to try to extract variable wind (eg. VRB02KT)\n elif re_wind_variable.search(met_string):\n tmp = re_wind_variable.search(met_string)\n try:\n wind_dir = -1\n wind_spd = tmp.group('spd')\n wind_variable = True\n except:\n current_app.logger.error(f\"Error passing METAR wind VARIABLE: {met_string}\")\n\n # Use regular expression to try to extract Temp and Dewpoint (eg. 25/M02)\n temperature = 0\n dew_point = 0\n\n tmp = re_temp.search(met_string)\n if tmp:\n try:\n temperature = int(tmp.group('temp').replace('M','-'))\n dew_point = int(tmp.group('dewpt').replace('M','-'))\n except:\n current_app.logger.error(f\"Error passing METAR temperature: {met_string}\")\n\n\n # Use regular expression to try to extract QNH (eg. 
Q1025)\n qnh = 1013\n \n tmp = re_qnh.search(met_string)\n if tmp:\n try:\n qnh = tmp.group('qnh')\n except:\n current_app.logger.error(f\"Error passing METAR QNH: {met_string}\")\n \n if date_as_ISO_text == True:\n met_date = datetime.isoformat(met_date)\n \n met_dict = {'aerodrome': aerodrome , 'coords': (aero_point.Longitude, aero_point.Latitude), \n 'has_no_data': False , 'is_speci': is_speci, 'is_correction': is_correction, 'time': met_date, \n 'wind': {'no_wind_data': no_wind, 'direction': wind_dir, 'speed': wind_spd, 'gusting': wind_gust, 'is_variable': wind_variable}, #(wind_dir, wind_spd, wind_gust, wind_variable) , \n 'temperature': temperature, 'dew_point': dew_point,\n 'qnh': qnh,\n 'body': met_string}\n \n metar_list.append(met_dict)\n \n # Check for any stations with no data - search the whole page\n aero_no_datas = re_no_data.findall(soup.text)\n # If there are stations with no data, iterate through them\n if aero_no_datas:\n for aerodrome in aero_no_datas:\n # Get aerodrome NavPoint - contains coordinates\n aero_point = sess.query(NavPoint).filter(NavPoint.ICAO_Code == aerodrome).first()\n \n # If aerdrome not found, this is a non-aerodrome station - ignore it (May implement later)\n if not aero_point:\n continue\n \n # Add a disctionary item\n met_dict = {'aerodrome': aerodrome , 'coords': (aero_point.Longitude, aero_point.Latitude) , \n 'has_no_data': True, 'body': f'No data for {aerodrome}'}\n \n metar_list.append(met_dict)\n\n return metar_list", "def _check_day_data(self, datetime):\n if self.curr_day_data is None or self.compare_dates(self.curr_day_data.index[0], datetime) is False:\n date = dt.datetime(year=datetime.year, month=datetime.month, day=datetime.day)\n symbols = [product.symbol for product in self.products]\n self.curr_day_data = get_data_multi(symbols, date, second_bars=self.second_bars)\n self.clear_resting_orders()", "def info(self, user, charmsay):\n\t\trundays = 0\n\t\tdb = user.dbcon.cursor()\n\t\tdb.execute(\"SELECT value FROM sord WHERE name = 'gdays'\")\n\t\tfor value in db.fetchone():\n\t\t\trundays = value\n\t\tdb.close()\n\t\tthismsg = \"\\r\\n\"+self.cntransi(self.ESC+\"32mSaga Of The Red Dragon\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32m\"+self.ESC+\"1m\"+self.config.host)+\"\\r\\n\\r\\n\"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mCompiled June 25, 2009: Version \"+self.ESC+\"1m\"+self.ESC+\"37m\"+self.config.version+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"22m\"+self.ESC+\"32m(c) pre-2009 by Someone Else\\r\\n\\r\\n\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32m\"+self.ESC+\"1m\"+self.ESC+\"37mREGISTERED TO \"+self.ESC+\"0m\"+self.ESC+\"1m\"+self.ESC+\"34m\"+self.config.admin+self.ESC+\"0m\")+\"\\r\\n\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mThe current game has been running for \"+self.ESC+\"1m\"+str(rundays)+self.ESC+\"22m game days.\\r\\n\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mPlayers are deleted after \"+self.ESC+\"1m\"+str(self.config.delinactive)+self.ESC+\"22m real days of inactivity.\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mPlayers are enjoying \"+self.ESC+\"1m\"+str(self.config.ffight)+self.ESC+\"22m forest fights per day.\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mPlayers are enjoying \"+self.ESC+\"1m\"+str(self.config.pfight)+self.ESC+\"22m player fights per day.\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += 
self.cntransi(self.ESC+\"32mPlayers are enjoying \"+self.ESC+\"1m\"+str(self.config.bankinterest)+\"%\"+self.ESC+\"22m interest at the bank per day.\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mThe current game day is \"+self.ESC+\"1m\"+str(self.config.daylength)+self.ESC+\"22m real hours long.\\r\\n\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += \"\\r\\n\"+self.ESC+\"32m The peasants say this about you : \\r\\n \"\n\t\ttry: \n\t\t\tthismsg += user.thisFullname + \" `2\" + charmsay[user.sex][user.charm]\n\t\texcept IndexError:\n\t\t\tthismsg += \"nothing at all.\"\n\t\tthismsg += \"`.\\r\\n\"\n\t\treturn thismsg", "def load_data(city, month, day):\n df = pd.read_csv('{}.csv'.format(city))\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['month'] = df['Start Time'].dt.month\n df['day_of_week'] = df['Start Time'].dt.weekday_name\n if month != 'all':\n months = ['january', 'february', 'march', 'april', 'may', 'june']\n month = months.index(month.lower()) + 1\n df = df[df['month'] == month]\n\n if day != 'all':\n df = df[df['day_of_week'] == day.title()]\n\n\n return df", "def get_thread(dayname):\n d = datetime.date.today()\n\n try:\n title = config.get(dayname, \"title\") + ' (' + d.strftime(\"%B %d\") + ')'\n text = config.get(dayname, \"text\")\n except:\n sys.exit(2) # nothing found for today\n text = \"\\n\\n\".join(text.split(\"\\n\"))\n\n return title, text", "def download_house(self, n, day=None, month=None, year=None):\n\n try:\n # Check what is the starting point\n if 'Start.aspx' in self.driver.current_url:\n # We are on the login screen, we first need to login\n print('-- login in main screen')\n self.login()\n print('-- accessing house', n)\n self.goto(n)\n elif 'sunnyportal.com/Plants' in self.driver.current_url:\n # We are on the plant list, lets\n self.goto(n)\n elif 'sunnyportal.com/FixedPages/Dashboard.aspx' in self.driver.current_url:\n # We are on a dashboard, so we should be able to click on the left hand pannel to go to the new house\n self.goto_2(n)\n else:\n # No idea where we are\n raise Exception('I dont know where we are:', self.driver.current_url)\n print('-- downloading house', n, 'power data')\n res = self.download(day, month, year)\n self.date = self.wait_n_get(By.ID, id_date).get_attribute('value')\n if day:\n if not self.date == \"%d/%d/%d\"%(month, day, year):\n print('Error the date wasnt fixed correctly: '+self.date)\n\n if res:\n # There seems to be a positive response, so let's put it in a pandas dataframe\n df = pd.read_csv(StringIO(res.text), sep=';', names=['power', 'avg'], skiprows=1)\n print('-- download sucessful')\n return df\n else:\n print('-- download failed')\n # No response, we return a None object\n return res\n\n except Exception as e_1:\n # Something whent wrong\n try:\n # Check if sunny portal has banned us for some time\n text = self.wait_n_get(By.ID, 'ctl00_ContentPlaceHolder1_Logincontrol1_DivLogin').text\n if 'Login failed! 
Login will be blocked for' in text:\n # It does seem like we have been banned for some time\n print(text)\n n_sec = int(text.split('for')[1].split(' seconds')[0])\n print('going to sleep for %d sec'%(n_sec))\n time.sleep(n_sec)\n print('retrying this house')\n return self.download_house(n, day, month, year)\n except Exception as e_2:\n # I don't know what went wrong\n print(e_1)\n print(e_2)\n raise(e_1)", "def cargar_otras(self):\n\n stream_cargar = open ('yo_otros.txt', 'rt',encoding=\"utf-8\")\n datos=stream_cargar.readlines()\n \n # print(datos)\n # print (len(kasino.maquinas))\n\n lista_maquinas=[]\n lista_deco =[]\n day=\"\"\n money=\"\"\n\n contador=0\n dia_o_dinero=\"dia\"\n\n for i in datos[0]:\n # print(contador,i)\n if contador <8:\n lista_maquinas.append(i)\n contador+=1\n\n elif contador <17:\n lista_deco.append(i)\n contador+=1\n\n\n elif contador >= 17 and dia_o_dinero ==\"dia\":\n if i ==\"D\":\n pass\n elif i ==\"M\":\n dia_o_dinero=\"dinero\"\n else:\n day+=i\n elif contador >= 17 and dia_o_dinero == \"dinero\":\n money+=i\n \n \n\n # print(\"lm\",lista_maquinas)\n # print (\"ld\",lista_deco)\n # print(day,money)\n\n contador=0\n for i in kasino.maquinas:\n kasino.maquinas[i]=int(lista_maquinas[contador])\n contador+=1\n\n contador=0\n for i in kasino.decoracion:\n kasino.decoracion[i]=int(lista_deco[contador])\n contador+=1\n\n kasino.dia=int( day)\n kasino.dinero=int(money)", "def print_day(day):\n timetable = load_table(day)\n if timetable is False:\n print(\"file not found\")\n else:\n max_name = 0\n max_roomcode = 0\n max_type = 0\n for line in timetable:\n if len(line[1]) > max_name:\n max_name = len(line[1])\n if len(line[2]) > max_roomcode:\n max_roomcode = len(line[2])\n if len(line[4]) > max_type:\n max_type = len(line[4])\n print_title_border_horiz(max_name, max_roomcode, max_type)\n for line in timetable:\n print_data_line(line, max_name, max_roomcode, max_type)\n print_title_border_horiz(max_name, max_roomcode, max_type)", "def load_data(city, month, day):\n df = pd.read_csv(CITY_DATA[city])\n\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n\n df['month_names'] = df['Start Time'].dt.month\n df['day_names'] = df['Start Time'].dt.weekday\n df['hour'] = df['Start Time'].dt.hour\n\n if month != 'all':\n months = ['january', 'february', 'march', 'april', 'may', 'june']\n month = months.index(month) + 1\n df = df[df['month_names'] == month]\n\n if day != 'all':\n day_name = ['sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday']\n day = day_name.index(day) + 1\n df = df[df['day_names'] == day]\n\n return df", "def _get_dates():\n remote = os.path.join(BASE_URL, RSS_FEED)\n local = os.path.join(TMP, RSS_FEED)\n u..(remote, local)\n\n with open(local) as f:\n return PUB_DATE.findall(f.read())", "def load_data(city, month, day):\n while month != \"\":\n # load data file into a dataframe\n filename = CITY_DATA[city]\n df = pd.read_csv(filename)\n\n # convert the Start Time column to datetime\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n\n # extract month and day of week from Start Time to create new columns\n df['month'] = df['Start Time'].dt.month\n # df['day_of_week'] = df['Start Time'].dt.day_name()\n\n\n try: df['day_of_week'] = df['Start Time'].dt.weekday_name\n except: df['day_of_week'] = df['Start Time'].dt.day_name()\n else: df['day_of_week'] = df['Start Time'].dt.weekday\n \n \n \n df['hour'] = df['Start Time'].dt.hour\n\n\n # filter by month if applicable\n if month != 'all':\n # use the index of the months list to get 
the corresponding int\n # months = ['january', 'february', 'march', 'april', 'may', 'june','july','august','september','october','november','december']\n month = int(months.index(month)) + 1\n \n # filter by month to create the new dataframe\n df = df.loc[df['month'] == month]\n\n # filter by day of week if applicable\n if day != 'all':\n # filter by day of week to create the new dataframe\n df = df.loc[df['day_of_week'] == day.title()]\n \n return df", "def process(self, entity):\n\n\t\tresp = ''\n\n\t\t# last time forecast.io was called\n\t\tnow = time.localtime()\n\t\tdiff = (time.mktime(now) - time.mktime(self.weather_time)) / 60\n\t\t# print 'diff', diff\n\n\t\t# grab the json info\n\t\tj = self.forecast.json\n\n\t\t# update if it has been too long ... 5 mins\n\t\tif diff > 5:\n\t\t\tself.forecast.update()\n\t\t\tj = self.forecast.json\n\t\t\tself.weather_time = now\n\t\t\tprint 'update'\n\n\t\t# get weather asked for: today, tomorrow, monday, sunday, etc\n\t\tif 'datetime' in entity:\n\t\t\tt = entity['datetime'][0]['value']\n\t\t\tasked = time.strptime(t.split('.')[0],'%Y-%m-%dT%H:%M:%S')\n\n\t\t\t# get how many days in future\n\t\t\tw_time = asked.tm_mday - now.tm_mday\n\t\t\tif w_time >= 0 or w_time < 7: resp = self.grabWeatherDay( int( w_time ) )\n\n\t\telse:\n\t\t\ttemp = j['currently']['apparentTemperature']\n\t\t\train = j['currently']['precipProbability']*100.0\n\t\t\tresp = 'The weather is currently {0:d} degrees with {1:d} percent chance of rain'.format(temp, rain)\n\n\t\treturn resp", "def load_data(city, month, day):\n df=pd.read_csv(CITY_DATA[city])\n\n df['Start Time']=pd.to_datetime(df['Start Time'])\n\n df['month'] = df['Start Time'].dt.month\n\n df['day_of_week'] = df['Start Time'].dt.weekday_name\n\n df['hour'] = df['Start Time'].dt.hour\n\n if month != 'all':\n months = ['january', 'february', 'march', 'april', 'may', 'june', 'july', 'august', 'september', 'october', 'november', 'december']\n month = months.index(month) + 1\n df = df[df['month'] == month]\n\n if day != 'all':\n df = df[df['day_of_week'] == day.title()]\n\n return df", "def getting_user_weather_5days(location_key):\n\n API_Key = \"zIGuOeUd0aE4O621Gj1KGDc6JiZ3PAGb\"\n http_request = f\"http://dataservice.accuweather.com/forecasts/v1/daily/5day/{location_key}?apikey={API_Key}&language=pt-br&metric=true\"\n\n accu_request_5 = requests.get(http_request)\n\n if accu_request_5.status_code != 200:\n print(\"It was not possible to stablish connection with the metherological server. Please, try again later!\")\n exit()\n\n else:\n accu_response_5 = accu_request_5.json()\n\n return accu_response_5", "async def fetch_hourly_data(self, day=None):\n self._logger.info(\"Fetching hourly data for %s\", day)\n await self._client.select_customer(self.account_id, self.customer_id)\n await self._client.select_customer(self.account_id, self.customer_id)\n\n if day is None:\n # Get yesterday\n yesterday = datetime.now() - timedelta(days=1)\n day_str = yesterday.strftime(\"%Y-%m-%d\")\n elif hasattr(day, \"strftime\"):\n day_str = day.strftime(\"%Y-%m-%d\")\n else:\n try:\n datetime.strptime(day, \"%Y-%m-%d\")\n except ValueError:\n print(\"Start date bad format. 
It must match %Y-%m-%d\")\n return\n day_str = day\n\n params = {\"dateDebut\": day_str, \"dateFin\": day_str}\n res = await self._client.http_request(HOURLY_DATA_URL_2, \"get\",\n params=params, )\n # We can not use res.json() because the response header are not application/json\n json_res = json.loads(await res.text())\n\n if len(json_res.get('results')) == 0:\n self._hourly_data[day_str] = {\n 'day_mean_temp': None,\n 'day_min_temp': None,\n 'day_max_temp': None,\n 'hours': {},\n }\n tmp_hour_dict = dict((h, {'average_temperature':None}) for h in range(24))\n else:\n self._hourly_data[day_str] = {\n 'day_mean_temp': json_res['results'][0]['tempMoyJour'],\n 'day_min_temp': json_res['results'][0]['tempMinJour'],\n 'day_max_temp': json_res['results'][0]['tempMaxJour'],\n 'hours': {},\n }\n tmp_hour_dict = dict((h, {}) for h in range(24))\n for hour, temp in enumerate(json_res['results'][0]['listeTemperaturesHeure']):\n tmp_hour_dict[hour]['average_temperature'] = temp\n\n raw_hourly_weather_data = []\n if len(json_res.get('results')) == 0:\n # Missing Temperature data from Hydro-Quebec (but don't crash the app for that)\n raw_hourly_weather_data = [None]*24\n else:\n raw_hourly_weather_data = json_res['results'][0]['listeTemperaturesHeure']\n\n params = {\"date\": day_str}\n res = await self._client.http_request(HOURLY_DATA_URL_1, \"get\", params=params)\n # We can not use res.json() because the response header are not application/json\n json_res = json.loads(await res.text())\n for hour, data in enumerate(json_res['results']['listeDonneesConsoEnergieHoraire']):\n tmp_hour_dict[hour]['lower_price_consumption'] = data['consoReg']\n tmp_hour_dict[hour]['higher_price_consumption'] = data['consoHaut']\n tmp_hour_dict[hour]['total_consumption'] = data['consoTotal']\n self._hourly_data[day_str]['hours'] = tmp_hour_dict.copy()\n\n #Also copy the raw hourly data from hydroquebec (This can be used later for commercial accounts, mostly 15 minutes power data)\n self._hourly_data_raw[day_str] = {\n 'Energy': json_res['results']['listeDonneesConsoEnergieHoraire'],\n 'Power': json_res['results']['listeDonneesConsoPuissanceHoraire'],\n 'Weather': raw_hourly_weather_data\n }", "def read_day_range(where):\n\twhile True:\n\t\tif (where == 'start'):\n\t\t\t_day = input(\"Introduceti ziua de inceput: \")\n\t\telif (where == 'end'):\n\t\t\t_day = input(\"Introduceti ziua de sfarsit: \")\n\t\telse:\n\t\t\traise NameError\n\t\ttry:\n\t\t\t_day = int(_day)\n\t\t\tif (not is_in_range(_day, 0, VALID_DAY)):\n\t\t\t\tprint(\"Ziua invalida.\")\t\n\t\t\telse:\n\t\t\t\tbreak\n\t\texcept ValueError:\n\t\t\tprint(\"Ziua invalida, introduceti un intreg.\")\n\treturn (_day)", "def load_data(city, month, day):\n df = pd.read_csv(CITY_DATA[city])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['month'] = df['Start Time'].dt.month\n df['day_of_week'] = df['Start Time'].dt.day_name()\n df['hour'] = df['Start Time'].dt.hour\n if month != 'all':\n months = ['january', 'february', 'march', 'april', 'may', 'june']\n month = months.index(month) + 1\n df = df[df['month'] == month]\n\n if day != 'all':\n df = df[df['day_of_week'].str.startswith(day.title())]\n\n return df", "def load_data(city, month, day):\n\n df = pd.read_csv(CITY_DATA[CITIES[city]])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['Month'] = df['Start Time'].dt.month\n df['Day of Week'] = df['Start Time'].dt.dayofweek\n\n # get the subset of data where the month matches the one chosen\n if month != 0:\n df = df[df['Month'] == month]\n \n # get 
the subset of data where the day of the week matches the one chosen\n if day != 7:\n df = df[df['Day of Week'] == day]\n \n return df", "def read_grp(fname):\n global DAYS\n uint_types = [DAYS,\n 'Current crop type', \n 'Current residue on ground type', \n 'Previous residue on ground type', \n 'Old residue on ground type', \n 'Current dead root type', \n 'Previous dead root type', \n 'Old dead root type']\n\n meta = {}\n data = None\n header = []\n\n meta['fname'] = fname\n meta['id'] = ''.join([L for L in fname if L in '0123456789'])\n \n fid = open(fname, 'rb')\n for i, line in enumerate(fid.readlines()):\n line_as_list = line.strip().split()\n\n if len(line_as_list) == 0:\n continue\n\n elif line_as_list[0][0] == '#':\n continue\n\n elif line_as_list[0] == 'int':\n try:\n meta[line[1]] = int(line[2])\n except:\n pass\n \n elif line_as_list[0] == 'float':\n try:\n meta[line[1]] = float(line[2])\n except:\n pass\n\n elif line_as_list[0] == 'char':\n continue\n\n elif line_as_list[0][0] == '{':\n cname = line.strip()[1:-1].replace(r'kg/m', r'kg*m**-1') \\\n .replace(r'kg/m**2', r'kg*m**-2') \\\n .replace(r'kg/m**3', r'kg*m**-3') \\\n .replace(r'kg/m**4', r'kg*m**-4') \\\n .replace(r'mm/hr', r'mm*hr**-1') \\\n .replace(r'mm/h', r'mm*hr**-1') \\\n .replace(r'm/day', r'm*day**-1') \\\n .replace(r'g/cc', r'g*cc**-1') \\\n .replace(r'kg-s/m**4', r'kg-s*m**-4') \\\n .replace(r's/m', r's*m**-1') \\\n .replace(r'Irrigation_volume_supplied/unit_area',\n r'Irrigation_volume_supplied*unit_area**-1')\n header.append(cname)\n\n else:\n if len(header) == len(line_as_list):\n \n # if we are here and data == None we need to initialize the data dictionary\n if data == None:\n data = {}\n for cname in header:\n typecode = ('f', 'h')[any([cname==s for s in uint_types])]\n data[cname] = array.array(typecode)\n\n for (cname, string) in zip(header, line_as_list):\n if any([cname==s for s in uint_types]):\n value = int(string)\n else:\n value = float(string)\n\n if cname == DAYS:\n\n if value in set(data[DAYS]):\n break\n\n data[cname].append(value)\n\n else:\n raise Exception('Failed to parse line %i, unexpected number of columns.'%(i+1))\n \n fid.close()\n\n # pack the table data into numpy arrays\n for (cname, v) in data.items():\n dtype = (np.float32, np.int16)[any([cname==s for s in uint_types])]\n data[cname] = np.array(v, dtype=dtype)\n\n return (meta, data)", "def goodmorning(host):", "def get_tagged_events():\n\n f = open('event_info.txt', 'w+')\n f.write('')\n f.close()\n\n for category in MEETUP_TAGS:\n events_added = 0\n days = 5\n while events_added < NUM_EVENTS:\n\n urls = set()\n\n today = datetime.date.today()\n tomorrow = today\n\n tomorrow = tomorrow + datetime.timedelta(days=days)\n\n # https://www.meetup.com/find/events/arts-culture/?allMeetups=false&radius=5&userFreeform=New+York%2C+NY&mcId=z10025&month=4&day=20&year=2018&eventFilter=all\n\n url = 'www.meetup.com/find/events/{}/?allMeetups=true&radius=20 \\\n &userFreeform=New+York%2C+NY&mcId=c10001&mcName=New+York%2C+NY \\\n &month={}&day={}&year={}'.format(category,\n tomorrow.month,\n tomorrow.day,\n tomorrow.year)\n\n r = requests.get('https://' + url)\n print('https://' + url)\n data = r.text\n soup = BeautifulSoup(data)\n\n for link in soup.find_all('a'):\n href = link.get('href')\n if '/events/' in href and '/find/' not in href:\n urls.add(href)\n\n if not urls:\n break\n\n for url in urls:\n os.system('python retrieval.py ' + url + ' ' + category)\n events_added += 1\n if events_added > NUM_EVENTS:\n break\n\n print('Finished ' + 
str(days))\n days += 1", "def findWetWeatherDays(self, dbsession, today):\n wetDays = dbsession.query(self.dt).filter(or_(self.weather_description == \"light rain\", self.weather_description == \"moderate rain\")).all()\n # if one of those days is today return it.\n # else just return a wet day.\n for i in range(len(wetDays)):\n if today == wetDays[i][0].weekday():\n return wetDays[i][0]\n else:\n return wetDays[0][0]", "def day_to_day(self):\n while True:\n yield 0", "def read_birthdays(file_path):\r\n # f = open(file_path, 'r')\r\n # return f.readlines()[6:]\r\n with open(file_path) as file:\r\n return file.read()", "def load_data(city, month, day):\n df = pd.read_csv(CITY_DATA[city])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n#changed 'weekday_name' to just 'weekday' which outputs the weekday as integer\n # extract month, day of week and hour from Start Time to create new columns\n df['month'] = df['Start Time'].dt.month\n df['dow'] = df['Start Time'].dt.weekday\n df['hour'] = df['Start Time'].dt.hour\n\n # filter by month if applicable\n if month != 'all':\n # use the index of the months list to get the corresponding int\n month = months.index(month) + 1\n # filter by month to create the new dataframe\n df = df[df['month'] == month]\n\n# problem with the 'day'-filter, if a day (not 'all') is applied, the output is not right\n # filter by day of week if applicable\n if day != 'all':\n\n # filter by day of week to create the new dataframe\n day = days.index(day) + 1\n df = df[df['dow'] == day]\n\n return df", "def get_day(x):\n return x[\"SALE DATE\"].day", "def fetchDynamoFeed(datetime_object):\n\tdynamodb = boto3.resource('dynamodb', aws_access_key_id=k.DYNAMO_ACCESS_KEY, aws_secret_access_key=k.DYNAMO_SECRET_ACCESS_KEY, region_name=k.DYNAMO_REGION_NAME)\n\ttable = dynamodb.Table(k.DYNAMO_TABLE)\n\tresponse = table.scan(\n \t\tFilterExpression=Attr('date').eq(datetime_object) \n\t)\n\treturn response", "def load_data(city, month, day):\n\n df = pd.read_csv(CITY_DATA[city.lower()])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n df['month'] = df['Start Time'].dt.month\n df['day_of_week'] = df['Start Time'].dt.dayofweek # check that this is the right method()\n\n if month != 'all':\n month = months.index(month.lower()) + 1 # take our month input, and index it to get the integer value provided by datetime()\n df = df[df['month'] == month]\n\n if day != 'all':\n day = days.index(day.lower())\n df = df[df['day_of_week'] == day]\n\n return df", "def get_last_seven_days():\n logs = json.load(open(\"seven_log\", \"r\"))\n days = [day for day in logs]\n usage = [[logs[day][gpu] for gpu in logs[day]] for day in logs]\n return days, usage" ]
[ "0.59969056", "0.5583099", "0.5550923", "0.54960376", "0.5361208", "0.52583355", "0.5196197", "0.5166738", "0.5159462", "0.51388514", "0.5126825", "0.5095728", "0.50937", "0.5065566", "0.5062365", "0.5062365", "0.5062365", "0.5062365", "0.5062365", "0.50592273", "0.50472623", "0.5013457", "0.50046337", "0.49728563", "0.497097", "0.4960298", "0.49507278", "0.49205595", "0.49163744", "0.48848736", "0.48832092", "0.48819202", "0.4880095", "0.48707333", "0.4867388", "0.48663282", "0.48618373", "0.48588967", "0.4832728", "0.48279002", "0.48228496", "0.4818596", "0.481331", "0.48056814", "0.48041975", "0.48014995", "0.47934383", "0.47720954", "0.47641987", "0.47475392", "0.47446176", "0.47408968", "0.47400323", "0.47337082", "0.472969", "0.47278714", "0.4724779", "0.4721969", "0.4721397", "0.47202602", "0.47138563", "0.47125486", "0.47072935", "0.47044075", "0.4702052", "0.46931395", "0.46885607", "0.46876433", "0.46825144", "0.4676143", "0.46746683", "0.46685812", "0.46500096", "0.46486744", "0.46231717", "0.4620677", "0.46188736", "0.46184516", "0.46162957", "0.46162692", "0.46088353", "0.46066105", "0.46064156", "0.45993388", "0.45948258", "0.45941657", "0.4592514", "0.4591093", "0.45910645", "0.4590769", "0.45900953", "0.4589957", "0.45862827", "0.45858026", "0.45855215", "0.45769927", "0.45761582", "0.45747423", "0.45739067", "0.45725176" ]
0.6528976
0
This function reads the meteo (weather) data for a given week
def read_meteo_week(filename=None):
    if not filename:
        filename = settings.METEO_WEEK_FILENAME
    # This function is the date parser for the week
    parser = lambda x: pd.datetime.strptime(x, '%Y%W')
    return pd.read_csv(filename, sep=';', parse_dates=[4], date_parser=parser)
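# --- Hedged usage sketch (editor's addition, not from the original record) ---
# Assumptions: `settings.METEO_WEEK_FILENAME` names a ';'-separated CSV whose
# fifth column carries a '%Y%W' year-week stamp; the snippet above confirms
# neither the file layout nor the pandas version. Two caveats it illustrates:
#   * `pd.datetime` was deprecated in pandas 0.25 and removed in 1.0, so a
#     modern equivalent uses the stdlib `datetime` module instead.
#   * strptime only honours '%W' when a weekday directive ('%w') is present;
#     '%Y%W' alone silently resolves every week to January 1st.
import datetime

import pandas as pd


def read_meteo_week_modern(filename):
    df = pd.read_csv(filename, sep=';')
    week_col = df.columns[4]  # same column the original targets via parse_dates=[4]
    # Anchor each year-week stamp to its Monday ('1') so '%W' takes effect.
    df[week_col] = df[week_col].astype(str).map(
        lambda x: datetime.datetime.strptime(x + '-1', '%Y%W-%w'))
    return df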
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def weekly():", "def LoadWeeklyChart(self, week):\n file = '%s.html' % week\n if file in os.listdir(self.HTML_DIR):\n print 'Loading chart for week of %s' % week\n with open(os.path.join(self.HTML_DIR, file), 'r') as f:\n chart = f.read()\n else:\n chart = self.DownloadWeeklyChart(week)\n self.SaveWeeklyChart(chart, file)\n return chart", "async def get_chapel_week(ctx, week_num):\n # week number must be none or a digit.\n if week_num is not None and not week_num.isdigit():\n return\n\n # get chapel schedule.\n contents = json_to_dict('json_files/chapel/schedule.json')\n\n schedule = []\n for week in contents:\n if week_num is not None and week != f'Week {week_num}':\n continue\n\n schedule.append('')\n schedule.append(f'__**{week}**__')\n\n # get chapel information for each week.\n for date in contents[week]:\n day_of_week = contents[week][date]['day_of_week']\n speaker = contents[week][date]['speaker']\n\n schedule.append(f'**{date}** [{day_of_week}] - *{speaker}*')\n\n # print chapel schedule.\n separator = '\\n'\n description = separator.join(schedule)\n\n # print error message.\n if len(description) == 0:\n await send_embed(ctx, title=get_chapel_title(), text=f'*no scheduled chapel for week {week_num}.*')\n\n # display chapel information.\n await send_embed(ctx, title=get_chapel_title(), text=description)", "def read_recipes(year, week):\n # read config file\n cp = ConfigParser()\n cp.read(\"config.ini\")\n \n # load menu data\n fname_json = cp[\"other\"][\"json_out_fname\"]\n if not os.path.exists(fname_json):\n logging.error(\"JSON file not found.\")\n return\n \n with open(fname_json) as f:\n menu = json.load(f)\n \n # read recipes: items >> [courses] >> [recipes]\n recipes = []\n for item in menu[\"items\"]:\n for course in item[\"courses\"]:\n recipes.append(course[\"recipe\"])\n logging.info(\"%d recipes found\", len(recipes))\n\n data = []\n for recipe in recipes:\n recipe_data = []\n recipe_data.append(recipe[\"name\"])\n recipe_data.append(recipe[\"headline\"])\n recipe_data.append(recipe[\"prepTime\"])\n recipe_data.append(recipe[\"ratingsCount\"])\n recipe_data.append(recipe[\"favoritesCount\"])\n\n # nutritions\n for i in range(7):\n recipe_data.append(recipe[\"nutrition\"][i][\"amount\"])\n\n data.append(recipe_data)\n\n column_names = [\"Name\",\"Headline\",\"PrepTime\",\"RatingsCount\",\"FavoritesCount\",\"Nutrition-Energy(KJ)\",\"Nutrition-Fat\",\n \"Nutrition-of which saturates\",\"Nutrition-Carbohydrate\",\"Nutrition-of which sugars\",\"Nutrition-Protein\",\"Nutrition-Sodium\"]\n df_recipes = pd.DataFrame(data, columns = column_names)\n\n # save recipe data into csv\n fname_csv = str(year) + \"_\" + str(week) + \"_menu.csv\"\n df_recipes.to_csv(fname_csv, index=False)\n logging.info(\"recipes exported to csv.\")\n \n\n # extract top 10 recipes based on RatingsCount and FavoritesCount\n params = {\"names\": [\"RatingsCount\",\"FavoritesCount\"],\n \"order\": [False,False]\n }\n df_top_recipes = get_top_recipes(df_recipes, sort_params=params, count=10)\n\n # save top 10 recipes into csv\n fname_out = str(year) + \"_\" + str(week) + \"_TOP_10.csv\"\n df_top_recipes.to_csv(fname_out, index=False)\n logging.info(\"top 10 recipes exported to csv.\")", "def process(raw):\n #global weekNum\n field = None\n entry = {}\n cooked = []\n number = -1\n\n for line in raw:\n log.debug(\"Line: {}\".format(line))\n line = line.strip()\n if len(line) == 0 or line[0] == \"#\":#if # is the first character, skip\n log.debug(\"Skipping\")\n continue\n parts = line.split(':')#split lines 
to before and after \":\"\n if len(parts) == 1 and field:#adds additional content to whatever the previously used field is\n entry[field] = entry[field] + line + \" \" \n continue\n if len(parts) == 2:#if there are 2 parts, the field is the first part and the content is the second part\n field = parts[0]\n content = parts[1]\n else:#if none of the above are correct there is an issue\n raise ValueError(\"Trouble with line: '{}'\\n\".format(line) +\n \"Split into |{}|\".format(\"|\".join(parts)))\n\n if field == \"begin\":#checking if this is the line with the start date\n try:#begin only triggers once (at least it should only trigger once)\n base = arrow.get(content, \"MM/DD/YYYY\")#get the date as an object named \"base\", will need to use this to determine start date and current week, arrow must have a \"current date\"?\n # base is the \"week 1\" date, DD = 1, DD + 7 = 2, DD + 14 = 3, DD + 21 = 4, etc\n #now i will make variables for the start date of each week, or find a way to take the difference between 2 dates\n #end = base#arrow.get(base, \"MM/DD/YYYY\")\n #end = end.shift(weeks=+10)\n #today = arrow.now()\n #today.format(\"MM/DD/YYYY\")\n #if today == base:\n # weekNum = 1\n #number = -1\n \"\"\"weeks = [base, base.shift(days=+7), base.shift(days=+14), base.shift(days=+21), base.shift(days=+28), base.shift(days=+35), base.shift(days=+42), base.shift(days=+49), base.shift(days=+56), base.shift(days=+63), base.shift(days=+70)]\n today = arrow.now()\n for i in range(0,9):\n if weeks[i] <= today <= weeks[i+1]:\n number = i+1\n if today > weeks[10]:\n number = 10\n elif today < weeks[0]:\n number = 0\n #base = arrow.format(\"MM/DD/YYYY\")\n else:\n raise ValueError(\"Big error calculating week\")\n #for index in range(1,70):\n # base = base.shift(days=+1)\n # if today == base:\n # weekNum = weekNum + (index % 7)\n # break \n base = base.format(\"MM/DD/YYYY\")\"\"\"\n except:\n raise ValueError(\"Unable to parse date {}\".format(content))#date is incorrectly formatted, should be MM/DD/YYYY\n #now I need to check if either of these weeks is the current week\n# for r in arrow.Arrow.span_range('day',\n elif field == \"week\":#this is the week number\n if entry:\n cooked.append(entry)\n entry = {}#make entry empty again\n #if content == currentWeekNum:\n #print(\"Content: \" + content)\n #print(\"Week Number: \" + currentWeekNum + \"\\n\")\n #print(\"Is Current Week?\" + currentWeekBool + \"\\n\")\n # currentWeekBool = True\n entry['topic'] = \"\"#these are all \"classes\" in the HTML document\n entry['project'] = \"\"\n entry['week'] = content#put the week number into the \"week\" field in the html document\n #entry['isCurrentWeek'] = currentWeekBool\n #currentWeekBool = False\n #if content == weekNum:\n # entry['bool'] = True\n #else:\n # entry['bool'] = True\n \"\"\"if \n if content == currentWeekNum:\n entry['isCurrentWeek'] = True\n else:\n entry['isCurrentWeek'] = False\"\"\"\n\n elif field == 'topic' or field == 'project':#from if len == 2, set the entry for the field to the content in the html doc\n entry[field] = content\n\n else:\n raise ValueError(\"Syntax error in line: {}\".format(line))\n #entryn = entry + \"\\n\"\n\t#cookedn = cooked + \"\\n\"\n\t#fieldn = field + \"\\n\"\n\t#print(\"Entry: \" + entryn)\n #print(\"Cooked: \" + cookedn)\n #print(\"Field: \" + fiieldn)\n if entry:#appends whatever added stuff to the whole docuemnt\n cooked.append(entry)\n\t#returns formatted document after it has been looped throughi\n #number = getWeekNum(raw)\n weeks = [base, 
base.shift(days=+7), base.shift(days=+14), base.shift(days=+21), base.shift(days=+28), base.shift(days=+35), base.shift(days=+42), base.shift(days=+49), base.shift(days=+56), base.shift(days=+63), base.shift(days=+70)]\n today = arrow.now()\n for i in range(0,9):\n if weeks[i] <= today <= weeks[i+1]:\n number = i+1\n return [cooked, i+1]\n if today < weeks[0]:\n number = 0\n else:\n number = 10\n return [cooked, number]", "def DownloadWeeklyChart(self, week):\n print 'Downloading chart for week of %s' % week\n url = ('http://www.boxofficemojo.com/weekly/chart/?yr=2000&wk=%d&p=.htm'\n % self.GetWeekNum(week))\n response = requests.get(url, headers=self.REQUEST_HEADERS)\n time.sleep(2)\n return response.content", "def read_weo(path: Path, date:str) -> sd.StandardData:\n\tcolumns = sd.RequiredColumns(\n\t\tregion_name_column = 'Country',\n\t\tregion_code_column = 'ISO',\n\t\tcode_column = 'WEO Subject Code',\n\t\tname_column = 'Subject Descriptor',\n\t\tnote_column = 'Country/Series-specific Notes',\n\t\tscale_column = 'Scale',\n\t\tunits_column = 'Units',\n\t\tdescription_column = 'Subject Notes',\n\t\ttag_column = None\n\t)\n\n\tyear, month = date.split('-')\n\t# datasets are saved according to how many previous issues have been published the same year.\n\tmonth = '01' if int(month) < 5 else '02'\n\tdate = timetools.Timestamp(date+'-01')\n\treport = sd.StandardReport(\n\t\tname = 'World Economic Outlook',\n\t\tagency = 'International Monetary Fund',\n\t\turl = f'https://www.imf.org/external/pubs/ft/weo/{year}/{month:>02d}/weodata/download.aspx',\n\t\tdate = date\n\t)\n\n\treturn read_standard_table(path, columns)", "def weekly():\n\n response = {}\n\n # 0..6 => Sunday..Saturday\n for i in range(7):\n hours = []\n interactions = 0\n\n for j in range(25):\n try:\n wfile = open(common.stats_path + '/weekly-average/' + str(i) + '/' + str(j))\n data = wfile.read()\n\n if j == 24:\n interactions = int(data)\n else:\n hours.append(int(data))\n\n wfile.close()\n except IOError:\n if i < 24:\n hours.append(0)\n\n response[DAYS[i]] = {'hours': hours, 'interactions': interactions}\n\n return response", "def news_for_week(self):\n\n raise NotImplementedError", "def week(update: Update, _: CallbackContext) -> None:\n running_total, average_dose_per_day = return_weekly_figure()\n text = \\\n (\n \"\\n๐Ÿ“… *Rolling 7 Day Stats*\\n\" \n + \"\\n\\t\\t\\t๐Ÿ“ˆ Rolling 7 Day Doses - \" + str('{:,}'.format(running_total))\n + \"\\n\\t\\t\\t๐Ÿ’‰ Average Daily Doses - \" + str('{:,}'.format(average_dose_per_day)) \n )\n update.message.reply_markdown(text)\n logger.info(\"Getting week update for \" + str(update.message.chat_id))", "def set_week_day(self, wday):\r\n\t\twdays = ['Domingo', 'Lunes', 'Martes', 'Miercoles',\r\n\t\t\t\t 'Jueves', 'Viernes', 'Sabado']\r\n\t\tfor i in range(7):\r\n\t\t\tif wday == i: \r\n\t\t\t\treturn wdays[i]", "def find_pick_for_week(self, week, key_only=False):\n return self.find_pick('week', week)", "def do_rw(self, arg):\n self.do_timesheet('report week')", "def nflweek(self, irc, msg, args, optlist, optweek):\n \n url = self._b64decode('aHR0cDovL3MzLmFtYXpvbmF3cy5jb20vbmZsZ2MvYWxsU2NoZWR1bGUuanM=')\n \n usePre, useNext, outputWeek = False, False, False\n for (option, arg) in optlist:\n if option == 'pre':\n usePre = True\n \n if optweek:\n if optweek == \"next\":\n useNext = True\n elif optweek.isdigit():\n if usePre: \n if 1 <= int(optweek) <= 4:\n outputWeek = \"Preseason Week %s\" % optweek\n else:\n irc.reply(\"ERROR: Preseason week number must be between 1 and 4.\")\n return\n 
else:\n if 1 <= int(optweek) <= 17:\n outputWeek = \"Week %s\" % optweek\n else:\n irc.reply(\"ERROR: Week must be between 1-17\")\n return \n\n try:\n req = urllib2.Request(url)\n html = (urllib2.urlopen(req)).read()\n except:\n irc.reply(\"Failed to open: %s\" % url)\n return\n \n jsondata = json.loads(html)\n\n week = jsondata.get('week', None) # work with the week data so we know where we are.\n\n if week is None:\n irc.reply(\"Failed to load schedule.\")\n return\n\n currentWeekName = week.get('current', {'current': None}).get('weekName', None) \n nextWeekName = week.get('next', {'next': None}).get('weekName', None) \n\n if currentWeekName is None:\n irc.reply(\"Cannot figure out the current week.\")\n return\n\n games = jsondata.get('content', None) # data in games.\n \n if games is None:\n irc.reply(\"Failed to load the games data.\")\n return\n \n if outputWeek:\n games = [item['games'] for item in games if item['weekName'] == outputWeek]\n weekOutput = outputWeek\n elif useNext:\n games = [item['games'] for item in games if item['weekName'] == nextWeekName]\n weekOutput = nextWeekName\n else:\n games = [item['games'] for item in games if item['weekName'] == currentWeekName]\n weekOutput = currentWeekName\n \n append_list = []\n\n for games in games:\n for t in games:\n awayTeam = self._translateTeam('team', 'nid', t['awayTeamId'])\n homeTeam = self._translateTeam('team', 'nid', t['homeTeamId'])\n append_list.append(\"[\" + t['date']['num'] + \"] \" + awayTeam + \"@\" + homeTeam + \" \" + t['date']['time'])\n \n descstring = string.join([item for item in append_list], \" | \")\n output = \"{0} :: {1}\".format(ircutils.bold(weekOutput), descstring)\n \n irc.reply(output)", "def main():\n print(day_of_week(datetime.now()))\n print(day_of_week(datetime(2019, 7, 4)))\n print(day_of_week(datetime(2013, 12, 25)))\n print(day_of_week(datetime(2000, 1, 1)))", "def weekly_report(session, temperature, windspeed, pathname):\n starttime = session.timeEvent\n endtime = starttime - datetime.timedelta(weeks=1)\n\n try:\n queryperiod = epm.QueryPeriod(starttime, endtime)\n processInterval = datetime.timedelta(minutes=15)\n aggregationdetails = epm.AggregateDetails(processInterval, epm.AggregateType.Interpolative)\n temperature_data = temperature.historyReadAggregate(aggregationdetails, queryperiod)\n windspeed_data = windspeed.historyReadAggregate(aggregationdetails, queryperiod)\n\n \n except:\n raise Exception('get interpolative data error')", "def get_player_stats_from_game(team, year, week):", "def get_by_week(user_id: str, year: int, week: int) -> Optional[Snippet]:\n snippet = Snippet.query.filter_by(\n user_id=user_id, year=year, week=week\n )\n return snippet.first()", "def weekNumber(self): # real signature unknown; restored from __doc__\r\n pass", "def day_of_the_week(arg):", "def wkday_on_first(yr, mon): # returns day of week of first of month of the given year (1/1/2016)\r\n TotalDays = 0\r\n for x in range(1754, yr):\r\n YearNum = yeardays(x)\r\n TotalDays += YearNum\r\n for x in range(1, mon):\r\n MonNum = monthdays(yr, x)\r\n TotalDays += MonNum\r\n WhatDayNum = TotalDays % 7\r\n WhatDay = [\"Tues\", \"Wedn\", \"Thu\", \"Fri\", \"Sat\", \"Mon\"]\r\n return WhatDay[WhatDayNum]", "def get_mta_data2():\n\n url = \"http://web.mta.info/developers/data/nyct/turnstile/turnstile_{}.txt\"\n dfs = []\n\n for week_num in week_nums:\n file_url = url.format(week_num)\n dfs.append(\n pd.read_csv(\n file_url, parse_dates=[[\"DATE\", \"TIME\"]], keep_date_col=True\n )\n )\n return pd.concat(dfs)", 
"def get_schedule(self, day, week):\n return self.schedule['schedule'][day][week]", "def return_weekly_figure():\n today = datetime.datetime.now()\n\n while 1:\n try:\n today_str = str(today.day) + \"/\" + \"{:02d}\".format(today.month) + \"/\" + str(today.year)\n match = covid_table.find(date=today_str)\n match.next()\n running_total = 0\n for i in range(7):\n running_total += return_daily_figure(today)\n today = today - datetime.timedelta(days=1)\n average_dose_per_day = round(running_total/7)\n return running_total, average_dose_per_day \n except:\n today = today - datetime.timedelta(days=1)", "def main():\r\n day, mth = int(input()), int(input())\r\n mths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\r\n wks = [\"Saturday\", \"Sunday\", \"Monday\", \"Tuesday\", \"Wednesday\", \"Thursday\", \"Friday\"]\r\n yrday = sum(mths[:mth-1])+day\r\n cur_idx = 0\r\n for _ in range(yrday):\r\n if cur_idx == 6:\r\n cur_idx = 0\r\n else:\r\n cur_idx += 1\r\n print(wks[cur_idx-1])", "def get_week():\n now = dt.now().date()\n return Week.objects.filter(date__lte=now).order_by('-date').first()", "def get_daily_obj(user='1'):\n daily_id = ''\n submissions = [x for x in sub if x.stickied]\n for submission in submissions:\n if 'daily' in submission.title.lower():\n # we will take the post id and write it to a file\n # then we will have the total number of files\n daily_id = submission\n if user == '1':\n return reddit1.submission(id=daily_id)\n elif user == '2':\n return reddit2.submission(id=daily_id)", "def ParseWeeklyChart(self, html, week):\n print 'Parsing chart for week of %s' % week\n chart = []\n soup = BeautifulSoup(html)\n table = soup.findAll('table')[3]\n table_rows = table.findAll('tr')[3:]\n for tr in table_rows:\n row = {}\n cols = tr.findAll('td')\n # Check whether the first cell in the row has a colspan attribute,\n # in which case we've reached the end of the table.\n try:\n cols[0]['colspan']\n break\n except KeyError:\n pass\n title = cols[2].text\n title = title.replace('\\'', '\\'\\'') # Escape single quotes.\n row['title'] = title\n link = cols[2].find('a')\n m = re.match('.*id=(?P<id>.*)\\.htm.*', str(link).lower())\n row['id'] = m.group('id')\n row['studio'] = cols[3].text\n row['gross'] = re.sub('[^\\d\\.]', '', cols[4].text)\n row['theaters'] = re.sub('[^\\d]', '', cols[6].text)\n row['budget'] = re.sub('[^\\d]', '', cols[10].text) or 'NULL'\n row['week'] = week\n self.InsertChartRow(row)", "def _next_week(self) -> datetime.datetime:\n now = datetime.datetime.now()\n for i in range(7):\n yield now + datetime.timedelta(i)", "def forecast_weekly():\n forecast = get_forecast()\n daily = forecast.daily()\n return daily.summary", "def week_report_handle(fans_type):\n\t#import pdb;pdb.set_trace()\n\tlast_day = datetime.date.today()-timedelta(days=datetime.datetime.today().weekday() + 1)\n\ttoday = datetime.date.today()\n\n\tfans_pages = FansPage.objects.filter(fans_type=fans_type, date__gte=last_day, date__lte=today).order_by(\"date\")\n\n\tstart = fans_pages[0]\n\tlast = fans_pages[len(fans_pages) - 1]\n\n\t#talk_about_is = (last.talk_about_is - start.talk_about_is) / (start.talk_about_is + 0.0) * 100\n\ttalk_about_is = (last.talk_about_is - start.talk_about_is)\n\t#total_like_count = (last.total_like_count - start.total_like_count) / (start.total_like_count + 0.0) * 100\n\ttotal_like_count = (last.total_like_count - start.total_like_count)\n\t#total_fans = (last.total_fans - start.total_fans) / (start.total_fans + 0.0) * 100\n\ttotal_fans = (last.total_fans - 
start.total_fans)\n\treturn {\"talk_about_is\":talk_about_is, \"total_like_count\":total_like_count, \"total_fans\":total_fans, \"start\":start.date, \"last\":last.date}", "def findReadData(day,scope,chan,shot):\n return readData(conf.dataDir + \"%d_01_2013_osc%d/C%dosc%d-%05d.txt\" % (day, scope, chan, scope, shot),\n conf.timeDelay[scope,chan],\n conf.ampMult[scope,chan])", "def spotextract(week,sdpath,bampath):\n print('Extracting data; this may take a few minutes... ',end=''),\n \n #Opening files and initializing dataframes.\n with open(sdpath,encoding='UTF-16') as f:\n sd = pd.read_csv(f,delimiter='\\t',parse_dates=['WeekOf','BreakDt','EndDt','QCDt'], index_col=0)\n #make sure to use parse_dates, or you'll get strings instead of sortable datetimes!\n bam = pd.read_excel(bampath, index_col=0)\n print('Done')\n \n #Create Pivot Table of MCAP Data\n print('Pivoting data... ',end='')\n pt = pd.pivot_table(sd,index='RetMktConcatenated',columns='WeekOf',values='VehicleId', aggfunc=[len])\n print('Done')\n return bam, sd, pt", "def day_of_week(self):\n day_of_week_names = ['Monday', 'Tuesday', 'Wednesday', 'Thursday',\n 'Friday', 'Saturday', 'Sunday']\n diff = self.diff(Date(1, 1, 1970)) + 3\n while diff < 0:\n diff += 7\n print(day_of_week_names[diff % 7])", "def the_week_url():\n return '/timeline/%d/%02d/%d/' % \\\n (datetime.now().year, datetime.now().month, timekit.monthweek(datetime.now()))", "def get_trends_by_week(self):\n try:\n return self.profile_data[\"trendsByWeek\"]\n except Exception as e:\n error_msg = (\"Failed to retrieve weekly trends: {}\"\n \"\".format(str(e)))\n raise PlayerDataException(error_msg)", "def _get_current_week_entries(today, user):\n some_day = today + timedelta(days=1)\n monday_of_week = some_day - timedelta(days=(some_day.isocalendar()[2] - 1))\n sunday_of_week = monday_of_week + timedelta(days=6)\n weekevents = TimeEntry.objects.filter(\n booking_date__gte=monday_of_week, booking_date__lt=sunday_of_week, user=user\n )\n return weekevents", "def full_weeks(self, bot, update, group_name):\n week_number = self.week()\n bot.send_message(update.message.chat_id,\n text='`{}`\\n'.format(group_name) + self.timetable.lessons_week(group_name, week_number),\n parse_mode='Markdown')\n week_number.next()\n bot.send_message(update.message.chat_id,\n text=self.timetable.lessons_week(group_name, week_number),\n parse_mode='Markdown')", "def send_weeklies():\n\n user = irc_handler.get_irc_user_name(line[0])\n\n today = datetime.datetime.now()", "def daysperweek(username):\n path = users_folder_file_path + username\n with open(path + '/preferences.txt', 'r+') as json_file:\n data = json.load(json_file)\n try:\n file = request.files['newfile']\n if file.filename == '':\n return render_template('upload.html', username=username)\n elif 'newfile' in request.files:\n data[\"runner_type\"] = 1\n data[\"prior_training\"] = 1\n json_file.seek(0) # rewind\n json.dump(data, json_file)\n json_file.truncate()\n\n file = request.files['newfile']\n file.save('main/users/{}/activities.csv'.format(username))\n filepath = 'main/users/{}/activities.csv'.format(username)\n process_garmin(filepath, username)\n\n return render_template('daysperweek.html', username=username)\n except BaseException:\n data[\"runner_type\"] = 0\n json_file.seek(0) # rewind\n json.dump(data, json_file)\n json_file.truncate()\n\n return render_template('max_days.html', username=username)", "def get_7d(self):\n records = self.level_model.get_for_period(7)\n self.set_attributes(records, '7 days')", "def 
week(self):\n if self._week.lower() == 'wild card':\n return WILD_CARD\n if self._week.lower() == 'division':\n return DIVISION\n if self._week.lower() == 'conf. champ.':\n return CONF_CHAMPIONSHIP\n if self._week.lower() == 'superbowl':\n return SUPER_BOWL\n return self._week", "def lessons_week(self, bot, update, group_name):\n week_number = self.week()\n bot.send_message(update.message.chat_id,\n text='`{}`\\n'.format(group_name) + self.timetable.lessons_week(group_name, week_number),\n parse_mode='Markdown')", "def get_weekly_project_durations(self, week=0):\n\n # get the start and end of the desired week\n now = dt.datetime.now()\n monday = now.date() - dt.timedelta(days=now.weekday() + 7*week)\n nextmonday = monday + dt.timedelta(days=7)\n\n # get all jobs and associated projects for the selected week\n # there will be one row per job and associated project such that a job\n # which is assigned to two projects will also have two rows\n self.alog.dbcur.execute(\n 'WITH ja (id, start, dur, act) AS ('\n ' SELECT jobs.id, jobs.start, jobs.duration, activities.label '\n ' FROM jobs JOIN activities ON jobs.activity = activities.id '\n ' WHERE jobs.start >= ? AND jobs.start < ?) '\n 'SELECT ja.id, ja.start, ja.dur, ja.act, projects.label '\n 'FROM ja LEFT OUTER JOIN job_pj ON ja.id = job_pj.job '\n ' LEFT OUTER JOIN projects ON job_pj.project = projects.id',\n (monday, nextmonday))\n\n jobs = pd.DataFrame(self.alog.dbcur.fetchall(),\n columns=('id', 'start', 'duration', 'act',\n 'project'))\n\n # do the same thing for people, but do not select jobs here that have a\n # project associated with them\n # note that it's not necessary to outer join here, because I have already\n # got all the necessary information about jobs above\n self.alog.dbcur.execute(\n 'SELECT jobs.id, people.label '\n 'FROM jobs JOIN job_p, people '\n ' ON jobs.id = job_p.job AND job_p.person = people.id '\n 'WHERE jobs.start >= ? 
'\n ' AND jobs.start < ?'\n ' AND jobs.id NOT IN (SELECT job FROM job_pj)',\n (monday, nextmonday))\n\n j_p = pd.DataFrame(self.alog.dbcur.fetchall(),\n columns=('id', 'person'))\n\n # sort the people as projects into the job list\n ids = j_p.id.unique()\n for jid in ids:\n people = j_p[j_p.id == jid].person\n\n row = jobs[jobs.id == jid].copy()\n row.project = people.iloc[0]\n\n # add first person to the corresponding job\n jobs[jobs.id == jid] = row\n\n # if several people are associated with the job, add more rows to the\n # job list\n for person in people.values[1:]:\n row.project = person\n jobs = jobs.append(row, ignore_index=True)\n\n projects = pd.DataFrame(jobs.groupby('project').duration.sum(\n ).sort_values(ascending=False))\n acts = jobs.act.unique()\n\n for act in acts:\n projects[act] = 0\n\n for pj in projects.index:\n actdurs = jobs[jobs.project == pj].groupby('act').duration.sum()\n\n projects.loc[pj, actdurs.index] = actdurs\n\n # remove activities which did not occur in any of the projects\n # (these are project-independent activities)\n projects = projects.T[projects.sum() > 0].T\n\n return projects", "def test_date_accept_this_week(self):\n spi_search = \"find date this week\"\n inv_search = \"year:\" + datetime.datetime.strftime(datetime.datetime.today()\\\n +dateutil.relativedelta.relativedelta(days=-(datetime.datetime.today().isoweekday()%7)), '%Y-%m-%d')\n self._compare_searches(inv_search, spi_search)", "def rideshare_pickups(self, since):\n query = \"\"\"\n SELECT\n week as date,\n SUM(n_trips) as value\n FROM rideshare\n WHERE week >= ?\n GROUP BY date\n \"\"\"\n cur = self.con.cursor()\n cur.execute(query, (since,))\n rows = rows_to_dicts(cur, cur.fetchall())\n return rows", "def print_weekly_forecast(update, context):\n city = context.user_data['city']\n provider = context.user_data['provider']\n data = context.bot_data['forecast_data']\n\n for d in data:\n if d['city'] == city and d['provider'] == provider:\n forecast = d['forecast']['week']\n\n message = f\"ะŸั€ะพะณะฝะพะท ะฟะพะณะพะดั‹ ะฝะฐ ะฝะตะดะตะปัŽ ({datetime.date.today().strftime('%A, %e %B')} - {(datetime.date.today() + datetime.timedelta(days=6)).strftime('%A, %e %B')}):\\n\"\n\n for f in forecast:\n date = datetime.datetime.strptime(f['date'], '%Y-%m-%d')\n message += f\"\"\"\n*{datetime.datetime.strftime(date,'%A')}*:\nะœะธะฝ.: {f['min_temp']}. 
ะœะฐะบั.: {f['max_temp']} \n{f['description']} {f['emoji']}\n\"\"\"\n context.bot.send_message(chat_id=update.effective_chat.id, text=message, parse_mode='markdown')", "def get_week_date():\n return timezone.now()+timezone.timedelta(days=6)", "def read_weekly_publisher_report(self):\n from itertools import repeat\n\n self.ID_TWITTER_BROKEN_AVATAR = kpi_from_db_config.ID_TWITTER_BROKEN_AVATAR\n self.ID_TWITTER_MISSING_AVATAR = kpi_from_db_config.ID_TWITTER_MISSING_AVATAR\n self.ID_TWITTER_MISSING_ISO = kpi_from_db_config.ID_TWITTER_MISSING_ISO\n self.ID_TWITTER_HAS_MANY_ITEMKEY = kpi_from_db_config.ID_TWITTER_HAS_MANY_ITEMKEY\n self.ID_TWITTER_HAS_MANY_PUBLISHER = kpi_from_db_config.ID_TWITTER_HAS_MANY_PUBLISHER\n self.ID_TWITTER_WITHOUT_ITEMKEY = kpi_from_db_config.ID_TWITTER_WITHOUT_ITEMKEY\n\n list_id = [self.ID_TWITTER_BROKEN_AVATAR, \n self.ID_TWITTER_MISSING_AVATAR, \n self.ID_TWITTER_MISSING_ISO, \n self.ID_TWITTER_HAS_MANY_ITEMKEY, \n self.ID_TWITTER_HAS_MANY_PUBLISHER, \n self.ID_TWITTER_WITHOUT_ITEMKEY]\n list_result = [[] for i in repeat(None,len(list_id))]\n for i in range(len(list_id)):\n self.cursor.execute('''\n SELECT value\n FROM public.kpi_report\n WHERE id = %s\n ORDER BY created_at DESC\n LIMIT 2\n ''', [list_id[i]])\n rows_count = self.cursor.rowcount\n \n if (rows_count == 2): # 2 is LIMIT from the query\n for doc in self.cursor:\n list_result[i].append(int(doc[0]))\n elif (rows_count == 1): # Change rows_count > 0 and rows_count < Number of limit\n for doc in self.cursor:\n list_result[i].append(int(doc[0]))\n list_result[i] = list_result[i] + [0] \n else:\n list_result[i] = [0] * 2\n\n return list_result", "def dateweek(line, date):\r\n\tindex = datetime.weekday(date)\r\n\tdateweek = '%s%s%s' % (date.day, cn2en.DATE_WEEK, cn2en.WEEKDAYS[index])\r\n\t\r\n\treturn dateweek == line", "def getTodaysWeather(self, keyword, temp):\n\n\t\t# Variables\n\t\tweather = {} \n\t\tfio = self.helper.getFio(keyword, temp) # Getting fio object\n\t\t\n\t\t# Getting todays weather data and populating the dictionary\n\t\tif fio.has_daily() is True and fio.has_hourly() is True:\n\t\t daily = FIODaily.FIODaily(fio)\n\t\t hourly = FIOHourly.FIOHourly(fio)\n\t\t for day in xrange(0, 1):\n\t\t\t\tfor item in daily.get_day(day).keys():\n\t\t\t\t\tif item == \"temperatureMin\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"summary\":\n\t\t\t\t\t\tweather[item] = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"temperatureMax\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"windSpeed\":\n\t\t\t\t\t\twindSpeed = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"windBearing\":\n\t\t\t\t\t\twindBearing = unicode(daily.get_day(day)[item])\n\t\t\t\t\t\twindBearing = self.helper.convertWindBearing(windBearing)\n\t\t\t\t\tif item == \"sunsetTime\":\n\t\t\t\t\t\tweather[item] = self.helper.getDateForWeather(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"sunriseTime\":\n\t\t\t\t\t\tweather[item] = self.helper.getDateForWeather(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"precipProbability\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\tweather[\"wind\"] = windBearing + \" \" + windSpeed + \" mph\"\n\t\t\t\tfor item in hourly.get_hour(day).keys():\n\t\t\t\t\tif item == \"summary\":\n\t\t\t\t\t\tweather[\"current\"] = unicode(hourly.get_hour(0)[item])\n\t\t\t\t\tif item == \"temperature\":\n\t\t\t\t\t\tweather[item] = 
str(hourly.get_hour(0)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"icon\":\n\t\t\t\t\t\tweather[item] = unicode(hourly.get_hour(0)[item])\n\t\t\t\t\tif item == \"cloudCover\":\n\t\t\t\t\t\tweather[item] = str(hourly.get_hour(0)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\tweather[\"town\"] = self.helper.getCoords(keyword)[2]\n\t\telse:\n\t\t\treturn 'No Todays data'\n\n\t\treturn weather", "def do_rrw(self, arg):\n self.do_timesheet('report extend track week')", "def reporting_week(self):\n\n print(\"Week Numbers:\")\n print(self.time_stamp)\n print(self.time_stamp_iso)\n print(\"Current = {}\".format(self.current_week()))\n print(\"Reporting = {}\".format(self.current_week() - 1))", "def __init__(self, y, w):\n for d in xrange(-10, 370):\n date = datetime.date(y, 1, 1) + datetime.timedelta(d)\n if date.isocalendar() == (y, w, 1):\n date_a = date\n break\n else:\n raise ValueError(\"Invalid week\")\n date_b = date_a + datetime.timedelta(7)\n super(Week, self).__init__(date_a, date_b)", "def getWeeks(year):\n url = \"http://www.boxofficemojo.com/weekend/?yr=%d\" % year\n src = urllib.request.urlopen(url).read()\n soup = BeautifulSoup(src, 'html.parser')\n chart = soup.find(border=\"0\", cellspacing=\"1\", cellpadding=\"5\")\n data = parseTable(chart)\n weeks = [int(row[-1]) for row in data[1:]]\n return weeks", "def record_weeks(self, user, start, end, num=10):\n query = self.user_weeks_between(user, start, end).order_by('-plays')[:num]\n for week in query:\n date = ldates.date_of_index(week.week_idx)\n yield week, date", "def get_week_date(self, raw_week: str) -> tuple:\n\n search_result = re.search(r'^(\\d+.\\d+)\\s+-\\s+\\d+.\\d+', raw_week)\n\n if \"from\" in raw_week:\n week = re.sub(r'^\\D+', '', raw_week)\n\n elif search_result:\n week = search_result.group(1)\n else:\n week = \"{}.{}\".format(current_day, current_month)\n\n week_in_date_format_1900 = datetime.datetime.strptime(week, \"%d.%m\")\n currect_week = week_in_date_format_1900.replace(current_year)\n\n return currect_week.isoformat(), currect_week.isocalendar()[1]", "def get_data_by_time(filename):\n with open(filename, 'r') as f_in:\n # set up csv reader object\n reader = csv.DictReader(f_in)\n result = {}\n result['n_week'] = [0] * 7\n result['d_week'] = [0] * 7\n result['cus_hour'] = [0] * 24\n result['sub_hour'] = [0] * 24\n for data in reader:\n duration = float(data['duration'])\n if data['day_of_week'] == 'Sunday':\n result['n_week'][0] += 1\n result['d_week'][0] += duration\n elif data['day_of_week'] == 'Monday':\n result['n_week'][1] += 1\n result['d_week'][1] += duration\n elif data['day_of_week'] == 'Tuesday':\n result['n_week'][2] += 1\n result['d_week'][2] += duration\n elif data['day_of_week'] == 'Wednesday':\n result['n_week'][3] += 1\n result['d_week'][3] += duration\n elif data['day_of_week'] == 'Thursday':\n result['n_week'][4] += 1\n result['d_week'][4] += duration\n elif data['day_of_week'] == 'Friday':\n result['n_week'][5] += 1\n result['d_week'][5] += duration\n else:\n result['n_week'][6] += 1\n result['d_week'][6] += duration\n\n hour = int(data['hour'])\n if data['user_type'] == 'Customer':\n result['cus_hour'][hour] += 1\n else:\n result['sub_hour'][hour] += 1\n return result", "def compute_heatsum_per_week(heatsum_day, day=5):\n heatsum_week = {}\n for k in heatsum_day:\n year, week, weekday = map(int, datetime.datetime.strftime(datetime.datetime.strptime(k, '%Y-%m-%d'), '%Y %W %w').split())\n if weekday == day:\n heatsum_week[(year, week)] = heatsum_day[k]\n return heatsum_week", "def 
week(self, week):\n\n self._week = week", "def test_week_in_range(self):\n weeks = list(range(1, 54))\n seq = 21\n for week in weeks:\n with self.subTest(week=week):\n result = star_barcode.construct_postscript(\n week=week,\n bwipp_location=self.bwipp,\n issn=self.issn,\n sequence=seq,\n header_line=''\n )\n self.assertGreater(\n result.find(f'{self.issn} {seq:02} {week:02}'),\n -1\n )", "def get_week_order_detailed(date):\n query = sqla.text(\"\"\"\n SELECT \"user\".first_name, \"user\".last_name, \"user\".corridor, \"user\".room, bt.name\n FROM bread_order_date AS bod\n JOIN bread_order AS bo ON bod.id = bo.date_id\n JOIN \"user\" ON bo.user_id = \"user\".id\n JOIN bread_type bt ON bo.type_id = bt.id\n WHERE bod.id = :date\n ORDER BY \"user\".corridor, \"user\".room asc\n \"\"\")\n return db.session.execute(query, {\"date\": date.id})", "def day_of_week(dt):\n cday = dt\n mday = 2\n uday = cday.isocalendar()[2] + mday\n try:\n if uday > 7:\n CURRDAY = uday - 7\n log.debug(\"1;EME;RUNNING;000;Scheduler.py;Setting customized day of week>7 : \", CURRDAY)\n else:\n CURRDAY = uday\n log.debug(\"1;EME;RUNNING;000;Scheduler.py;Setting customized day of week : \", CURRDAY)\n return CURRDAY\n except Exception as e:\n log.exception(\"1;EME;FAILURE;700;SCHEDULE ERROR \" + str(e), exc_info=False)\n sys.exit(0)", "def dayweek_clean(fecha):\n\n try:\n lista = fecha.split(sep = '/')\n fecha = '-'.join(reversed(lista))\n temp = pd.Timestamp(fecha)\n dia_semana = (temp.dayofweek, temp.day_name())\n return dia_semana[1]\n \n except:\n #print ('hola')\n return None", "def get_weekday():\n try:\n day = config.getint(\"threadbot\", \"debug_day\")\n except ConfigParser.NoOptionError:\n d = datetime.date.today()\n day = d.weekday()\n sort_by_new = False\n\n # 0 / Monday / Feedback thread\n # 1 / Tuesday / How do I make this sound thread\n # 2 / Wednesday / There are no stupid questions thread\n # 3 / Thursday / Marketplace thread\n dayname = \"waffles\"\n if day == 0:\n dayname = \"monday\"\n sort_by_new = True\n elif day == 1:\n dayname = \"tuesday\"\n sort_by_new = True\n elif day == 2:\n dayname = \"wednesday\"\n sort_by_new = True\n elif day == 3:\n dayname = \"thursday\"\n sort_by_new = False\n else:\n sys.exit(1) # woo inelegance\n\n return dayname, sort_by_new", "def get_rollover_weeks(shop):\n d = {}\n ods, r = get_rollovers(shop)\n\n for od in ods:\n week = int(od.eta.strftime('%W'))+1\n if d.has_key(week):\n d[week] += int(od.plan)\n else:\n d[week] = int(od.plan)\n\n # remove the pulled from this week\n this_week = int(datetime.datetime.today().strftime('%W'))+1 \n if d.has_key(this_week):\n d[this_week] = d[this_week] - get_pulled(shop)[1] \n\n # build the return list of (week, '00:00') tuples\n l = []\n d = sorted(d.items()) # sort dictionary by week\n for key, minutes in d:\n formatted_time = _get_display_hours(minutes)\n l.append((key,formatted_time))\n\n return l", "def has_picked_week(self, week):\n return self.find_pick_for_week(week, key_only=True) is not None", "def get_arterial(file_path,category):\n book = xlrd.open_workbook(file_path)\n file_name = os.path.basename(file_path)\n year = str(20) + \"\".join([str(s) for s in file_name if s.isdigit()]) ## gets the year from filename\n Month = strptime(file_name[2:5],'%b').tm_mon ## gets month no\n mydate = datetime.date(int(year),Month, 1) ## first day of the month and year\n #mydate_1 = mydate - datetime.timedelta(days=1) ## interested in last month of this year as data corresponds to last month and same year\n mydate_2 = mydate - 
datetime.timedelta(days=368) ## interested in last month of last year as data corresponds to last month and last year \n #monthid1 = str(mydate_1.strftime(\"%Y\")) + str(mydate_1.strftime(\"%m\")) ## 200706 for July 2007 file\n monthid2 = str(mydate_2.strftime(\"%Y\")) + str(mydate_2.strftime(\"%m\")) ## 200606 for July 2007 file\n try:\n if category.lower() == \"rural\":\n index = 3\n elif category.lower() == \"urban\":\n index = 4\n else:\n index = 5\n sheet = book.sheet_by_index(index)\n list_states = sheet.col_values(0)\n xstart = list_states.index('Connecticut')\n xend = list_states.index('TOTALS')\n #list1 = sheet.col_slice(colx= 6,start_rowx=xstart,end_rowx= xend - 1)\n #list1 = [w.value for w in list1]\n list2 = sheet.col_slice(colx= 7,start_rowx=xstart,end_rowx= xend - 1)\n list2 = [w.value for w in list2]\n list3 = sheet.col_slice(colx= 0,start_rowx=xstart,end_rowx= xend - 1)\n list3 = [w.value.lower() for w in list3] ## take lowercase for direct match later\n df = pd.concat([pd.DataFrame(list3),pd.DataFrame(list2)], axis = 1) # pd.DataFrame(list1),\n #col_name_1 = category + '_Arterial_' + monthid1\n col_name_2 = category + '_Arterial_' + monthid2\n df.columns = ['State', col_name_2 ] ## col_name_1,\n df[col_name_2].replace('', np.nan, inplace=True) ## removes rows with blank records ( zonal categories)\n df['State'].replace('', np.nan, inplace=True)\n curr_monthid = str(mydate.strftime(\"%Y\")) + str(mydate.strftime(\"%m\")) ## 200707 for July 2007 file\n df['data_monthid'] = curr_monthid\n df.dropna(subset=[col_name_2], inplace=True)\n df.dropna(subset=['State'], inplace=True)\n df = df[~df.State.str.contains(\"subtotal\")] ### causes problems on joins, there in most files\n df = df[df.State != \"total\"] ## causes problems on joins, is there only in specific files\n df['State'] = df.State.str.strip() ## removes leading and lagging white spaces if any\n df2 = pd.melt(df,id_vars=['State','data_monthid'],var_name=['category'], value_name='Million_Vehicle_Miles')\n return df2\n except:\n print(\"error in file \",os.path.basename(file_path))", "def generate_weekly_report(f, upgrades=False):\n data = collections.OrderedDict()\n\n f.seek(0)\n reader = csv.reader(exclude_headers(f), delimiter='\\t')\n\n cumulative = 0\n\n for row in sorted(reader, key=lambda r: datestr_to_datetime(r[COLUMN_DATE])):\n dt = datestr_to_datetime(row[COLUMN_DATE])\n weekdt = datetime.datetime.strptime('{} {} 0'.format(dt.year, dt.isocalendar()[1]), '%Y %W %w')\n date = datetime_to_str(weekdt)\n install = row[COLUMN_DOWNLOAD_TYPE]\n downloads = int(row[COLUMN_DOWNLOADS])\n\n if install != DOWNLOAD_TYPE_INSTALL:\n continue\n\n if date not in data:\n data[date] = (0, cumulative)\n\n week, cum = data[date]\n cumulative += downloads\n\n data[date] = (week + downloads, cumulative)\n\n # Sort the data\n data = collections.OrderedDict(sorted(data.items(), key=lambda i: i[0]))\n\n return data", "def meetup_day(year, month, day_of_week, day_occurrence):\n \n cal = calendar.monthcalendar(year, month)\n day_of_week_index = days_of_week[day_of_week]\n \n not_teenth = day_occurrence != 'teenth'\n day_is_in_first_week = cal[0][day_of_week_index] != 0\n \n if not_teenth and day_is_in_first_week:\n week_index = week_indices[day_occurrence]\n \n elif not_teenth and not day_is_in_first_week:\n week_index = week_indices[day_occurrence] + 1\n \n else:\n for i in range(len(cal)):\n if cal[i][day_of_week_index] >= 10:\n week_index = i\n break\n\n date = cal[week_index][day_of_week_index]\n return datetime.date(year, month, 
date)", "def daysOfTheWeek():\n daysOfTheWeek: Tuple[str] = [\"Monday\", \"Tuesday\",\n \"Wednesday\", \"Thursday\", \"Friday\", \"Saturday\", \"Sunday\"]\n for dayOfTheWeek in daysOfTheWeek:\n yield dayOfTheWeek", "def get_last_seven_days():\n logs = json.load(open(\"seven_log\", \"r\"))\n days = [day for day in logs]\n usage = [[logs[day][gpu] for gpu in logs[day]] for day in logs]\n return days, usage", "def get_arterial(file_path,category):\n book = xlrd.open_workbook(file_path)\n file_name = os.path.basename(file_path)\n year = str(20) + \"\".join([str(s) for s in file_name if s.isdigit()]) ## gets the year from filename\n Month = strptime(file_name[2:5],'%b').tm_mon ## gets month no\n mydate = datetime.date(int(year),Month, 1) ## first day of the month and year\n mydate_1 = mydate - datetime.timedelta(days=1) ## interested in last month of this year as data corresponds to last month and same year\n mydate_2 = mydate - datetime.timedelta(days=368) ## interested in last month of last year as data corresponds to last month and last year \n #monthid1 = str(mydate_1.strftime(\"%Y\")) + str(mydate_1.strftime(\"%m\")) ## 200706 for July 2007 file\n monthid2 = str(mydate_2.strftime(\"%Y\")) + str(mydate_2.strftime(\"%m\")) ## 200606 for July 2007 file\n try:\n if category.lower() == \"rural\":\n index = 3\n elif category.lower() == \"urban\":\n index = 4\n else:\n index = 5\n sheet = book.sheet_by_index(index)\n list_states = sheet.col_values(0)\n xstart = list_states.index('Connecticut')\n xend = list_states.index('TOTALS')\n #list1 = sheet.col_slice(colx= 8,start_rowx=xstart,end_rowx= xend - 1)\n #list1 = [w.value for w in list1]\n list2 = sheet.col_slice(colx= 9,start_rowx=xstart,end_rowx= xend - 1)\n list2 = [w.value for w in list2]\n list3 = sheet.col_slice(colx= 0,start_rowx=xstart,end_rowx= xend - 1)\n list3 = [w.value.lower() for w in list3] ## take lowercase for direct match later\n df = pd.concat([pd.DataFrame(list3),pd.DataFrame(list2)], axis = 1) # ,pd.DataFrame(list1)\n #col_name_1 = category + '_Arterial_' + monthid1\n col_name_2 = category + '_Arterial_' + monthid2\n df.columns = ['State', col_name_2 ] # col_name_1, \n df[col_name_2].replace('', np.nan, inplace=True) ## removes rows with blank records ( zonal categories)\n df['State'].replace('', np.nan, inplace=True)\n curr_monthid = str(mydate.strftime(\"%Y\")) + str(mydate.strftime(\"%m\")) ## 200707 for July 2007 file\n df['data_monthid'] = curr_monthid\n df.dropna(subset=[col_name_2], inplace=True)\n df.dropna(subset=['State'], inplace=True)\n df = df[~df.State.str.contains(\"subtotal\")] ### causes problems on joins, there in most files\n df = df[df.State != \"total\"] ## causes problems on joins, is there only in specific files\n df['State'] = df.State.str.strip() ## removes leading and lagging white spaces if any\n df2 = pd.melt(df,id_vars=['State','data_monthid'],var_name=['category'], value_name='Million_Vehicle_Miles')\n return df2\n except:\n print(\"error in file \",os.path.basename(file_path))", "def get_week(time_index):\n return np.array(time_index.week).reshape(-1,1)", "def loadUZHMensaForDay(uzhConnectionInfo, date, day, lang, db):\n\n if (lang == \"en\" and \"id_en\" in uzhConnectionInfo.keys()):\n apiUrl = \"https://zfv.ch/\" + lang + \"/menus/rssMenuPlan?type=uzh2&menuId=\" + str(\n uzhConnectionInfo[\"id_en\"]) + \"&dayOfWeek=\" + str(day)\n else:\n apiUrl = \"https://zfv.ch/\" + lang + \"/menus/rssMenuPlan?type=uzh2&menuId=\" + str(\n uzhConnectionInfo[\"id\"]) + \"&dayOfWeek=\" + str(day)\n\n 
print(\"Day: \" + str(day) + \"/5\")\n print(\"Url: \" + str(apiUrl))\n\n return loadUZHMensaForUrl(uzhConnectionInfo, apiUrl, db, lang, date)", "def do_upm(self, arg):\n self.do_timesheet('update week')", "def menu_weekly(self, building_id):\n din = DiningV2(self.bearer, self.token)\n response = {'result_data': {'Document': {}}}\n days = []\n for i in range(7):\n date = str(datetime.date.today() + datetime.timedelta(days=i))\n v2_response = din.menu(building_id, date)\n if building_id in VENUE_NAMES:\n response[\"result_data\"][\"Document\"][\"location\"] = VENUE_NAMES[building_id]\n else:\n response[\"result_data\"][\"Document\"][\"location\"] = v2_response[\"result_data\"][\"days\"][0][\"cafes\"][building_id][\"name\"]\n formatted_date = datetime.datetime.strptime(date, '%Y-%m-%d').strftime('%-m/%d/%Y')\n days.append({\"tblDayPart\": get_meals(v2_response, building_id), \"menudate\": formatted_date})\n response[\"result_data\"][\"Document\"][\"tblMenu\"] = days\n return normalize_weekly(response)", "def analyze_weekly(feature_matrix, config, data_path):\n\n if config['model_name'] == 'XGBoost':\n print(\"XGBoost is used\")\n elif config['model_name'] == 'RandomForestClassifier':\n print(\"RandomForestClassifier is used\")\n if config['late_fusion_flag']:\n print(\"Late fusion is used\")\n else:\n print(\"Early fusion is used\")\n if config['feature_type'] == 'both_feature_types':\n print(\"Both type 1 and 2 features are used\")\n elif config['feature_type'] == 'enterprise':\n print(\"Only enterprise features are used\")\n elif config['feature_type'] == 'log':\n print(\"Only log features are used\")\n # Format: YYYY-WW (according to ISO8601)\n first_pred_time = config['first_week']\n # Format: YYYY-WW (according to ISO8601)\n last_prediction_time = config['last_week']\n # Continue if cont_week is not None\n if config['cont_week'] == 'None':\n # Init classifier_dict\n classifier_dict = dict()\n else:\n # Format: YYYY-WW (according to ISO8601)\n load_time = add_week_to_cwdate(config['cont_week'], weeks=-1)\n # Load the last available pred_time\n with open(data_path + '/interim/results_{}.pickle'.format(load_time), 'rb') as f:\n classifier_dict = pickle.load(f)\n # Format: YYYY-WW (according to ISO8601)\n first_pred_time = config['cont_week']\n # Iterate through pred_time\n feature_matrix = filter_by_feature_type(feature_matrix, feature_type=config['feature_type'])\n # Iterate through weeks\n for pred_time in cal_week_gen(first_pred_time, last_prediction_time):\n print(\"*************************************************************\")\n print(\"Prediction time: {}\".format(pred_time))\n # Log start time to calculate the elapsed time later\n time_start = time.time()\n # Calculate the last used time for train_val data set\n # Format: YYYY-WW (according to ISO8601)\n last_time = add_week_to_cwdate(pred_time, weeks=-config['lookahead_window_len'] - \\\n config['fixed_window_in_weeks']) \\\n if config['fixed_window_in_weeks'] != -1 \\\n else add_week_to_cwdate(feature_matrix.pred_time.min(), weeks=-1)\n # Divide into train_val and test DataFrames\n feature_matrix_train_val, feature_matrix_test = filter_and_split_feature_matrix_by_cal_week(\n feature_matrix[feature_matrix.pred_time > last_time], pred_time, config)\n # Split features and labels and drop unnecessary columns\n # x_test (len(x_test)=1, x_test[0].shape=(num_samples, num_features))\n x_test, y_test = create_list_dataset(feature_matrix_test)\n # Tune hyperparameter\n classifier, info = tune_hyperparameter(feature_matrix_train_val, 
config, data_path)\n # x_train_val (len(x_train_val)=num_train_val_week, x_train_val.shape=(num_train_val_samples, num_features))\n # y_train_val (len(y_train_val)=num_train_val_week, y_train_val.shape=(num_train_val_samples))\n x_train_val, y_train_val = create_list_dataset(feature_matrix_train_val)\n # x_train_val_flat (num_train_val_samples, num_features)\n # y_train_val_flat (num_train_val_samples)\n x_train_val_flat, y_train_val_flat = pd.concat(x_train_val), pd.concat(y_train_val)\n # Sample\n try:\n # Extract best trial\n best_trial = info.value.argmax()\n # Extract ratio\n ratio = info.loc[best_trial, :].params_ratio\n # Extract sampling_strategy\n sampling_strategy = info.loc[best_trial, :].params_sampling_strategy\n # x_train_flat (num_train_val_samples after sampling, num_features)\n # y_train_flat (num_train_val_samples after sampling)\n x_train_val_flat, y_train_val_flat = sampling(x_train_val_flat, y_train_val_flat, ratio,\n flag=sampling_strategy)\n except:\n print(\"Error: Failed to sample the data set\")\n # Train classifier\n classifier.fit(x_train_val_flat, y_train_val_flat)\n # Make prediction on test set\n y_pred, y_prob = predict(x_test[0], classifier)\n # Create model info\n model_info = create_model_info(classifier,\n config, info,\n y_test[0], y_pred, y_prob)\n classifier_dict[pred_time] = model_info\n print(\"***Train and Validation***\")\n output_report(x_train_val, y_train_val, classifier)\n print(\"***Test***\")\n output_report(x_test, y_test, classifier, test_flag=True)\n # Update feature_matrix and (classifier_dict)\n update_log(data_path + '/interim/results_{}.pickle', classifier_dict, pred_time)\n time_diff = time.time() - time_start\n print(\"Time elapsed: {} minutes\".format(round(time_diff / 60, 2)))\n print(\"*************************************************************\")\n return classifier_dict", "def weekly(evictiondata):\r\n evictions_per_week = {}\r\n for index, row in evictiondata.iterrows():\r\n if row['week_date'] not in evictions_per_week.keys():\r\n evictions_per_week[row['week_date']] = row['filings_2020']\r\n else:\r\n evictions_per_week[row['week_date']] += row['filings_2020']\r\n return evictions_per_week", "def get_weather_report(takeoff,weather):\n # HINT: Looping through the dictionary is VERY slow because it is so large\n # You should convert the takeoff time to an ISO string and search for that first.\n # Only loop through the dictionary as a back-up if that fails.\n \n # Search for time in dictionary\n # As fall back, find the closest time before takeoff\n \n from dateutil.parser import parse\n \n result = []\n takeofftime = takeoff.isoformat()\n \n if takeofftime in weather.keys():\n result = weather[takeofftime]\n \n elif takeofftime not in weather.keys():\n weatherlist = list(weather.keys())\n count = len(weatherlist)\n for m in weatherlist[::-1]:\n if m < takeofftime:\n result = weather[m]\n \n else: \n result = None\n \n \n return result", "def week_number(self, bot, update):\n bot.send_message(update.message.chat_id,\n text='Сейчас *{}* учебная неделя.'.format(self.week()),\n parse_mode='Markdown')", "def week_schedule(year, stype, week):\n url = schedule_url(year, stype, week)\n try:\n dom = xml.parse(urllib.request.urlopen(url))\n except urllib.error.HTTPError:\n print >> sys.stderr, 'Could not load %s' % url\n return []\n\n games = []\n for g in dom.getElementsByTagName(\"g\"):\n gsis_id = g.getAttribute('eid')\n games.append({\n 'eid': gsis_id,\n 'wday': g.getAttribute('d'),\n 'year': year,\n 'month': 
int(gsis_id[4:6]),\n 'day': int(gsis_id[6:8]),\n 'time': g.getAttribute('t'),\n 'meridiem': None,\n 'season_type': stype,\n 'week': week,\n 'home': g.getAttribute('h'),\n 'away': g.getAttribute('v'),\n 'gamekey': g.getAttribute('gsis'),\n })\n\n for game in games:\n h = int(game['time'].split(':')[0])\n m = int(game['time'].split(':')[1])\n if 0 < h <= 5: # All games before \"6:00\" are PM until proven otherwise\n game['meridiem'] = 'PM'\n\n if game['meridiem'] is None:\n\n days_games = [g for g in games if g['wday'] == game['wday']]\n preceeding = [g for g in days_games if g['eid'] < game['eid']]\n proceeding = [g for g in days_games if g['eid'] > game['eid']]\n\n # If any games *after* this one are AM then so is this\n if any(g['meridiem'] == 'AM' for g in proceeding):\n game['meridiem'] = 'AM'\n # If any games *before* this one are PM then so is this one\n elif any(g['meridiem'] == 'PM' for g in preceeding):\n game['meridiem'] = 'PM'\n # If any games *after* this one have an \"earlier\" start it's AM\n elif any(h > t for t in [int(g['time'].split(':')[0]) for g in proceeding]):\n game['meridiem'] = 'AM'\n # If any games *before* this one have a \"later\" start time it's PM\n elif any(h < t for t in [int(g['time'].split(':')[0]) for g in preceeding]):\n game['meridiem'] = 'PM'\n\n if game['meridiem'] is None:\n if game['wday'] not in ['Sat', 'Sun']:\n game['meridiem'] = 'PM'\n if game['season_type'] == 'POST':\n game['meridiem'] = 'PM'\n\n return games", "def testWeeklyOvertimes(self):\n dates = self.dates\n for day_num in xrange(28, 31):\n dates.append(utils.add_timezone(\n datetime.datetime(2011, 4, day_num)\n ))\n for day_num in xrange(5, 9):\n dates.append(utils.add_timezone(\n datetime.datetime(2011, 5, day_num)\n ))\n for day in dates:\n self.make_logs(day)\n\n def check_overtime(week0=Decimal('55.00'), week1=Decimal('55.00'),\n overtime=Decimal('30.00')):\n self.login_user(self.superuser)\n response = self.client.get(self.url, self.args)\n weekly_totals = response.context['weekly_totals'][0][0][0][2]\n self.assertEqual(weekly_totals[0], week0)\n self.assertEqual(weekly_totals[1], week1)\n self.assertEqual(weekly_totals[5], overtime)\n check_overtime()\n #Entry on following Monday doesn't add to week1 or overtime\n self.make_logs(utils.add_timezone(datetime.datetime(2011, 5, 9)))\n check_overtime()\n #Entries in previous month before last_billable do not change overtime\n self.make_logs(utils.add_timezone(datetime.datetime(2011, 4, 24)))\n check_overtime()\n #Entry in previous month after last_billable change week0 and overtime\n self.make_logs(utils.add_timezone(\n datetime.datetime(2011, 4, 25, 1, 0)\n ))\n check_overtime(Decimal('66.00'), Decimal('55.00'), Decimal('41.00'))", "def get_weekday_song():\n current_day = day_of_week()\n print(f\"WEEKDAY:{current_day}\")\n if (current_day == \"Monday\"):\n return random.choice([ \\\n \"Monday - Imagine Dragons\", \\\n \"Monday Morning - Quinn XCII\", \\\n \"Monday Mornin' Missin' You - Blake Shelton\", \\\n \"Monday Morning - Fleetwood Mac\", \\\n \"Monday, Monday\", \\\n \"Every Day is a Monday\"])\n elif (current_day == \"Tuesday\"):\n return random.choice([ \\\n \"Tuesdays\", \\\n \"Tuesday (feat Drake)\", \\\n \"Tuesday's Gone\", \\\n \"Tuesday I'll Be Gone\", \\\n \"Taco Tuesday - Migos\", \\\n \"Taco Tuesday - Lil Jon\", \\\n \"Tuesday Afternoon\"])\n elif (current_day == \"Wednesday\"):\n return random.choice([ \\\n \"Wednesday Morning - Macklemore\", \\\n \"Wednesday Night Interlude - Drake\", \\\n \"Wednesday Morning, 
3AM\"])\n elif (current_day == \"Thursday\"):\n return random.choice([ \\\n \"Thursday - The Weeknd\", \\\n \"Thursday - Jess Glyne\", \\\n \"(Thursday) Here's Why I Did Not Go to Work Today\", \\\n \"Like a Summer Thursday\", \\\n \"Sweet Thursday\"])\n elif (current_day == \"Friday\"):\n return random.choice([ \\\n \"Friday Night - Eric Paslay \", \\\n \"Last Friday Night\", \\\n \"Finally Friday - George Jones\", \\\n \"Friday Rascall Flatts\", \\\n \"I Gotta Feeling\", \\\n \"Friday Night in Dixie\", \\\n \"Fridays Child\", \\\n \"Hymn for the Weekend\", \\\n \"Friday Night Fish Fry\", \\\n \"Friday Night - Lady A\", \\\n \"Hello Friday - Flo Rida\"])\n elif (current_day == \"Saturday\"):\n return random.choice([ \\\n \"Louisiana Saturday Night\", \\\n \"American Saturday Night\", \\\n \"Small Town Saturday Night\", \\\n \"Satuday Night's Alright\", \\\n \"Saturday in the Park\", \\\n \"Saturday - Twenty One Pilots\", \\\n \"Saturday Nights - Khalid\", \\\n \"Saturday Sun - Vance Joy\"])\n elif (current_day == \"Sunday\"):\n return random.choice([ \\\n \"Sunday Candy\", \\\n \"Sunday Morning - Parmalee\", \\\n \"Sunday Morning - Maroon 5\", \\\n \"Sunday Best\", \\\n \"Sunday\", \\\n \"Closed on Sunday\", \\\n \"Raining on Sunday\", \\\n \"A Month of Sundays\", \\\n \"That's What I Love About Sunday\", \\\n \"Sunday Drive\", \\\n \"Another Sunday in the South - Miranda Lambert\", \\\n \"Sunday - Sia\", \\\n \"Sunday Morning - Maroon 5\", \\\n \"A Month of Sundays - Don Henly\", \\\n \"Lazing on a Sunday Afternoon - Queen\", \\\n \"Sunday Morning Coming Down\", \\\n \"Blue Sunday - The Doors\", \\\n \"A Sunday Kind of Love - Etta James\"])", "def getCurrentWeek(self):\n return self.wcount % 48", "def lessons_today(self, bot, update, group_name):\n week_number = self.week()\n day_number = pendulum.now('Europe/Kiev')\n\n bot.send_message(update.message.chat_id,\n text='`{}`\\n{}'.format(group_name,\n self.timetable.lessons_per_day(group_name,\n day_number,\n week_number)),\n parse_mode='Markdown')", "def do_upw(self, arg):\n self.do_timesheet('update week')", "def load_data(city, month, day):\n while month != \"\":\n # load data file into a dataframe\n filename = CITY_DATA[city]\n df = pd.read_csv(filename)\n\n # convert the Start Time column to datetime\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n\n # extract month and day of week from Start Time to create new columns\n df['month'] = df['Start Time'].dt.month\n # df['day_of_week'] = df['Start Time'].dt.day_name()\n\n\n try: df['day_of_week'] = df['Start Time'].dt.weekday_name\n except: df['day_of_week'] = df['Start Time'].dt.day_name()\n else: df['day_of_week'] = df['Start Time'].dt.weekday\n \n \n \n df['hour'] = df['Start Time'].dt.hour\n\n\n # filter by month if applicable\n if month != 'all':\n # use the index of the months list to get the corresponding int\n # months = ['january', 'february', 'march', 'april', 'may', 'june','july','august','september','october','november','december']\n month = int(months.index(month)) + 1\n \n # filter by month to create the new dataframe\n df = df.loc[df['month'] == month]\n\n # filter by day of week if applicable\n if day != 'all':\n # filter by day of week to create the new dataframe\n df = df.loc[df['day_of_week'] == day.title()]\n \n return df", "def parse_presence(filename=None):\n if filename is None:\n filename = 'presence.txt'\n member_totals = dict()\n totals = dict()\n total_time = 0.0\n f = gzip.open(filename, 'r')\n\n workdays = KNESSET_WORKING_DAYS\n last_timestamp = None\n 
todays_timestamp = date.today().isocalendar()[:2]\n reports = []\n enough_data = []\n line = f.readline()\n data = line.split(',')\n scrape_time = datetime.strptime(data[0], '%Y-%m-%d %H:%M:%S')\n\n for line in f:\n data = line.split(',')\n last_time = scrape_time\n scrape_time = datetime.strptime(data[0], '%Y-%m-%d %H:%M:%S')\n time_in_day = scrape_time.hour + scrape_time.minute / 60.0\n current_timestamp = scrape_time.isocalendar()[:2]\n\n if scrape_time.weekday() not in workdays or (time_in_day < WORKDAY_START) or (time_in_day > WORKDAY_END):\n continue\n if current_timestamp == todays_timestamp:\n break\n if current_timestamp != last_timestamp: # when we move to next timestamp (week), parse the last weeks data\n if len(reports) > 200: # only if we have enough reports from this week (~50 hours sampled)\n enough_data.append(last_timestamp) # record that we had enough reports this week\n subtotals = dict()\n subtotal_time = 0\n for presence_report in reports:\n minutes = min(presence_report[0], 15) # each report is valid for maximum of 15 minutes\n subtotal_time += minutes\n for i in presence_report[1]:\n if i in subtotals:\n subtotals[i] += minutes\n else:\n subtotals[i] = minutes\n for sub_total in subtotals:\n if sub_total in totals:\n totals[sub_total] += float(subtotals[sub_total])\n else:\n totals[sub_total] = float(subtotals[sub_total])\n total_time += subtotal_time\n else: # not enough data this week.\n # if last_timestamp!=None:\n # not_enough_data.append(last_timestamp)\n pass\n # delete the reports list\n reports = []\n\n for total in totals:\n d = last_timestamp\n weekly_hours_for_member = round(float(totals[total]) / total_time * WORKING_HOURS_PER_WEEK)\n if total in member_totals:\n member_totals[total].append((d, weekly_hours_for_member))\n else:\n member_totals[total] = [(d, weekly_hours_for_member)]\n totals = {}\n total_time = 0.0\n last_timestamp = scrape_time.isocalendar()[:2]\n\n # for every report in the file, add it to the array as a tuple: (time, [list of member ids])\n reports.append(((scrape_time - last_time).seconds / 60, [int(x) for x in data[1:] if len(x.strip()) > 0]))\n return member_totals, enough_data", "def main():\n import sys\n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument('-gh_user', action=\"store\", dest='gh_user', help='GitHub login name. Can also set as env variable of same name.')\n parser.add_argument('-gh_pass', action=\"store\", dest='gh_pass', help='GitHub password. Can also set as env variable of same name.')\n parser.add_argument('-gh_api', action=\"store\", dest='gh_api', help='GitHub URL for the enterprise instance being used.')\n parser.add_argument('-template', action=\"store\", dest='template', help='Markdown template for the weekly.')\n parser.add_argument('-config', action=\"store\", dest='config', help='JSON formatted configuration.')\n parser.add_argument('--test', dest='test', action='store_true')\n\n args = parser.parse_args(sys.argv[1:])\n print \"Running weekly code\"\n\n # Expected arguments.\n gh_user = None\n if args.gh_user:\n gh_user = args.gh_user\n elif 'gh_user' in sys.env:\n gh_user = sys.env['gh_user']\n else:\n gh_user = raw_input('GitHub login:')\n\n gh_pass = None\n if args.gh_pass:\n gh_pass = args.gh_pass\n elif 'gh_pass' in sys.env:\n gh_pass = sys.env['gh_pass']\n else:\n gh_pass = getpass('GitHub password:')\n\n gh_api = args.gh_api\n\n # Parse all the other config from the JSON. 
Should have the template in there too.\n import json\n config_json = None\n with open(args.config, 'r') as jf:\n config_json = json.load(jf)\n\n weekly_config = config_json['weekly_config']\n configs = config_json['projects']\n group_name = config_json['group_name']\n # Allow overriding of the template. Fall back on assuming it is in the JSON.\n if args.template:\n template = args.template\n else:\n template = config_json['template']\n\n # Run the weekly update.\n weekly(\n gh_user,\n gh_pass,\n gh_api,\n weekly_config,\n configs,\n group_name,\n template=template,\n test= True if args.test else False)", "def get_week_range(year, week):\n first_day = datetime.strptime(f\"{year}-W{week}-1\", \"%Y-W%W-%w\").date()\n last_day = first_day + timedelta(days=6)\n return first_day, last_day", "def collect_gw(gw, gameweek_path, data_path, player_path): \n rows = []\n fieldnames = []\n fixtures_home, fixtures_away = get_fixtures(data_path)\n teams = get_teams(data_path)\n names, positions = get_positions(data_path)\n for root, dirs, files in os.walk(player_path):\n for fname in files:\n if fname == 'gw.csv':\n fpath = os.path.join(root, fname)\n fin = open(fpath, 'rU')\n reader = csv.DictReader(fin)\n fieldnames = reader.fieldnames\n for row in reader:\n if int(row['round']) == gw:\n id = int(os.path.basename(root).split('_')[-1])\n name = names[id]\n position = positions[id]\n fixture = int(row['fixture'])\n if row['was_home'] == True or row['was_home'] == \"True\":\n row['team'] = teams[fixtures_home[fixture]]\n else:\n row['team'] = teams[fixtures_away[fixture]]\n row['name'] = name\n row['position'] = position\n rows += [row]\n\n fieldnames = ['name', 'position', 'team'] + fieldnames\n outf = open(os.path.join(gameweek_path, \"gw\" + str(gw) + \".csv\"), 'w', encoding=\"utf-8\")\n writer = csv.DictWriter(outf, fieldnames=fieldnames, lineterminator='\\n')\n writer.writeheader()\n for row in rows:\n writer.writerow(row)", "def get_outdoor_data(temp_dir,site):\n if site == 'berk':\n files_od = glob(join(temp_dir,'outdoor','20*.xlsx'))\n elif site == 'bus':\n files_od = glob(join(temp_dir,'outdoor','Busara*.csv'))\n else:\n raise NameError(site)\n\n dfs = []\n for f in files_od:\n if site == 'berk':\n this_df = pd.read_excel(f,sheet_name=0,usecols='B:D',index_col=0,parse_dates=True, header=1)\n elif site == 'bus':\n this_df = pd.read_csv(f,usecols=[0,1,2],index_col=0,parse_dates=True,header=2)\n \n # drop missing values that prevented conversion to float type\n if this_df.iloc[:,0].dtype != np.float64:\n this_df = this_df[this_df.iloc[:,0] != ' ']\n this_df = this_df.astype(np.float64)\n\n # correct for weird timezones in berkeley datalogger\n this_df = correct_tz(this_df,site)\n \n this_df.columns = ['T','RH']\n this_df.index.name = 'time'\n\n # convert to celsius\n this_df['T'] = (this_df['T'] - 32) * 5/9\n dfs.append(this_df)\n \n df_od = pd.concat(dfs)\n\n # drop duplicated measurements\n df_od = df_od[~df_od.index.duplicated(keep='last')].sort_index()\n \n # separate out into daily min,mean,max\n groups = df_od.groupby(df_od.index.date)\n dfs_od = {'all':df_od,\n 'min': groups.min(),\n 'mean': groups.mean(),\n 'max': groups.max()}\n \n for i in ['min','mean','max']:\n # remove first and last day to ignore days where we did not get full recording\n dfs_od[i] = dfs_od[i].iloc[1:-1,:]\n \n # name index so that we can merge onto multiIndex'd dataframe\n dfs_od[i].index.name = 'date'\n \n return dfs_od", "def lessons_next_week(self, bot, update, group_name):\n week_number = self.week()\n 
week_number.next()\n\n bot.send_message(update.message.chat_id,\n text='`{}`\\n'.format(group_name) + self.timetable.lessons_week(group_name, week_number),\n parse_mode='Markdown')", "def __r7(soup):\n news = []\n ps = soup.find_all('p', class_='trends-thermometer-description')\n i = 0\n\n for p in ps:\n if i == 6: # Six trending topics\n break\n i += 1\n a = p.parent.parent.parent.a\n news.append(dict(title=a['title'], link=a['href']))\n return news", "def post_wednesday(reddit):\n\n politicians = open(POLITICIANS_FILE, \"r\",\n encoding=\"utf-8\").read().splitlines()\n\n for item in load_processed_politicians():\n politicians.remove(item)\n\n # If our pool is empty we reset the file and try again.\n if len(politicians) == 0:\n os.remove(PROCESSED_POLITICIANS_FILE)\n post_wednesday(reddit)\n\n selected_politician = random.choice(politicians)\n\n title = \"Discusión Semanal - {}\".format(selected_politician)\n\n text = open(WEDNESDAY_TEMPLATE_FILE, \"r\",\n encoding=\"utf-8\").read().replace(\"%POLITICIAN%\", selected_politician)\n\n # Submit the text, sticky it and update the log.\n current_submission = reddit.subreddit(config.SUBREDDIT).submit(\n title=title, selftext=text)\n\n reddit.submission(current_submission).mod.sticky()\n update_log(WEDNESDAY_FILE, current_submission.id)\n update_processed_politicians(selected_politician)", "def get_data(weeks=0):\n return get_stravadata(weeks) + get_airdata(weeks)", "def load_data(city, month, day):\n df = pd.read_csv(CITY_DATA[city])\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n#changed 'weekday_name' to just 'weekday' which outputs the weekday as integer\n # extract month, day of week and hour from Start Time to create new columns\n df['month'] = df['Start Time'].dt.month\n df['dow'] = df['Start Time'].dt.weekday\n df['hour'] = df['Start Time'].dt.hour\n\n # filter by month if applicable\n if month != 'all':\n # use the index of the months list to get the corresponding int\n month = months.index(month) + 1\n # filter by month to create the new dataframe\n df = df[df['month'] == month]\n\n# problem with the 'day'-filter, if a day (not 'all') is applied, the output is not right\n # filter by day of week if applicable\n if day != 'all':\n\n # filter by day of week to create the new dataframe\n day = days.index(day) + 1\n df = df[df['dow'] == day]\n\n return df", "def current_week() -> int:\n now = datetime.now()\n return get_week_from_date(now)", "def nflweeklyleaders(self, irc, msg, args):\n \n url = self._b64decode('aHR0cDovL2VzcG4uZ28uY29tL25mbC93ZWVrbHkvbGVhZGVycw==')\n\n try:\n req = urllib2.Request(url)\n html = (urllib2.urlopen(req)).read()\n except:\n irc.reply(\"Failed to open: %s\" % url)\n return\n\n html = html.replace('class=\"oddrow','class=\"evenrow')\n\n soup = BeautifulSoup(html)\n weeklytitle = soup.find('h1', attrs={'class':'h2'}).renderContents().strip()\n tables = soup.findAll('table', attrs={'class':'tablehead'})\n\n object_list = []\n\n for table in tables:\n statcategory = table.find('tr', attrs={'class':'stathead'}).find('td')\n rows = table.findAll('tr', attrs={'class': re.compile('evenrow.*')})\n for row in rows:\n player = row.find('td', attrs={'align':'left'})\n team = player.findNext('td') \n d = collections.OrderedDict()\n d['category'] = statcategory.renderContents().strip()\n d['player'] = str(player.text.replace('.','. 
'))\n d['team'] = team.renderContents().strip()\n object_list.append(d)\n \n passinglist = []\n rushinglist = []\n receivinglist = []\n defensivelist = []\n\n for each in object_list:\n if each['category'] == \"Passing Leaders\":\n passinglist.append(each['player'] + \"(\" + each['team'] + \")\")\n if each['category'] == \"Rushing Leaders\":\n rushinglist.append(each['player'] + \"(\" + each['team'] + \")\")\n if each['category'] == \"Receiving Leaders\":\n receivinglist.append(each['player'] + \"(\" + each['team'] + \")\") \n if each['category'] == \"Defensive Leaders\":\n defensivelist.append(each['player'] + \"(\" + each['team'] + \")\")\n \n irc.reply(ircutils.mircColor(weeklytitle, 'red'))\n irc.reply(ircutils.bold(\"Passing Leaders: \") + string.join([item for item in passinglist], \" | \"))\n irc.reply(ircutils.bold(\"Rushing Leaders: \") + string.join([item for item in rushinglist], \" | \"))\n irc.reply(ircutils.bold(\"Receiving Leaders: \") + string.join([item for item in receivinglist], \" | \"))\n irc.reply(ircutils.bold(\"Defensive Leaders: \") + string.join([item for item in defensivelist], \" | \"))" ]
[ "0.6283611", "0.5888736", "0.5871395", "0.5666913", "0.5573738", "0.556567", "0.5563634", "0.5530871", "0.5527525", "0.54581827", "0.54551387", "0.54536134", "0.539935", "0.53837866", "0.53796595", "0.53618705", "0.5316805", "0.5285709", "0.52359337", "0.52143586", "0.52103406", "0.51956654", "0.51924855", "0.5173632", "0.51469505", "0.5118476", "0.5110601", "0.51083803", "0.51043826", "0.5104362", "0.509747", "0.5079097", "0.50387806", "0.5035801", "0.50350577", "0.50312716", "0.5024252", "0.50219977", "0.5018684", "0.5001042", "0.4987612", "0.49720526", "0.4970388", "0.4962528", "0.4945732", "0.49420083", "0.4937418", "0.49370292", "0.49192908", "0.49107704", "0.4910479", "0.4892671", "0.48886612", "0.48813245", "0.48680675", "0.48498365", "0.48459738", "0.48315302", "0.48271212", "0.4807666", "0.4807442", "0.48009232", "0.47911167", "0.47734705", "0.47629434", "0.47624645", "0.475789", "0.47544837", "0.47494507", "0.4748326", "0.47437066", "0.47435254", "0.47384375", "0.47347143", "0.4732327", "0.4717451", "0.47166944", "0.47127414", "0.4712081", "0.47119918", "0.4701451", "0.4701163", "0.46964097", "0.4695695", "0.4694135", "0.46901157", "0.468465", "0.46777958", "0.46760383", "0.4675732", "0.46732134", "0.4666648", "0.4656962", "0.46562505", "0.46474892", "0.4647167", "0.46421087", "0.46388444", "0.4632411", "0.4622007" ]
0.7170781
0
This function reads the temperature data
def read_temp(filename=None): if not filename: filename = settings.TEMP_FILENAME return pd.read_csv(filename, sep=';', parse_dates=[3], dtype={0: object, 2: object, 3: object})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_temperature():\n temp = 0.0\n with open(\"daily_temp.txt\", \"r\") as f:\n temp = float(f.readline())\n\n return temp", "def readtemperature(self, cTemp):\r\n\t\tdata = bus.read_byte_data(SI7015_DEFAULT_ADDRESS, SI7015_REG_STATUS)\r\n\t\twhile (data & 0x01) != 0 :\r\n\t\t\tdata = bus.read_byte_data(SI7015_DEFAULT_ADDRESS, SI7015_REG_STATUS)\r\n\t\tdata1 = bus.read_byte_data(SI7015_DEFAULT_ADDRESS, SI7015_REG_DATAH)\r\n\t\tdata2 = bus.read_byte_data(SI7015_DEFAULT_ADDRESS, SI7015_REG_DATAH)\r\n\t\t\r\n\t\t# Convert the data to 14-bits\r\n\t\tcTemp = (((data1 * 256.0) + data2) / 4.0)\r\n\t\t\r\n\t\tif cTemp < 0x0140 :\r\n\t\t\tcTemp = 0x0140\r\n\t\telif cTemp > 0x12C0 :\r\n\t\t\tcTemp = 0x12C0\r\n\t\telse :\r\n\t\t\tcTemp = cTemp\r\n\t\t\r\n\t\tcTemp = (cTemp / 32.0) - 50.0\r\n\t\tfTemp = cTemp * 1.8 + 32\r\n\t\t\r\n\t\treturn {'c' : cTemp, 'f' : fTemp}", "def read_temperature(self):\n tRaw = self._read_multiple_bytes_as_array(self.BME280_TEMP_MSB, 3)\n\n return float(self._compensate_temperature((tRaw[0] << 12) + (tRaw[1] << 4) + (tRaw[2] >> 4)))", "def _read_raw_temperature():\n with open(device_file, 'r') as f:\n content = f.readlines()\n return content", "def read_temperature(self):\n self._force_read(False)\n\n tempADC = (self._read_register_1ubyte(self.BME680_TEMP_MSB) << 12) | (self._read_register_1ubyte(self.BME680_TEMP_LSB) << 4) | (self._read_register_1ubyte(self.BME680_TEMP_XLSB) >> 4)\n\n return float(self._compensate_temperature(tempADC))", "def get_temperature(self): # This function implements the equations needed to convert the digital data to degrees celsius\n C_1, C_2, C_3, C_4, C_5, C_6=self.calibration_constants()\n self.digital_temp_data() \n dT = self.tempadc-(C_5*(2**8))\n temperature=(2000+(dT*(C_6/(2**23))))/100\n return temperature, dT", "def read_temperature(self):\n data = self.ag.read_bytes(Register.OUT_TEMP_L, 2)\n return lsm9ds1.to_int16(data)", "def getTemperatureMeasurements(self):\n # self.board.readline()\n self.stop = False\n times = []\n temps = [[], [], []]\n \n # A synchronisation string containing the characters tx is sent before each set of measurements,\n # we ensure correct reading of the measurements by waiting for this string\n while str(self.board.readline()).strip('b\\'\\\\rn') != 'tx':\n pass\n \n while not self.stop:\n # A synchronisation string containing the characters tx is sent before each set of measurements\n tx = self.board.readline()\n if str(tx).strip('b\\'\\\\rn') == 'tx':\n rawData1 = self.board.readline()\n rawData2 = self.board.readline()\n rawData3 = self.board.readline()\n rawData4 = self.board.readline()\n \n \n timeStamp = str(rawData1).strip('b\\'\\\\rn')\n temp1 = str(rawData2).strip('b\\'\\\\rn')\n temp2 = str(rawData3).strip('b\\'\\\\rn')\n temp3 = str(rawData4).strip('b\\'\\\\rn')\n try:\n times.append(float(timeStamp) / 1000)\n temps[0].append(float(temp1) / 128)\n temps[1].append(float(temp2) / 128)\n temps[2].append(float(temp3) / 128)\n # print(f'\\rtime: {float(timeStamp) / 1000:.2f} s, Temperature measured on sensor 1: {float(temp1) / 128:.2f} ยฐC,'\n # f'sensor 2: {float(temp2) / 128:.2f} ยฐC, sensor 3: {float(temp3) / 128:.2f} ยฐC', sep='', end='', flush=True)\n except:\n print(rawData1, rawData2, rawData3, rawData4)\n \n \n if self.stop:\n print('\\nMeasurement finished...')\n \n self.data_stack[self.fetch_kinds[0]] = times\n self.data_stack[self.fetch_kinds[1]] = temps[0]\n self.data_stack[self.fetch_kinds[2]] = temps[1]\n self.data_stack[self.fetch_kinds[3]] = temps[2]\n \n if 
(len(self.data_stack['Sensor 1 Temp']) != len(times) or len(self.data_stack['Sensor 2 Temp']) != len(times) or len(self.data_stack['Sensor 3 Temp']) != len(times)):\n print(\"Warning: There may be some missing values!\")", "def readTempSensor(sensorName) :\n f = open(sensorName, 'r')\n lines = f.readlines()\n f.close()\n return lines", "def temperature(self):\n return self.read_short(65) / 340.0 + 36.53", "def parse_temperature():\n content = _read_raw_temperature()\n\n # get last three characters of first line\n is_valid = content[0][-4:].strip()\n\n # convert to boolean\n is_valid = _validity_to_bool(is_valid)\n\n reading = content[1]\n reading = float(reading.split('=')[-1].strip()) / 1e3\n\n return is_valid, reading, dt.datetime.now()", "def temperature(self):\r\n self._read_temperature()\r\n return self._t_fine / 5120.0", "def get_temperature(self):\r\n\r\n\t# get current resolution\r\n\r\n\tconf = self.read_config()\r\n\tmask = 0x60 # 0110 0000\r\n\tres = conf & mask # extract resolution from config register\r\n\t# get temperature from register\r\n \r\n self.write('\\x00')\r\n data = self.read(2)\r\n t_raw = struct.unpack('>h', data)\r\n\tt_raw = t_raw[0]\r\n\r\n#\tmsb = 0b11110101\r\n#\tlsb = 0b11100000\r\n#\tdata = struct.pack('BB', msb, lsb)\r\n # t_raw = struct.unpack('>h', data)\r\n#\tt_raw = t_raw[0]\r\n#\tprint t_raw\r\n\t\r\n # return t_raw\r\n\t# t_raw = ((msb << 8) + lsb) # convert to 2 Byte Integer\r\n\r\n\tif (res == 0x00): # 9 bit resolution 0.5 degree\r\n\t print \"res: 0.5\"\r\n\t return (t_raw >> 7) * 0.5\r\n\r\n\tif (res == 0x20): # 10 bit resolution 0.25 degree\r\n\t print \"res: 0.25\"\r\n\t return (t_raw >> 6) * 0.25\r\n\r\n\tif (res == 0x40): # 11 bit resolution 0.125 degree\r\n\t print \"res: 0.125\"\r\n\t return (t_raw >> 5) * 0.125\r\n\r\n\tif (res == 0x60): # l2 bit resolution 0.0625 degree\r\n\t print \"res: 0.0625\"\r\n\t return (t_raw >> 4) * 0.0625", "def read_object_temperatureF(self, ):\n return self.read_object_temperatureC() * (9.0/5.0) + 32.0", "def get_temperature(self):\n pass", "def temperature(self):\n done, data = self._request('GP')\n if done:\n return {\n 'ds3231temp': float(data[0])/10,\n 'mcp9808temp': float(data[1])/10,\n 'tmp007temp': float(data[2])/10\n }\n\n raise EvseError", "def read_object_temperatureC(self, ):\n return self._read_temperature(MLX90614_TOBJ1)", "def f2c_file_read_function():\n with open('data.txt', 'r') as infile:\n data = [i.strip().split() for i in infile] # store data as list\n\n F = float(data[-1][-1]) # last item in data should be value\n C = 5/9.0*F - 32\n print(\"The temperatire in Celcius is {:g}\".format(C))", "def read_core_temp(self) -> float:", "def get_data(self):\n return DataGatherer().get_temperature_data()", "def __getRawTemperature(self):\n t1 = self.read_byte_data(self.address, 0x03)\n t2 = self.read_byte_data(self.address, 0x04)\n t3 = self.read_byte_data(self.address, 0x05)\n t = (t1 << 16) | (t2 << 8) | t3\n t = getTwosComplement(t, 24)\n return t", "def get_temperature(self):\n summary = \" \".join(self.get_summary().split())\n pattern = '\\$.... .. .*? .*? (.*?) .*? .*? . .*? .*? . . . .*?'\n temperature = float(re.findall(pattern,summary).pop())\n return temperature", "def get_temp(self):\n lines = self._get_temp_raw()\n\n while not self._is_successful_read(lines):\n time.sleep(0.2)\n lines = self._get_temp_raw()\n \n try: \n temp_file_location = lines[1].find('t=')\n except: \n print(\"ERROR: w1_slave file corrupted. 
No t= found.\")\n \n if temp_file_location is not -1:\n temp_string = lines[1][temp_file_location+2:]\n temp = float(temp_string) / 1000.0\n return temp", "def read_values(self):\n temp, acc, gyro = self.read_ag_data()\n tempc = lsm9ds1.TEMPC_0 + temp * lsm9ds1.TEMP_SENSOR_SCALE\n tempf = (tempc * 9/5) + 32\n acc = [c * lsm9ds1.ACC_SENSOR_SCALE for c in acc]\n gyro = [g * lsm9ds1.DPS_SENSOR_SCALE for g in gyro]\n return tempf, acc, gyro", "def temperature() -> float:", "def _get_temp_raw(self):\n try: \n f = open(self.device_file, 'r')\n lines = f.readlines()\n f.close()\n return lines\n\n except: \n print(\"ERROR: w1_slave file could not be opened (temp sensor)\")", "def read_data(self):\n temperature_data = RS485.read_temperature(self.data_path)\n humidity_data = RS485.read_humidity(self.data_path)\n moisture_data = RH_010_GN.read_moisture(self.data_path)\n o2_data = LB_856.read_o2(self.data_path)\n co2_data = LB_856.read_co2(self.data_path)\n\n self.data = [temperature_data, humidity_data, moisture_data, o2_data, co2_data]", "def _get_data(self):\n with open(self.filename, 'r') as fid:\n # we are not interested in the first line\n fid.readline()\n # second line\n line = fid.readline().strip()\n # the temperature is written in milli-degrees in the form\n # t=23456, but preceeded by a large HEX data dump in the form\n # 2c 00 4b 46 ff ff 0e 10 17 t=21875\n index = line.find('t=') + 2\n temperature = int(line[index:index + 6]) / 1e3\n time_now = self.get_timestamp()\n\n logging.debug(\n 'w1_temp: {0}, datetime: {1}, logger_id: {2}'.format(\n temperature,\n time_now,\n self.logger_id))\n\n ins = self.table(value=temperature,\n logger_id=self.logger_id,\n datetime=time_now)\n\n self.session.add(ins)\n self.session.commit()", "def temperatures():\n\n return station_9281", "def test_temperatures(get_touchmat):\n touchmat = get_touchmat\n\n temperatures = touchmat.temperatures()\n info = touchmat.info()\n check_system_types.check_TemperatureInfoList(temperatures, [info])", "def digital_temp_data(self): # This function will give the initial digital format for temperature data \n self._bus.write_byte(self._addr, 0x58) \n time.sleep(0.05) \n tempadcbytes = self._bus.read_i2c_block_data(self._addr, 0x00) \n time.sleep(0.05) \n self.tempadc=tempadcbytes[0]*65536.0+tempadcbytes[1]*256.0+tempadcbytes[2]", "def read_ambient_temperatureF(self, ):\n return self.read_ambient_temperatureC() * (9.0/5.0) + 32.0", "def get_temperature(data):\n celcius = 0\n celcius = [i for i in data if re.search(r'\\d+[/]', i)]\n \n if celcius == []:\n return \"N/A\"\n celcius = celcius[0].split('/')[0]\n celcius = celcius.replace('M', '-')\n \n try:\n celcius = int(celcius)\n except ValueError:\n return \"N/A\"\n\n farenheit = round((celcius * 9/5) + 32) # formula to get farenheit from celcius\n temperature = \"{0} C ({1} F)\".format(celcius, farenheit)\n return temperature", "def read_weather(self):\n print \"Reading weather data from file\",self.datafile\n tab = ascii.read(self.datafile)\n \n # Fix 'T' values in precipitation column, which represent tiny\n # amounts of rain (not measurable)\n TINY_VALUE = '.005' # 0.005 is half the smallest measurable value\n rain = tab['PrecipitationIn']\n wbad = (rain == 'T')\n rain[wbad] = TINY_VALUE\n rain = numpy.array(rain).astype(\"float\")\n\n # Replace string version of precip with float version\n tab['PrecipIn'] = rain\n tab.remove_column('PrecipitationIn')\n\n self.table = tab", "def test_device_readings_get_temperature(self):\n request = 
self.client().get('/devices/{}/readings/?type=temperature'.format(self.device_uuid))\n\n self.assertEqual(len(request.json), 3)", "def Temperature(self):\n try:\n self.__acqiris_QuantroDLL1.Temperature(self.__instrumentID,byref(self.__temperature),c_bool(True))\n except:\n print \"Could not read temperature\"\n self.__temperature=c_int32(-1)\n #self.notify(\"temperature\",self.__temperature.value) # possible automatic notification to a Frontpanel\n return self.__temperature.value", "def getHourlyTemp(self, keyword, scale):\n\n\t\tweather_data = self.getHourlyWeatherFromCSV(keyword, scale, \"temperature\")\n\t\ttemp_values = [] # Array that will contain all the temperature data\n\t\ttemp_data = {} # Dictionary of temperature data\n\n\t\t# Getting temperature data\n\t\tfor data in weather_data:\n\t\t\ttemp_data[\"x\"] = self.helper.getDateInEpoch(data[\"date\"])\n\t\t\ttemp_data[\"y\"] = float(data[\"temperature\"].split(\"°\")[0].split(\" \")[0])\n\t\t\ttemp_values.append(temp_data)\n\t\t\ttemp_data = {}\n\n\t\treturn temp_values", "def test_take_temperature_readings0001(self, platform):\n\n temps = platform.take_temperature_readings()\n assert type(temps) is dict\n assert all([x.startswith(\"hw.sensors.\") for x in temps.iterkeys()])\n # check temperature readings are within reasonable parameters\n assert all([type(v) == float for v in temps.itervalues()])\n assert all([10 <= v <= 120 for v in temps.itervalues()])", "def read(self):\n try:\n pressure, temperature=self.get_pressure()\n return pressure,temperature\n except Exception:\n logging.exception(\"Pressure Sensor Error\")", "def ReadData(self, tstep):\n fname = self.fname[tstep]\n t0 = self.tind[tstep]\n \n print 'Reading SUNTANS data at time: %s...'%datetime.strftime(self.timei[tstep],'%Y-%m-%d %H:%M:%S') \n nc = Dataset(fname)\n \n self.time = nc.variables['time'][t0]\n \n self.temp = nc.variables['temp'][t0,:,:]\n self.salt = nc.variables['salt'][t0,:,:]\n self.uc = nc.variables['uc'][t0,:,:]\n self.vc = nc.variables['vc'][t0,:,:]\n self.nu_v = nc.variables['nu_v'][t0,:,:]\n self.rho = nc.variables['rho'][t0,:,:]\n self.tau_x = nc.variables['tau_x'][t0,:]\n self.tau_y = nc.variables['tau_y'][t0,:]\n self.eta = nc.variables['eta'][t0,:]", "def get_chip_temperature(self):\n self.check_validity()\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_GET_CHIP_TEMPERATURE, (), '', 10, 'h')", "def temperature_sensor():\n\n\tsensor_name = \"humiture\"\n\treg_addr = 26\n\tdata_len = 4\n\tregist_sensor(sensor_name, reg_addr, data_len)\n\n\t# get sensor data\n\tdata = rospy.wait_for_message('MediumSize/SensorHub/Temperature', Temperature, 2)\n\ttemperature = data.temperature\n\n\tdelete_sensor(sensor_name)\n\treturn temperature", "def read_temp(self):\n return 19.0\n data = self.read(_TEMP_REG, 2)\n temp = ((data[0] * 256) + data[1]) / 16\n if temp > 2047:\n temp -= 4096\n return temp * 0.0625", "def get_temperature(self):\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_GET_TEMPERATURE, (), '', 'i')", "def read_T(self):\n # Read the temerature low-pass filtered with a time constant of 1000 milisecond\n tc = 1000\n raw_t = self._raw_T()\n t = time.ticks_ms()\n e = math.exp(time.ticks_diff(self._filter_time, t)/tc)\n self._filter = (e * self._filter) + ((1-e) * raw_t)\n self._filter_time = t\n return self._filter", "def read_ambient_temperatureC(self, ):\n return self._read_temperature(MLX90614_TA)", "def get_tank_temperature():\n if sensor is None:\n return DEFAULT_TEMPERATURE\n\n try:\n # 
Read the temperature from the I2C sensor.\n return sensor.read_temperature(True)\n except OSError:\n return DEFAULT_TEMPERATURE", "def parse_temperature(prod, regime, lines, data):\n for linenum, line in enumerate(lines):\n if len(line.strip()) < 18:\n continue\n # Repair a broken (E) product, see akrherz/pyIEM#08\n if line[20:23] == \"(E)\" and line[38] == \" \":\n prod.warnings.append(f\"Invalid line repaired |{line}|\")\n line = line.replace(\"(E)\", \"E \")\n tokens = make_tokens(regime, line)\n key = tokens[0].strip().lower()\n if key.upper() not in [\"MAXIMUM\", \"MINIMUM\", \"AVERAGE\"]:\n continue\n data[f\"temperature_{key}\"] = get_number(tokens[1])\n if tokens[2] is not None:\n data[f\"temperature_{key}_time\"] = tokens[2]\n if tokens[3] is not None:\n data[f\"temperature_{key}_record\"] = get_number(tokens[3])\n if tokens[4] is not None and tokens[4].strip() not in [\"\", \"M\", \"MM\"]:\n n = get_number_year(tokens[4])\n if n is not None:\n data[f\"temperature_{key}_record_years\"] = [n]\n else:\n prod.warnings.append(f\"Found invalid year |{tokens[4]}|\")\n if tokens[5] is not None:\n data[f\"temperature_{key}_normal\"] = get_number(tokens[5])\n # Check next line(s) for more years\n while (linenum + 1) < len(lines) and len(\n lines[linenum + 1].strip()\n ) == 4:\n line2 = lines[linenum + 1].strip()\n n = get_number_year(line2)\n if n is not None:\n data.setdefault(\n f\"temperature_{key}_record_years\",\n [],\n ).append(n)\n else:\n prod.warnings.append(f\"Found invalid year |{line2}|\")\n linenum += 1", "def get_temp(self):\n\t\traw_temp = self.read_i2c_word(self.TEMP_OUT0)\n\n\t\t# Get the actual temperature using the formule given in the\n\t\t# MPU-6050 Register Map and Descriptions revision 4.2, page 30\n\t\tactual_temp = (raw_temp / 340.0) + 36.53\n\n\t\treturn actual_temp", "def _get_thermal_zone_temperatures():\r\n temperatures = []\r\n for path in glob.glob('/sys/class/thermal/thermal_zone*/temp'):\r\n try:\r\n temperatures.append(\r\n bin_utils._get_float_from_file(path, 0, None, None) * 0.001)\r\n except IOError:\r\n # Some devices (e.g. Veyron) may have reserved thermal zones that\r\n # are not active. 
Trying to read the temperature value would cause a\r\n # EINVAL IO error.\r\n continue\r\n return temperatures", "def read_temp(self, ctrl_pin):\n bytes_ = self.read_bytes(ctrl_pin)\n int_ = struct.unpack('>H', bytes_)[0]\n if int_ & 0x04 > 1:\n temp_celsius = -1\n else:\n temp_celsius = (int_ >> 3) * 0.25\n return temp_celsius", "def get_temperature(self):\n \n # Get temp readings from both sensors\n humidity_temp = self._sense_hat.get_temperature_from_humidity()\n pressure_temp = self._sense_hat.get_temperature_from_pressure()\n \n # avg_temp becomes the average of the temperatures from both sensors\n # We need to check for pressure_temp value is not 0, to not ruin avg_temp calculation\n avg_temp = (humidity_temp + pressure_temp) / 2 if pressure_temp else humidity_temp\n \n # Get the CPU temperature\n cpu_temp = self._get_cpu_temp()\n \n # Calculate temperature compensating for CPU heating\n adj_temp = avg_temp - (cpu_temp - avg_temp) / 1.5\n \n # Average out value across the last three readings\n return self._get_smooth(adj_temp)", "def temperature(self):\n temp = ct.c_float()\n self.lib.GetTemperatureF(ct.pointer(temp))\n return temp.value", "def get_temperature_sensor(self) -> Tuple[str, int]:\n self.serial.write(b\"t!\")\n temp_sensor = self.__read_response(1)[0]\n if temp_sensor[0:3] == b\"!th\":\n temp_sensor = self.__extract_int(temp_sensor, b\"!th\")\n # if we get 65536, the sensor is not connected\n if temp_sensor == 65535:\n raise CloudWatcherException(\n \"High precision RHEL/temp sensor not connected\"\n )\n return \"th\", temp_sensor\n else:\n temp_sensor = self.__extract_int(temp_sensor, b\"!t\")\n # if we get 100, the sensor is not connected\n if temp_sensor == 100:\n raise CloudWatcherException(\n \"Low precision RHEL/temp sensor not connected\"\n )\n return \"t\", temp_sensor", "def get_temp():\n count = 0\n while True:\n # Temp\n output = subprocess.check_output(\n [\"/home/andy/python/bitbucket/pitemp/Adafruit_DHT\", \"2302\", \"4\"])\n count += 1\n print (\"Attempt %s: %s\") % (count, output)\n temp_match = re.search(\"Temp =\\s+([0-9.]+)\", output)\n humid_match = re.search(\"Hum =\\s+([0-9.]+)\", output)\n\n # if the beginning of output contains temp and numbers,\n # we can assume we are getting valid data\n if temp_match:\n temp = float(temp_match.group(1))\n humidity = float(humid_match.group(1))\n break\n\n return (temp, humidity)", "def process_temp_data(self, data):\n\n # Temperatures have the following syntax:\n # name=TempX,serial_num=blahblah,value=50,units=F\n sensor_details = data.split(',')\n serial_num = sensor_details[1].split('=')[1]\n value = sensor_details[2].split('=')[1]\n\n # ard_data has the following syntax\n # self.ard_data['tempsensors'][t.name]['serial_num']\n\n for tname in self.ard_data['tempsensors'].keys():\n if self.ard_data['tempsensors'][tname]['serial_num'] == serial_num:\n self.ard_data['tempsensors'][tname]['value'] = value", "def temperature(self) -> Optional[float]:\n return self.data.get(\"temp\")", "def temperature_f(self, tuple_data, status):\r\n fahr_search = Temperature.fahr.search(status)\r\n temperature = None\r\n try:\r\n if fahr_search != None:\r\n temperature = fahr_search.group(2).replace(\",\", \".\")\r\n temperature = float(temperature)\r\n else:\r\n celcius_search = Temperature.celcius.search(status)\r\n if celcius_search != None:\r\n temperature = celcius_search.group(2).replace(\",\", \".\")\r\n temperature = float(temperature)\r\n temperature = ((9.0/5) * temperature) + 32\r\n except ValueError:\r\n print 
\"Encoding error on '%s'\" % (status)\r\n return temperature", "def temperature():\n\tsensors = commands.getstatusoutput('sensors -u | grep -E temp[0-9]_input')\n\n\tif sensors[0] == 1:\n\t\traise Exception('lm-sensors is not setup. Run sensors-detect')\n\n\tif sensors[0] == 127:\n\t\traise Exception('lm-sensors is not installed')\n\n\ttemps = re.findall(r\"(\\d{2}.\\d+)\",sensors[1],re.M)\n\n\tif not temps:\n\t\traise Exception('No temperature sensors found')\n\n\tfor i,temp in enumerate(temps):\n\t\ttemps[i] = float(temp)\n\t\ttemps[i] = int(temps[i])\n\n\treturn max(temps)", "async def async_get_temperature(self):\n if self.token is None:\n await self.async_initialize_token()\n\n self.temperature = None\n raw = await self._async_ws_get_function(CMD_TEMPERATURE)\n\n f_to_c = lambda f: (5.0 / 9) * (f - 32)\n try:\n xml_root = element_tree.fromstring(raw)\n self.temperature = Temperature(\n tunerTemperature=f_to_c(int(xml_root.find(\"TunnerTemperature\").text)),\n temperature=f_to_c(int(xml_root.find(\"Temperature\").text)),\n )\n except (element_tree.ParseError, TypeError):\n _LOGGER.warning(\"Can't read temperature from %s\", self.host)\n self.token = None\n raise exceptions.ConnectBoxNoDataAvailable() from None", "def Get_Vital_Temp(raw_data,\n temp_startpos,\n temp_endpos):\n temp_ = raw_data[temp_startpos:temp_endpos]\n temp_ = temp_[2:4] + temp_[0:2]\n print(f'| raw_temp = {temp_}')\n temperature = Convert_Hex_To_Decimal(temp_) / 10\n return temperature", "def getTemperature(self):\n return self.temperature", "def check_temperature(self):\n if self.type == 'DS18B20':\n if self.unit == 'C':\n temperature = int(self.temp_sensor.get_temperature(W1ThermSensor.DEGREES_C))\n else:\n temperature = int(self.temp_sensor.get_temperature(W1ThermSensor.DEGREES_F))\n humidity = -999\n if (self.type == 'DHT11') or (self.type == 'DHT22'):\n for i in range(5): # try for valid readings 5 times; break if valid\n try:\n if self.unit == 'C':\n temperature = self.temp_sensor.temperature\n else:\n temperature = self.temp_sensor.temperature * (9 / 5) + 32\n temperature = float(format(temperature, '.1f'))\n humidity = self.temp_sensor.humidity\n humidity = float(format(humidity, '.1f'))\n break # break out of for loop if got valid readings\n except RuntimeError:\n sleep(3) # wait 3 seconds and try again\n pass # this will retry up to 5 times before exiting the for loop\n\n if abs(temperature - self.last_reading_temp) >= self.min_difference:\n # temperature has changed from last reported temperature, therefore\n # send an event message reporting temperature by appending to send_q\n temp_text = str(temperature) + \" \" + self.unit\n text = '|'.join([self.event_text, 'Temp', temp_text])\n text_and_image = (text, self.tiny_image)\n self.send_q.append(text_and_image)\n self.last_reading_temp = temperature\n if abs(humidity - self.last_reading_humidity) >= self.min_difference:\n # humidity has changed from last reported humidity, therefore\n # send an event message reporting humidity by appending to send_q\n humidity_text = str(humidity) + \" %\"\n # Spelling of humidity all lower case is intentional to avoid\n # first letter test of \"Heartbeat\" in imagehub\n text = '|'.join([self.event_text, 'humidity', humidity_text])\n text_and_image = (text, self.tiny_image)\n self.send_q.append(text_and_image)\n self.last_reading_humidity = humidity", "def temperature(self):\n self.convert_window(\"Temperature\", \"Celsius\", [\"Celsius\", \"Fahrenheit\", \"Kelvin\", \"Rankine\", \"Reaumur\", \"Newton\", \"Romer\", 
\"Delisle\"])", "def test_temperatures_when_data_present(self):\n\n temp_data = [(1.00, time.localtime()), (2.00, time.localtime()),\n (3.00, time.localtime()), (4.00, time.localtime())]\n\n tt = TemperatureTracker(temp_data)\n result = tt.temperatures()\n for i in range(0, len(result)):\n self.assertEqual(result[i][0], temp_data[i][0])\n self.assertEqual(result[i][1], temp_data[i][1])", "def temperature(self):\n names = ['anc_air_temperature']\n return self.sensor.get_with_fallback('temperature', names)", "def read_temp(temp):\n type_dict = {\"string\": str, \"unknown\": str, \"numeric\": float}\n with open(temp, 'r') as topen:\n feature_lines = topen.readlines()\n feature_labels = []\n feature_types = []\n for i, row in enumerate(feature_lines):\n if row.startswith(\"@attribute\"):\n flabel, ftype = row[11:-1].split(' ')\n feature_labels.append(flabel)\n feature_types.append(type_dict[ftype])\n elif row.startswith(\"@data\"):\n feature_values = feature_lines[i+1].split(\",\")\n if len(feature_values) < len(feature_labels):\n feature_values = feature_lines[i+2].split(\",\")\n for i, item in enumerate(feature_values):\n try:\n feature_values[i] = (feature_types[i](item))\n except:\n feature_values[i] = item\n return(dict(zip(feature_labels, feature_values)))", "def getTemperature(self):\n with self.lock:\n temp = self.temp\n return temp", "def read_raw_data(self):\n dat_file = os.path.join(DATA_DIR, self.patient_number + '.txt')\n if not os.path.exists(dat_file):\n raise AssertionError(\"{} doesn't exist.\".format(dat_file))\n time = []\n voltage1 = []\n voltage2 = []\n with open(dat_file, 'r') as fd:\n for line in fd:\n line = line.split()\n time.append(line[0])\n voltage1.append(float(line[1]))\n voltage2.append(float(line[2]))\n\n tags_file = os.path.join(DATA_DIR, self.patient_number + '_tag.txt')\n if not os.path.exists(dat_file):\n raise AssertionError(\"{} doesn't exist.\".format(tags_file))\n tags_time = []\n tags = []\n r_peaks_indexes = []\n with open(tags_file, 'r') as fd:\n for line in fd:\n line = line.split()\n tags_time.append(line[0])\n tags.append(line[2])\n r_peaks_indexes.append(int(line[1]))\n return time, voltage1, voltage2, tags_time, tags, r_peaks_indexes", "async def get_temperatures(self, **kwargs: Any) -> Dict[str, float]:\n ...", "def temperature():\n from .imperial import deg_F as F\n from .imperial import deg_R as R\n\n K = si.K\n C = si.deg_C\n\n return Equivalency(\n [\n (K, C, lambda x: x - 273.15, lambda x: x + 273.15),\n (C, F, lambda x: x * 1.8 + 32.0, lambda x: (x - 32.0) / 1.8),\n (K, F, lambda x: x * 1.8 - 459.67, lambda x: (x + 459.67) / 1.8),\n (R, F, lambda x: x - 459.67, lambda x: x + 459.67),\n (R, C, lambda x: (x - 491.67) * (5 / 9), lambda x: x * 1.8 + 491.67),\n (R, K, lambda x: x * (5 / 9), lambda x: x * 1.8),\n ],\n \"temperature\",\n )", "def writetemperature(self):\r\n\t\tTEMP_CONFIG = (SI7015_REG_CONFIG_CNVRSN_ON | SI7015_REG_CONFIG_TEMP)\r\n\t\tbus.write_byte_data(SI7015_DEFAULT_ADDRESS, SI7015_REG_CONFIG, TEMP_CONFIG)", "def get_temperature(self, day):\r\n return self.temperatures[day]", "def read_w1(onewire_temperature_c, sensor_mappings):\n base_dir = '/sys/bus/w1/drivers/w1_slave_driver/'\n\n # Get our device:\n path_mappings = {}\n for (directory, dirs, files) in os.walk(base_dir):\n for dev_dir in dirs:\n try:\n #id_file = open('{0}/{1}/id'.format(base_dir, dev_dir), 'r')\n #id_val = id_file.read().encode('hex').upper()\n id_val = dev_dir\n #id_file.close()\n therm_file = open('{0}/{1}/w1_slave'.format(base_dir, dev_dir), 'r')\n 
path_mappings[id_val] = therm_file\n except (OSError, IOError) as e:\n print('Skipping {0} due to error: {1}'.format(dev_dir, str(e)), file=sys.stderr)\n break\n\n while 1:\n for device_id, therm_file in path_mappings.items():\n therm_contents = therm_file.read()\n therm_file.seek(0)\n\n m = re.search(r't=(-?\\d+)$', therm_contents)\n if m:\n temperature = (float(m.group(1)) / 1000)\n # A reading of 85000 seems to mean \"it's not working\". If you actually want to\n # measure things that are 85°C, then my apologies.\n if temperature != 85:\n onewire_temperature_c.labels(location=sensor_mappings[device_id]).set(temperature)\n\n time.sleep(1)", "def raw_sensor_temp(self):\n\n # return the value in millicelsius\n return float(self.raw_sensor_strings[1].split(\"=\")[1])", "def temperature(self):\n return self._temperature", "def temperature(self):\n return self._temperature", "def temp(self):\n\t\ttemp_out = self.read16(MPU9250_ADDRESS, TEMP_DATA)\n\t\ttemp = temp_out / 333.87 + 21.0 # these are from the datasheets\n\t\treturn temp", "def get_external_temp():\n baseurl = \"http://api.openweathermap.org/data/2.5/weather\"\n query = \"?q=salhouse&mode=xml\"\n url = baseurl + query\n r = requests.get(url)\n root = ET.fromstring(r.text)\n kelvin = float(root[1].attrib.get('value'))\n celcius = kelvin - 272.15\n return celcius", "def get_temperature(self, sensor):\n if sensor >= self.num_temperatures or sensor < 0:\n raise I2CException('Illegal sensor index {} specified'.format(sensor))\n\n return self.__temperature_values[sensor]", "def get_temperature(self, unit='C'):\n data = self.get_data()\n return convert_temperature(data['temperature'], unit=unit)", "def read(self, read_raw=None):\n if read_raw is None:\n pres_raw, temp_raw, hum_raw = self.read_raw()\n else:\n pres_raw, temp_raw, hum_raw = read_raw\n\n t_fine_in = -self._raw_to_t_fine(temp_raw)\n temp = self.raw_to_calibrated_temp(t_fine_in)\n pres = self.raw_to_calibrated_pressure(pres_raw, t_fine_in)\n hum = self.raw_to_calibrated_humidity(hum_raw, t_fine_in)\n return pres, temp, hum", "def present_temperature(self):\n return self._read(MX_PRESENT_TEMPERATURE)", "def get_sensors_data(self):\n\n temp_in_celsius = self.get_temperature()\n\n return (\n round(temp_in_celsius, 1), \n round(self.to_fahrenheit(temp_in_celsius), 1), \n round(self.get_humidity(), 0), \n round(self.get_pressure(), 1)\n )", "def _read_calibration_data(self):\n #Declare global variables.\n global calT1\n global calT2\n global calT3\n global calP1\n global calP2\n global calP3\n global calP4\n global calP5\n global calP6\n global calP7\n global calP8\n global calP9\n global calP10\n global calH1\n global calH2\n global calH3\n global calH4\n global calH5\n global calH6\n global calH7\n global calGH1\n global calGH2\n global calGH3\n global calResHeatRange\n global calResHeatVal\n global calRangeSwErr\n\n #Temperature calibration.\n calT1 = self._read_2bytes_as_ushort_lsbfirst(self.BME680_T1_LSB_REG)\n calT2 = self._read_2bytes_as_short_lsbfirst(self.BME680_T2_LSB_REG)\n calT3 = self._read_register_1sbyte(self.BME680_T3_REG)\n\n #Pressure calibration.\n calP1 = self._read_2bytes_as_ushort_lsbfirst(self.BME680_P1_LSB_REG)\n calP2 = self._read_2bytes_as_short_lsbfirst(self.BME680_P2_LSB_REG)\n calP3 = self._read_register_1sbyte(self.BME680_P3_REG)\n calP4 = self._read_2bytes_as_short_lsbfirst(self.BME680_P4_LSB_REG)\n calP5 = self._read_2bytes_as_short_lsbfirst(self.BME680_P5_LSB_REG)\n calP6 = self._read_register_1sbyte(self.BME680_P6_REG)\n calP7 = 
self._read_register_1sbyte(self.BME680_P7_REG)\n        calP8 = self._read_2bytes_as_short_lsbfirst(self.BME680_P8_LSB_REG)\n        calP9 = self._read_2bytes_as_short_lsbfirst(self.BME680_P9_LSB_REG)\n        calP10 = self._read_register_1ubyte(self.BME680_P10_REG)\n\n        #Humidity calibration.\n        calH1 = self._read_register_1ubyte(self.BME680_H1_MSB_REG) << 4 | (self._read_register_1ubyte(self.BME680_H1_LSB_REG) & 0x0F)\n        calH2 = self._read_register_1ubyte(self.BME680_H2_MSB_REG) << 4 | ((self._read_register_1ubyte(self.BME680_H2_LSB_REG)) >> 4)\n        calH3 = self._read_register_1sbyte(self.BME680_H3_REG)\n        calH4 = self._read_register_1sbyte(self.BME680_H4_REG)\n        calH5 = self._read_register_1sbyte(self.BME680_H5_REG)\n        calH6 = self._read_register_1ubyte(self.BME680_H6_REG)\n        calH7 = self._read_register_1sbyte(self.BME680_H7_REG)\n\n        #Gas calibration.\n        calGH1 = self._read_register_1sbyte(self.BME680_GH1_REG)\n        calGH2 = self._read_2bytes_as_short_lsbfirst(self.BME680_GH2_LSB_REG)\n        calGH3 = self._read_register_1sbyte(self.BME680_GH3_REG)\n\n        #Heat calibration.\n        calResHeatRange = (self._read_register_1ubyte(self.BME680_RES_HEAT_RANGE) & 0x30) / 16\n        calResHeatVal = self._read_register_1sbyte(self.BME680_RES_HEAT_VAL)\n        calRangeSwErr = (self._read_register_1sbyte(self.BME680_RANGE_SW_ERR) & 0xF0) / 16", "def read_tph(self):\n        resultsTPH = [ 0.0, 0.0, 0.0 ]\n\n        self._force_read(False)\n\n        tempADC = (self._read_register_1ubyte(self.BME680_TEMP_MSB) << 12) | (self._read_register_1ubyte(self.BME680_TEMP_LSB) << 4) | (self._read_register_1ubyte(self.BME680_TEMP_XLSB) >> 4)\n        presADC = (self._read_register_1ubyte(self.BME680_PRESS_MSB) << 12) | (self._read_register_1ubyte(self.BME680_PRESS_LSB) << 4) | (self._read_register_1ubyte(self.BME680_PRESS_XLSB) >> 4)\n        humADC = (self._read_register_1ubyte(self.BME680_HUM_MSB) << 8) | (self._read_register_1ubyte(self.BME680_HUM_LSB))\n\n        resultsTPH[0] = float(self._compensate_temperature(tempADC))\n        resultsTPH[1] = float(self._compensate_pressure(presADC))\n        resultsTPH[2] = float(self._compensate_humidity(humADC))\n\n        return resultsTPH", "def _extract_raw_data(self, lines):\r\n\r\n        i = self._find_first_data_point(lines)\r\n        if self._lines_have_temperature(lines[i]):\r\n            self._T = []\r\n\r\n        if self._has_drift_points(lines):\r\n            while i < len(lines) and lines[i][0] in ['+', '-']:\r\n                self._extract_drift_point(lines[i])\r\n                i += 2\r\n                i += self._extract_next_forc(lines[i:])\r\n                i += 1\r\n        else:\r\n            while i < len(lines) and lines[i][0] in ['+', '-']:\r\n                i += self._extract_next_forc(lines[i:])\r\n                self._extract_drift_point(lines[i-1])\r\n                i += 1\r\n\r\n        return", "def temperature(self):\r\n        try:\r\n            return str(self.connect()['main']['temp'])\r\n        except:\r\n            return '@weather_temperature'", "def read_data(self, loc):\n        pass", "def read_tph(self):\n        resultsTPH = [ 0.0, 0.0, 0.0 ]\n\n        tRaw = self._read_multiple_bytes_as_array(self.BME280_TEMP_MSB, 3)\n        pRaw = self._read_multiple_bytes_as_array(self.BME280_PRESS_MSB, 3)\n        hRaw = self._read_multiple_bytes_as_array(self.BME280_HUM_MSB, 2)\n\n        resultsTPH[0] = float(self._compensate_temperature((tRaw[0] << 12) + (tRaw[1] << 4) + (tRaw[2] >> 4)))\n        resultsTPH[1] = float(self._compensate_pressure((pRaw[0] << 12) + (pRaw[1] << 4) + (pRaw[2] >> 4)))\n        resultsTPH[2] = float(self._compensate_humidity((hRaw[0] << 8) + hRaw[1]))\n\n        return resultsTPH", "def getCl(filename):\n    powSpec = pf.getdata(filename,1)\n    temps = powSpec.field('TEMPERATURE')\n    ell = np.arange(temps.size)\n    return ell,temps", "def readhumidity(self, cTemp):\r\n\t\tdata = 
bus.read_byte_data(SI7015_DEFAULT_ADDRESS, SI7015_REG_STATUS)\r\n\t\twhile (data & 0x01) != 0 :\r\n\t\t\tdata = bus.read_byte_data(SI7015_DEFAULT_ADDRESS, SI7015_REG_STATUS)\r\n\t\tdata1 = bus.read_byte_data(SI7015_DEFAULT_ADDRESS, SI7015_REG_DATAH)\r\n\t\tdata2 = bus.read_byte_data(SI7015_DEFAULT_ADDRESS, SI7015_REG_DATAH)\r\n\t\t\r\n\t\t# Convert the data to 12-bits\r\n\t\thumidity = ((data1 * 256 + (data2 & 0xF0)) / 16.0)\r\n\t\t\r\n\t\tif humidity < 0x180 :\r\n\t\t\thumidity = 0x180\r\n\t\telif humidity > 0x7C0 :\r\n\t\t\thumidity = 0x7C0\r\n\t\telse :\r\n\t\t\thumidity = humidity\r\n\t\t\r\n\t\thumidity = (humidity / 16.0) - 24.0\r\n\t\tlinearhumidity = humidity - (((humidity * humidity) * (-0.00393)) + (humidity * 0.4008) - 4.7844)\r\n\t\ttempcomphumidity = linearhumidity + ((cTemp - 30.00) * (linearhumidity * 0.00237 + 0.1973))\r\n\t\t\r\n\t\treturn {'h' : humidity, 'l' : linearhumidity, 't' : tempcomphumidity}", "def get_list_temperature(self,typ,file_number):\n if typ == 'emis':\n return self.beam_emis[file_number].temperature\n elif typ == 'atte':\n return self.beam_atte[file_number].temperature\n else:\n raise NameError('No list with this name: {0}'.format(typ))", "def temperature(self) -> float:\n # Start a measurement then poll the measurement finished bit.\n self.temp_start = 1\n while self.temp_running > 0:\n pass\n # Grab the temperature value and convert it to Celsius.\n # This uses the same observed value formula from the Radiohead library.\n temp = self._read_u8(_REG_TEMP2)\n return 166.0 - temp", "def isTemperature(obxDict):\n readingCode = getReadingCode(obxDict)\n return readingCode == '8310-5'", "def get_temp(self, temp_number: int) -> Temperature:\n return Temperature(self.api, temp_number)", "def get_cpu_temps(architecture):\n try:\n package_temp = None\n core_temps = []\n cpu_hwmon_path = get_hwmon_dir(architecture)\n if(architecture == \"x86_64\"):\n labels = glob(cpu_hwmon_path + \"/temp*_label\")\n labels.sort()\n for path in labels:\n label_file = open(path, \"r\")\n label = label_file.read().strip()\n temperature_file_path = path[:29] + \"_input\" #should end up with something like \n #\"/sys/class/hwmon/hwmonX/tempY_input\" for this label\n temperature_file = open(temperature_file_path, \"r\")\n temperature = float(temperature_file.read().strip()) / 1000\n temperature_file.close()\n if(\"Package\" in label):\n # ret[0] = temperature\n package_temp = temperature\n else: #this is (probably) for a core.\n core_temps.append(temperature)\n label_file.close()\n return (package_temp, core_temps)\n elif(architecture == \"aarch64\"):\n temperature_file = open(cpu_hwmon_path + \"/temp1_input\", \"r\")\n temperature = float(temperature_file.read().strip()) / 1000\n temperature_file.close()\n return (temperature, [])\n else: \n # This should be unreachable because of the architecture check in cpu_monitor(), but just in case things \n # break this happens, an error message is better than an error from trying to serialize None or something \n # like that.\n logerr(\"get_cpu_temps(): architecture unsupported.\")\n return (float(\"NaN\"), [])\n except: #there is a lot of stuff that can break in the above block...\n logerr(\"unable to get CPU temperature data\")\n return (float(\"NaN\"), []) #was previously None; had to be changed because it must be serializable as a float.", "def get_temperature(self):\n self.temperature = self.temperature_sensors.get_temperature(\n self.channel)\n return self.temperature", "def getTemperature(self):\n return 
self.json_state.get(\"temperature\")", "def getTemperature(self, lat, lon, alt, time):\n raise NotImplementedError(\n \"getTemperature method must be implemented by class {}\".format(\n type(self).__name__))", "def readfile(self, path, filename):\n # The DataStudio software uses ISO-8859-1 encoding (especially for the degree sign in temperature files)\n file = open(path + filename, encoding=\"iso-8859-1\")\n rowlist = file.readlines()\n\n title = rowlist[0].strip(\"\\n\")\n labels = rowlist[1].strip(\"\\n\").split(sep=\"\\t\")\n\n data = np.zeros((len(rowlist)-2, 2))\n\n for i in range(2, len(rowlist)):\n columns = rowlist[i].split(sep=\"\\t\")\n data[i-2, 0] = float(columns[0].replace(\",\", \".\"))\n data[i-2, 1] = float(columns[1].replace(\",\", \".\"))\n\n return data, title, labels" ]
[ "0.7990609", "0.7562184", "0.7501336", "0.7451494", "0.7336076", "0.7219707", "0.71506214", "0.7078098", "0.70765", "0.7061153", "0.70357513", "0.7020464", "0.6986411", "0.68525434", "0.6827719", "0.68100756", "0.67881316", "0.676591", "0.672111", "0.6678112", "0.66756356", "0.6659917", "0.6631733", "0.6629762", "0.6625357", "0.6618899", "0.66133237", "0.6605928", "0.65986174", "0.65851253", "0.657119", "0.657034", "0.6567739", "0.65606153", "0.6560598", "0.65573114", "0.65340704", "0.65316045", "0.65150154", "0.6512187", "0.6509421", "0.6498587", "0.64876676", "0.64362234", "0.6394243", "0.63730055", "0.6362261", "0.6354606", "0.6343362", "0.6334785", "0.6334244", "0.63331044", "0.6327594", "0.6318965", "0.63182175", "0.63063127", "0.62927914", "0.627397", "0.6271738", "0.626381", "0.6260356", "0.6255927", "0.6241959", "0.62342334", "0.62268853", "0.6220443", "0.6210241", "0.62044793", "0.620179", "0.61969304", "0.61964554", "0.6169565", "0.6167985", "0.61601526", "0.61542046", "0.61385226", "0.61385226", "0.6136313", "0.6131628", "0.6127211", "0.6113858", "0.61095107", "0.61034644", "0.6097106", "0.60968095", "0.60956603", "0.60870016", "0.6086174", "0.60852236", "0.6081212", "0.6076991", "0.6071932", "0.60689706", "0.60578036", "0.6055112", "0.60498065", "0.6046265", "0.6031425", "0.60301226", "0.6021592", "0.60048974" ]
0.0
-1
This function gives us all the weather information/prediction for a given day in a given store
def get_weather_on_date(date, meteo_day, store_id): return meteo_day[(meteo_day['STO_EAN'] == store_id) & (meteo_day['DATE_KEY'] == date)]
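A minimal usage sketch for the function above, assuming meteo_day is a pandas DataFrame with a STO_EAN store-identifier column and a DATE_KEY date column (both names appear in the source); the sample rows and values below are illustrative only, not taken from the dataset:

    import pandas as pd

    def get_weather_on_date(date, meteo_day, store_id):
        # Boolean-mask filter: keep only the rows for this store on this date.
        return meteo_day[(meteo_day['STO_EAN'] == store_id) & (meteo_day['DATE_KEY'] == date)]

    # Hypothetical weather table with one row per (store, day); the temperature
    # column names mirror those used elsewhere in this dataset's negatives.
    meteo_day = pd.DataFrame({
        'STO_EAN': [1001, 1001, 1002],
        'DATE_KEY': pd.to_datetime(['2015-01-01', '2015-01-02', '2015-01-01']),
        'TEMPERATURE_VALUE_MIN': [1.5, 2.0, 0.5],
        'TEMPERATURE_VALUE_MAX': [6.0, 7.5, 4.0],
    })

    # Returns the single row for store 1001 on 2015-01-01.
    print(get_weather_on_date(pd.Timestamp('2015-01-01'), meteo_day, 1001))

The parentheses around each comparison are required because & binds more tightly than == in Python.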
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_weather_details(self, days: int = None):\n forecast = super().get_weather_forecast(self.BASE_URL)\n headers = [\n \"date\",\n \"min_temp\",\n \"max_temp\",\n \"phrase\",\n \"probability\",\n \"wind_speed\"]\n if days is None:\n days = 5\n for number in range(days):\n data = []\n date = forecast[\"DailyForecasts\"][number]['Date']\n date = date[:10]\n data.append(date)\n min_temp = round((int(\n (forecast[\"DailyForecasts\"][number][\"Temperature\"]\n [\"Minimum\"][\"Value\"])) - 32) / 1.8)\n data.append(min_temp)\n max_temp = round((int(\n (forecast[\"DailyForecasts\"][number][\"Temperature\"]\n [\"Maximum\"][\"Value\"])) - 32) / 1.8)\n data.append(max_temp)\n phrase = forecast[\"DailyForecasts\"][number][\"Day\"][\"LongPhrase\"]\n data.append(phrase)\n probability = (forecast[\"DailyForecasts\"][number][\"Day\"]\n [\"RainProbability\"])\n data.append(probability)\n wind_speed = round(int(\n (forecast[\"DailyForecasts\"][number][\"Day\"][\"Wind\"][\"Speed\"]\n [\"Value\"]) / 1.6), 1)\n data.append(wind_speed)\n yield dict(zip(headers, data))", "def forecast_weather(self):\n pass", "def get_weather_data(weather_station):\n now = datetime.datetime.now()\n then = now - datetime.timedelta(days=7)\n\n query_date_start = (\"%d%02d%02d\" % (then.year, then.month, then.day))\n query_date_end = (\"%d%02d%02d\" % (now.year, now.month, now.day))\n\n api_key = '/api/%s' % WUNDERGROUND_KEY\n history_key = '/history_%s%s/lang:EN/units:english/bestfct:1/v:2.0' % (query_date_start, query_date_end)\n query = '/q/%s.json?showObs=0&ttl=120' % weather_station\n\n weather_url = (\"%s%s%s%s\" % (WUNDERGROUND_HOST, api_key, history_key, query))\n\n logger.info('Weather URL: %s', weather_url)\n response = requests.get(weather_url).text\n\n max_temp_avg = json.loads(response)['history']['summary']['max_temperature_avg']\n sum_precip = json.loads(response)['history']['summary']['precip_sum']\n\n return max_temp_avg, sum_precip", "def getDailyWeather(self, keyword, temp):\n\n\t\t# Variables\n\t\tdaily_weather = []\n\t\tweather = {}\n\t\tfio = self.helper.getFio(keyword, temp) # Getting fio object\n\n\t\t# Getting 4-day forecast, storing each day's data in a dictionary and\n\t\t# storing each dictionary in an array\n\t\tif fio.has_daily() is True:\n\t\t\tdaily = FIODaily.FIODaily(fio)\n\t\t\tfor day in xrange(0, 4):\n\t\t\t\tfor item in daily.get_day(day).keys():\n\t\t\t\t\tif item == \"summary\":\n\t\t\t\t\t\tweather[item] = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"icon\":\n\t\t\t\t\t\tweather[item] = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"temperatureMax\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\t\n\t\t\t\t\tif item == \"temperatureMin\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"precipProbability\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\t\tif item == \"time\":\n\t\t\t\t\t\tweather[item] = self.helper.getDateForWeather(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"cloudCover\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\tdaily_weather.append(weather)\n\t\t\t\tweather = {}\n\t\telse:\n\t\t\treturn 'No Daily data'\n\t\treturn daily_weather", "def get_zakopane_daily_weather():\n zakopane = FiveDaysWeatherForecast(location.get(\"zakopane\", \"\"))\n zakopane_weather_detail = zakopane.get_weather_details()\n zakopane_daily_weather_detail = []\n for data in 
zakopane_weather_detail:\n zakopane_daily_weather_detail.append(data)\n return zakopane_daily_weather_detail", "def getTodaysWeather(self, keyword, temp):\n\n\t\t# Variables\n\t\tweather = {} \n\t\tfio = self.helper.getFio(keyword, temp) # Getting fio object\n\t\t\n\t\t# Getting todays weather data and populating the dictionary\n\t\tif fio.has_daily() is True and fio.has_hourly() is True:\n\t\t daily = FIODaily.FIODaily(fio)\n\t\t hourly = FIOHourly.FIOHourly(fio)\n\t\t for day in xrange(0, 1):\n\t\t\t\tfor item in daily.get_day(day).keys():\n\t\t\t\t\tif item == \"temperatureMin\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"summary\":\n\t\t\t\t\t\tweather[item] = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"temperatureMax\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"windSpeed\":\n\t\t\t\t\t\twindSpeed = unicode(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"windBearing\":\n\t\t\t\t\t\twindBearing = unicode(daily.get_day(day)[item])\n\t\t\t\t\t\twindBearing = self.helper.convertWindBearing(windBearing)\n\t\t\t\t\tif item == \"sunsetTime\":\n\t\t\t\t\t\tweather[item] = self.helper.getDateForWeather(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"sunriseTime\":\n\t\t\t\t\t\tweather[item] = self.helper.getDateForWeather(daily.get_day(day)[item])\n\t\t\t\t\tif item == \"precipProbability\":\n\t\t\t\t\t\tweather[item] = str(daily.get_day(day)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\tweather[\"wind\"] = windBearing + \" \" + windSpeed + \" mph\"\n\t\t\t\tfor item in hourly.get_hour(day).keys():\n\t\t\t\t\tif item == \"summary\":\n\t\t\t\t\t\tweather[\"current\"] = unicode(hourly.get_hour(0)[item])\n\t\t\t\t\tif item == \"temperature\":\n\t\t\t\t\t\tweather[item] = str(hourly.get_hour(0)[item]).split(\".\")[0]\n\t\t\t\t\tif item == \"icon\":\n\t\t\t\t\t\tweather[item] = unicode(hourly.get_hour(0)[item])\n\t\t\t\t\tif item == \"cloudCover\":\n\t\t\t\t\t\tweather[item] = str(hourly.get_hour(0)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\tweather[\"town\"] = self.helper.getCoords(keyword)[2]\n\t\telse:\n\t\t\treturn 'No Todays data'\n\n\t\treturn weather", "def get_data(link):\n data = re.get(link)\n jsondata = data.json()\n for weatherstation in jsondata['weatherStations']:\n FetchandStore.sensordict.update({weatherstation[\"id\"]:weatherstation[\"sensorValues\"]})\n for sensorvalue in weatherstation[\"sensorValues\"]:\n FetchandStore.sensors.append({\"id\": sensorvalue[\"roadStationId\"], \"name\": sensorvalue[\"oldName\"],\n \"value\": sensorvalue[\"sensorValue\"], \"unit\": sensorvalue[\"sensorUnit\"],\n \"datetime\": sensorvalue[\"measuredTime\"]})\n return FetchandStore.sensors", "def get_weather_data(lat='40.761440',lng='-73.981806'):\r\n key ='********************************'\r\n x = pd.DataFrame()\r\n unix_now = int((dt.datetime.now()- dt.datetime(1970,1,1)).total_seconds())\r\n for time in range(unix_now-86400, unix_now+604800, 86400):\r\n rsp = rq.get('https://api.darksky.net/forecast/{}/{},{},{}'.format(key, lat, lng, time))\r\n rsp_json = json.loads(rsp.text)\r\n row = json_normalize(rsp_json[\"daily\"]['data'])\r\n x = x.append(row)\r\n \r\n x = x[['icon','apparentTemperatureHigh','apparentTemperatureLow','cloudCover','humidity','precipProbability',\r\n 'pressure','visibility','windBearing','windGust','windSpeed']].reset_index(drop=True)\r\n return x", "def get_weather(html):\n\tcheck_page_type(html)\n\tget_temp(html)\n\tget_table(html)\n\treturn 
weather_dict", "def process_weather(forecast_file):\n with open(forecast_file) as json_file:\n json_data = json.load(json_file)\n\n min_temp_store = {}\n max_temp_store = {}\n weather_results = str()\n header_results = str()\n\n for day_in_forecast in json_data['DailyForecasts']:\n day_date = day_in_forecast['Date']\n min_temp = day_in_forecast['Temperature']['Minimum'][\"Value\"]\n min_temp_c = convert_f_to_c(min_temp)\n min_temp_store[day_date] = min_temp_c\n max_temp = day_in_forecast['Temperature']['Maximum'][\"Value\"]\n max_temp_c = convert_f_to_c(max_temp)\n max_temp_store[day_date] = max_temp_c\n\n day_time_phrase = day_in_forecast['Day']['LongPhrase']\n rain_chance_day = day_in_forecast['Day']['RainProbability']\n night_time_phrase = day_in_forecast['Night']['LongPhrase']\n rain_chance_night = day_in_forecast['Night']['RainProbability']\n weather_results = weather_results + (f\"-------- {convert_date(day_date)} --------\\nMinimum Temperature: {format_temperature(round(min_temp_c,1))}\\nMaximum Temperature: {format_temperature(round(max_temp_c,1))}\\nDaytime: {day_time_phrase}\\n Chance of rain: {rain_chance_day}%\\nNighttime: {night_time_phrase}\\n Chance of rain: {rain_chance_night}%\\n\")+ \"\\n\"\n\n\n max_day = max(max_temp_store, key=max_temp_store.get)\n max_value = max_temp_store[max_day]\n min_day = min(min_temp_store, key=min_temp_store.get)\n min_value = min_temp_store[min_day]\n max_totals = (sum(max_temp_store.values()))\n min_totals = (sum(min_temp_store.values()))\n num_items = len(min_temp_store)\n mean_min = round(calculate_mean(min_totals,num_items),1)\n mean_max = round(calculate_mean(max_totals,num_items),1)\n\n save_header = (f\"{len(json_data['DailyForecasts'])} Day Overview\\n The lowest temperature will be {format_temperature(round((min_value),1))}, and will occur on {convert_date(min_day)}.\\n The highest temperature will be {format_temperature(round((max_value),1))}, and will occur on {convert_date(max_day)}.\\n The average low this week is {format_temperature(mean_min)}.\\n The average high this week is {format_temperature(mean_max)}.\\n\")\n\n header_results = save_header + \"\\n\"+ weather_results\n \n return(header_results)", "def get_weather(days, hours, db):\n days = format_list_for_db(days)\n hours = format_list_for_db(hours)\n sql = f\"SELECT * FROM weather WHERE day in {days} AND HOUR in {hours}\"\n cursor = db.cursor()\n cursor.execute(sql)\n data = cursor.fetchall()\n cursor.close()\n\n weathers = []\n if len(data) > 0:\n for weather in data:\n weather = {\"hour\": weather[1],\n \"day\": day_absolute_to_relative(weather[2]),\n \"temperature\": weather[3],\n \"apparenttemperature\": weather[4],\n \"precipitationintensity\": weather[5],\n \"precipitationprobability\": weather[6],\n \"humidity\": weather[7],\n \"dewpoint\": weather[8],\n \"windspeed\": weather[9],\n \"windbearing\": weather[10],\n \"windgust\": weather[11],\n \"pressure\": weather[12],\n \"cloudcover\": weather[13],\n \"uvindex\": weather[14],\n \"visibility\": weather[15]}\n weathers.append(weather)\n return weathers", "def print_weather(self, days):\n if days == 1:\n open_weather = urlopen(self.full_url).read().decode(\"utf8\")\n read_json = json.loads(open_weather)\n outside = self.get_outside_outlook(read_json[\"weather\"])\n wind_speed = read_json[\"wind\"][\"speed\"]\n wind_direction = self.deg_to_compass(read_json[\"wind\"][\"deg\"])\n current_temp = self.convert_temp(read_json[\"main\"][\"temp\"])\n print(\"Current Temperature: {:.2f}\\xb0\\n\"\n \"Sky: {}\\n\"\n \"Wind 
speed: {} MPH\\n\"\n \"Wind direction: {}\".format(current_temp, outside, wind_speed, wind_direction))\n else:\n open_weather = urlopen(self.full_url).read().decode(\"utf8\")\n read_json = json.loads(open_weather)\n outside = read_json[\"list\"]\n \"\"\"\n Should be:\n for temp in outside:\n stuff = temp[\"weather\"]\n for i in stuff:\n print(i['description'])\n\n Each of these will need to be added to a list or a dictionary to print relationally\n \"\"\"\n print(outside)", "def get_weather_forecast(address, update=''):\n \n def method_url():\n url_base = visit_forecast_home(address)\n return url_base\n \n def method_dict():\n basedate = datetime.now()\n basemonth = basedate.strftime(\"%B\").lower()\n \n url_stem = weather_profile(address, \"weather_forecast_url\")\n url_stem = str(url_stem).replace(\"weather-forecast\", str(basemonth) + \"-weather\")\n url_base = url_stem\n \n today = date.today()\n date_last1 = somonth(today.year, today.month - 2)\n date_last = somonth(today.year, today.month - 1)\n date_current = somonth(today.year, today.month)\n date_next = somonth(today.year, today.month+1)\n\n url_last1 = str(str(url_base) +\"?monyr=\"+str(date_last1.month)+\"/1/\"+str(date_last1.year)+\"&view=table\")\n url_last = str(str(url_base) +\"?monyr=\"+str(date_last.month)+\"/1/\"+str(date_last.year)+\"&view=table\")\n url_current = str(str(url_base)+\"?monyr=\"+str(date_current.month)+\"/1/\"+str(date_current.year)+\"&view=table\")\n url_next = str(str(url_base) +\"?monyr=\"+str(date_next.month)+\"/1/\"+str(date_next.year)+\"&view=table\")\n \n url_list = [url_current, url_last, url_next, url_last1] #will give priority to first in this list\n #print(url_list)\n combined_dict = {}\n \n for url in url_list:\n forecast_dict = download_forecast(url)\n print(str(url) + \" downloaded\")\n combined_dict = join_dicts(combined_dict, forecast_dict)\n weather_profile(address, \"weather_forecast_dict\", combined_dict, update)\n print(str(url) + \" added\")\n print(combined_dict)\n \n return combined_dict\n \n title = \"weather_forecast_url\"\n if weather_profile(address, title) == None:\n print(\"/t-Adding/refreshing data...\")\n data = method_url()\n print(data)\n weather_profile(address, title, data, update)\n else:\n print(\"There is existing data for: \" + str(title))\n\n\n title = \"weather_forecast_dict\"\n if weather_profile(address, title) == None or update != '':\n print(\"/t-Adding/refreshing data...\")\n data = method_dict()\n print(data)\n weather_profile(address, title, data, update)\n return weather_profile(address, title)\n else:\n return weather_profile(address, title)", "def get_weather_info(forecast):\n day_forecast = {}\n day_forecast['condition_text'] = forecast['day']['condition']['text']\n #this icon is a url to an image that describes the weather condition\n day_forecast['condition_icon'] = forecast['day']['condition']['icon']\n day_forecast['max_temp'] = forecast['day']['maxtemp_c']\n day_forecast['min_temp'] = forecast['day']['mintemp_c']\n day_forecast['avg_temp'] = forecast['day']['avgtemp_c']\n date = datetime.strptime(forecast['date'], \"%Y-%m-%d\").strftime(\"%b %d:%a\")\n date_format = date.split(':')\n day_forecast['day'] = date_format[0]\n day_forecast['weekday'] = date_format[1]\n return day_forecast", "def _get_weather_data(self, lat, long):\n return {}\n try:\n # get the data\n forecast = self.ds.get_forecast(\n lat, long,\n exclude=[weather.HOURLY, weather.MINUTELY,\n weather.DAILY, weather.ALERTS, weather.FLAGS])\n\n # add lat & long to the hourly weather data 
for composite key in db\n data = forecast.currently\n data.latitude = lat\n data.longitude = long\n data = data.__dict__\n data.pop(\"time\")\n return data\n except Exception as e:\n print(e)\n return None", "def generate_training_testing_dataset(store_id, transactions, meteo_day, max_days=2500,\n single_barcode=0):\n\n # Get the minimum and maximum of date in the transactions\n min_date = transactions[(transactions['STO_EAN'] == store_id)].min()['TRX_DATETIME'].date()\n max_date = transactions[(transactions['STO_EAN'] == store_id)].max()['TRX_DATETIME'].date()\n\n # Get the number of days between the two date\n num_days = (max_date - min_date).days\n\n # Get the list of unique products barcode in the transactions\n products_barcode = transactions['BARCODE'].unique()\n\n # Only do one single barcode if activated\n if single_barcode is not None:\n products_barcode = [products_barcode[single_barcode]]\n\n\n # Array to contain all training data\n all_data_first_level = []\n\n # For each day and for each product\n for day in xrange(num_days):\n\n print(day)\n\n # If we have already considered more days than allowed, stop\n if day > max_days:\n break\n\n\n # Get the date corresponding to this day\n date = min_date + pd.DateOffset(day)\n # Get the weather of the date\n weather = get_weather_on_date(date, meteo_day, store_id).head(n=1)\n\n # If the weather is empty we skip this day\n if weather.empty:\n continue\n\n # For each product to include\n for product_barcode in products_barcode:\n\n # Get the volume and inventory data\n volume = get_volume_product_on_date(product_barcode, date, store_id, transactions)\n\n # If no volume could be found skip this date,product pair\n if volume is None:\n continue\n\n # Get the type of the current date\n day_type = generate_day_type(date)\n\n\n # Generating complex features based on the simpler one\n\n # This contains respectively yesterday, the day before yesterday and the same day as current one in\n # previous week\n yesterday = date - pd.DateOffset(1)\n two_days_ago = date - pd.DateOffset(2)\n one_week_ago = date - pd.DateOffset(7)\n\n # Get the day type of yesterday and 2 days ago\n day_type_yesterday = generate_day_type(yesterday)\n day_type_2days_ago = generate_day_type(two_days_ago)\n\n # Get the volume of yesterday, 2days ago and 1 week ago\n volume_yesterday = get_volume_product_on_date(product_barcode, yesterday, store_id, transactions)\n volume_2days_ago = get_volume_product_on_date(product_barcode, two_days_ago, store_id, transactions)\n volume_one_week_ago = get_volume_product_on_date(product_barcode, one_week_ago, store_id, transactions)\n\n\n # Get the total sales and the total weight of product done yesterday, 2 days ago and 1 week ago\n volume_price_yesterday = 0\n volume_weight_yesterday = 0\n if volume_yesterday is not None:\n volume_price_yesterday = volume_yesterday[\"price\"]\n volume_weight_yesterday = volume_yesterday[\"weight\"]\n\n volume_price_2days_ago = 0\n volume_weight_2days_ago = 0\n if volume_2days_ago is not None:\n volume_price_2days_ago = volume_2days_ago[\"price\"]\n volume_weight_2days_ago = volume_2days_ago[\"weight\"]\n\n volume_price_one_week_ago = 0\n volume_weight_one_week_ago = 0\n if volume_one_week_ago is not None:\n volume_price_one_week_ago = volume_one_week_ago[\"price\"]\n volume_weight_one_week_ago = volume_one_week_ago[\"weight\"]\n\n\n\n # Using historical weather data\n weather_yesterday = get_weather_on_date(yesterday, meteo_day, store_id).head(n=1)\n temperature_min_yesterday = 0\n 
temperature_max_yesterday = 0\n if not weather_yesterday.empty:\n temperature_min_yesterday = weather_yesterday['TEMPERATURE_VALUE_MIN'].values[0]\n temperature_max_yesterday = weather_yesterday['TEMPERATURE_VALUE_MIN'].values[0]\n\n\n #tmp = [weather['TEMPERATURE_VALUE_MIN'].values[0], weather['TEMPERATURE_VALUE_MAX'].values[0],\n # weather['PRECIPITATION_VALUE'].values[0], weather['SUNSHINE_DURATION'].values[0],\n # weather['SNOW_DEPTH'].values[0], day_type, volume[\"price\"], volume[\"weight\"]]\n\n\n # Saving Features\n tmp = [weather['TEMPERATURE_VALUE_MIN'].values[0], weather['TEMPERATURE_VALUE_MAX'].values[0],\n day_type, volume[\"price\"], volume_price_yesterday,volume_weight_yesterday,\n volume_price_2days_ago, volume_weight_2days_ago,\n volume_price_one_week_ago, volume_weight_one_week_ago, temperature_min_yesterday,\n temperature_max_yesterday,day_type_yesterday, day_type_2days_ago,\n volume[\"weight\"]]\n\n all_data_first_level.append(tmp)\n\n return all_data_first_level", "def get_weather(address, update = ''):\n \n def proceed_with_method():\n if update == 'forecast':\n precip_hist_dict = historic_weather_data(address, \"P\")\n temp_hist_dict = historic_weather_data(address, \"T\") \n else:\n precip_hist_dict = historic_weather_data(address, \"P\", update)\n temp_hist_dict = historic_weather_data(address, \"T\", update) \n \n if update == 'history':\n forecast_dict = get_weather_forecast(address)\n else:\n forecast_dict = get_weather_forecast(address, update)\n\n \n precip_forecast_dict = {}\n temp_forecast_dict = {}\n for key, item in forecast_dict.items():\n precip_forecast_dict[key] = item[1]\n temp_forecast_dict[key] = item[0]\n \n precip_dict = join_historic_forecast(precip_hist_dict, precip_forecast_dict)\n #use adj join for temp, forecast is not accurate, this at least gives a shape\n temp_dict = adj_join_historic_forecast(temp_hist_dict, temp_forecast_dict)\n return [precip_dict, temp_dict]\n \n title = \"weather_dict_temp\"\n if weather_profile(address, title) == None or update != '':\n print(\"/t-Adding/refreshing data...\")\n data = proceed_with_method()\n weather_profile(address, title, data[1], update)\n else:\n print(\"There is existing data for: \" + str(title))\n \n title = \"weather_dict_precip\"\n if weather_profile(address, title) == None or update != '':\n print(\"/t-Adding/refreshing data...\")\n data = proceed_with_method()\n weather_profile(address, title, data[0], update)\n return [weather_profile(address, title),weather_profile(address, \"weather_dict_temp\")]\n else:\n return [weather_profile(address, title),weather_profile(address, \"weather_dict_temp\")]\n print(\"There is existing data for: \" + str(title))", "def get_weather_data():\n keys = ['1364038.csv',\n '1364041.csv',\n '1364042.csv',\n '1364043.csv',\n '1364044.csv',\n '1364046.csv',\n '1364047.csv',\n '1364048.csv',\n '1364051.csv',\n '1364052.csv',\n '1364053.csv',\n '1364054.csv',\n '1364055.csv',\n '1364058.csv',\n '1364059.csv',\n '1364060.csv',\n '1364061.csv',\n '1364062.csv',\n '1364063.csv',\n '1364064.csv',\n '1364066.csv']\n df_weather = import_weather(keys)\n df_weather_dist = df_weather[[\n 'LATITUDE', 'LONGITUDE', 'name']].drop_duplicates().reset_index()\n return df_weather, df_weather_dist", "def get_weather(self):\n with urllib.request.urlopen(self.url) as response:\n json_data = response.read().decode('utf-8')\n\n data = json.loads(json_data)\n\n weather = {}\n weather['current'] = {\n 'temp': round(data['current']['temp_f']),\n 'humidity': 
round(data['current']['humidity']),\n 'summary': data['current']['condition']['text']\n }\n today = data['forecast']['forecastday'][0]['day']\n weather['today'] = {\n 'temp': round(today['maxtemp_f']),\n 'summary': today['condition']['text']\n }\n \n return weather", "def process_weather(forecast_file):\n # Load json data file\n \n with open(forecast_file) as json_file:\n json_data = json.load(json_file)\n \n # Set Variables, Dictionaries and Lists\n days_list = []\n temp_dict = {}\n daily_dict = {}\n\n num_items = 0\n total_sum_min = 0\n total_sum_max = 0\n days = len(json_data['DailyForecasts'])\n days_list = days_in_data(days)\n\n t_temp_min = 100\n t_temp_max = 0\n\n # Pull through the data\n\n for day in days_list:\n num_items += 1\n date = convert_date(json_data['DailyForecasts'][day]['Date'])\n min_temp = convert_f_to_c(json_data['DailyForecasts'][day]['Temperature']['Minimum']['Value'])\n total_sum_min += min_temp\n max_temp = convert_f_to_c(json_data['DailyForecasts'][day]['Temperature']['Maximum']['Value'])\n total_sum_max += max_temp\n day_desc = json_data['DailyForecasts'][day]['Day']['LongPhrase']\n chance_rain_day = json_data['DailyForecasts'][day]['Day']['RainProbability']\n night_desc = json_data['DailyForecasts'][day]['Night']['LongPhrase']\n chance_rain_night = json_data['DailyForecasts'][day]['Night']['RainProbability']\n \n if min_temp < t_temp_min:\n t_temp_min = min_temp\n t_temp_mindate = date\n else:\n pass\n if max_temp > t_temp_max:\n t_temp_max = max_temp\n t_temp_maxdate = date\n else:\n pass\n \n daily_dict[day] = [date, min_temp, max_temp, day_desc, chance_rain_day, night_desc, chance_rain_night]\n # 0 1 2 3 4 5 6 \n \n # print(temp_dict)\n # print(daily_dict)\n\n # Calculate Minimum, Maximum and Mean temperatures\n\n mean_min = format_temperature(calculate_mean(total_sum_min, num_items))\n # print(mean_min)\n mean_max = format_temperature(calculate_mean(total_sum_max, num_items))\n # print(mean_max)\n\n # Format Minimum and Maximum temperatures\n min_temp_format = format_temperature(t_temp_min)\n max_temp_format = format_temperature(t_temp_max)\n\n ##############################################################################################\n\n # Combine string messages to return to user\n\n str_Output = \"\"\n Output_gen1 = (f\"{num_items} Day Overview\\n\")\n Output_gen2 = (f\" The lowest temperature will be {min_temp_format}, and will occur on {t_temp_mindate}.\\n\")\n Output_gen3 = (f\" The highest temperature will be {max_temp_format}, and will occur on {t_temp_maxdate}.\\n\")\n Output_gen4 = (f\" The average low this week is {mean_min}.\\n\")\n Output_gen5 = (f\" The average high this week is {mean_max}.\\n\")\n str_Output = Output_gen1 + Output_gen2 + Output_gen3 + Output_gen4 + Output_gen5\n for key, value in daily_dict.items():\n Output_daily0 = (\"\\n\")\n Output_daily1 = (f\"-------- {value[0]} --------\\n\")\n Output_daily2 = (f\"Minimum Temperature: {format_temperature(value[1])}\\n\")\n Output_daily3 = (f\"Maximum Temperature: {format_temperature(value[2])}\\n\")\n Output_daily4 = (f\"Daytime: {value[3]}\\n\")\n Output_daily5 = (f\" Chance of rain: {value[4]}%\\n\")\n Output_daily6 = (f\"Nighttime: {value[5]}\\n\")\n Output_daily7 = (f\" Chance of rain: {value[6]}%\\n\")\n str_Output = str_Output + Output_daily0 + Output_daily1 + Output_daily2 + Output_daily3 + Output_daily4 + Output_daily5 + Output_daily6 + Output_daily7\n str_Output = str_Output +\"\\n\"\n\n return str_Output", "def get_data_for_day(i,t0):\n t0 = UTCDateTime(t0)\n\n # open 
clients\n client = FDSNClient(\"GEONET\")\n client_nrt = FDSNClient('https://service-nrt.geonet.org.nz')\n \n daysec = 24*3600\n data_streams = [[2, 5], [4.5, 8], [8,16]]\n names = ['rsam','mf','hf']\n\n # download data\n datas = []\n try:\n site = client.get_stations(starttime=t0+i*daysec, endtime=t0 + (i+1)*daysec, station='WIZ', level=\"response\", channel=\"HHZ\")\n except FDSNNoDataException:\n pass\n\n try:\n WIZ = client.get_waveforms('NZ','WIZ', \"10\", \"HHZ\", t0+i*daysec, t0 + (i+1)*daysec)\n \n # if less than 1 day of data, try different client\n if len(WIZ.traces[0].data) < 600*100:\n raise FDSNNoDataException('')\n except ObsPyMSEEDFilesizeTooSmallError:\n return\n except FDSNNoDataException:\n try:\n WIZ = client_nrt.get_waveforms('NZ','WIZ', \"10\", \"HHZ\", t0+i*daysec, t0 + (i+1)*daysec)\n except FDSNNoDataException:\n return\n\n # process frequency bands\n WIZ.remove_sensitivity(inventory=site)\n data = WIZ.traces[0].data\n ti = WIZ.traces[0].meta['starttime']\n # round start time to nearest 10 min increment\n tiday = UTCDateTime(\"{:d}-{:02d}-{:02d} 00:00:00\".format(ti.year, ti.month, ti.day))\n ti = tiday+int(np.round((ti-tiday)/600))*600\n N = 600*100 # 10 minute windows in seconds\n Nm = int(N*np.floor(len(data)/N))\n for data_stream, name in zip(data_streams, names):\n filtered_data = bandpass(data, data_stream[0], data_stream[1], 100)\n filtered_data = abs(filtered_data[:Nm])\n datas.append(filtered_data.reshape(-1,N).mean(axis=-1)*1.e9)\n\n # compute dsar\n data = cumtrapz(data, dx=1./100, initial=0)\n data -= np.mean(data)\n j = names.index('mf')\n mfd = bandpass(data, data_streams[j][0], data_streams[j][1], 100)\n mfd = abs(mfd[:Nm])\n mfd = mfd.reshape(-1,N).mean(axis=-1)\n j = names.index('hf')\n hfd = bandpass(data, data_streams[j][0], data_streams[j][1], 100)\n hfd = abs(hfd[:Nm])\n hfd = hfd.reshape(-1,N).mean(axis=-1)\n dsar = mfd/hfd\n datas.append(dsar)\n names.append('dsar')\n\n # write out temporary file\n datas = np.array(datas)\n time = [(ti+j*600).datetime for j in range(datas.shape[1])]\n df = pd.DataFrame(zip(*datas), columns=names, index=pd.Series(time))\n df.to_csv('_tmp/_tmp_fl_{:05d}.dat'.format(i), index=True, index_label='time')", "def get_operational_forecasts(self):\n\n # Real time ensemble data:\n # https://www.ftp.ncep.noaa.gov/data/nccf/com/ens_tracker/prod/\n\n # If forecasts dict already exist, simply return the dict\n try:\n self.forecast_dict\n return self.forecast_dict\n except:\n pass\n\n # Follow HURDAT procedure\n if self.source == \"hurdat\":\n\n # Get storm ID & corresponding data URL\n storm_id = self.dict['operational_id']\n storm_year = self.dict['year']\n if storm_year <= 2006:\n storm_id = self.dict['id']\n if storm_year < 1954:\n msg = \"Forecast data is unavailable for storms prior to 1954.\"\n raise RuntimeError(msg)\n\n # Error check\n if storm_id == '':\n msg = \"No NHC operational data is available for this storm.\"\n raise RuntimeError(msg)\n\n # Check if archive directory exists for requested year, if not redirect to realtime directory\n url_models = f\"https://ftp.nhc.noaa.gov/atcf/archive/{storm_year}/a{storm_id.lower()}.dat.gz\"\n if requests.get(url_models).status_code != 200:\n url_models = f\"https://ftp.nhc.noaa.gov/atcf/aid_public/a{storm_id.lower()}.dat.gz\"\n\n # Retrieve model data text\n if requests.get(url_models).status_code == 200:\n request = urllib.request.Request(url_models)\n response = urllib.request.urlopen(request)\n sio_buffer = BytesIO(response.read())\n gzf = 
gzip.GzipFile(fileobj=sio_buffer)\n data = gzf.read()\n content = data.splitlines()\n content = [(i.decode()).split(\",\") for i in content]\n content = [i for i in content if len(i) > 10]\n response.close()\n else:\n raise RuntimeError(\n \"No operational model data is available for this storm.\")\n\n # Follow JTWC procedure\n else:\n\n url_models_noaa = f\"https://www.ssd.noaa.gov/PS/TROP/DATA/ATCF/JTWC/a{self.id.lower()}.dat\"\n url_models_ucar = f\"http://hurricanes.ral.ucar.edu/repository/data/adecks_open/{self.year}/a{self.id.lower()}.dat\"\n\n # Retrieve model data text\n try:\n content = read_url(url_models_noaa, split=True, subsplit=False)\n except:\n try:\n content = read_url(\n url_models_ucar, split=True, subsplit=False)\n except:\n raise RuntimeError(\n \"No operational model data is available for this storm.\")\n content = [i.split(\",\") for i in content]\n content = [i for i in content if len(i) > 10]\n\n # Iterate through every line in content:\n forecasts = {}\n for line in content:\n\n # Get basic components\n lineArray = [i.replace(\" \", \"\") for i in line]\n try:\n basin, number, run_init, n_a, model, fhr, lat, lon, vmax, mslp, stype, rad, windcode, neq, seq, swq, nwq = lineArray[\n :17]\n use_wind = True\n except:\n basin, number, run_init, n_a, model, fhr, lat, lon, vmax, mslp, stype = lineArray[\n :11]\n use_wind = False\n\n # Check init time is within storm time range\n run_init_dt = dt.strptime(run_init, '%Y%m%d%H')\n if run_init_dt < self.dict['time'][0] - timedelta(hours=6) or run_init_dt > self.dict['time'][-1] + timedelta(hours=6):\n continue\n \n # Skip erroneous lines\n try:\n if int(fhr) > 240:\n continue\n except:\n continue\n\n # Enter into forecast dict\n if model not in forecasts.keys():\n forecasts[model] = {}\n if run_init not in forecasts[model].keys():\n forecasts[model][run_init] = {\n 'init': run_init_dt, 'fhr': [], 'lat': [], 'lon': [], 'vmax': [], 'mslp': [], 'type': [], 'windrad': []\n }\n\n # Format lat & lon\n fhr = int(fhr)\n if \"N\" in lat:\n lat_temp = lat.split(\"N\")[0]\n lat = round(float(lat_temp) * 0.1, 1)\n elif \"S\" in lat:\n lat_temp = lat.split(\"S\")[0]\n lat = round(float(lat_temp) * -0.1, 1)\n if \"W\" in lon:\n lon_temp = lon.split(\"W\")[0]\n lon = round(float(lon_temp) * -0.1, 1)\n elif \"E\" in lon:\n lon_temp = lon.split(\"E\")[0]\n lon = round(float(lon_temp) * 0.1, 1)\n\n # Format vmax & MSLP\n if vmax == '':\n vmax = np.nan\n else:\n vmax = int(vmax)\n if vmax < 10 or vmax > 300:\n vmax = np.nan\n if mslp == '':\n mslp = np.nan\n else:\n mslp = int(mslp)\n if mslp < 1:\n mslp = np.nan\n\n # Format wind radii\n if use_wind:\n try:\n rad = int(rad)\n if rad in [0, 35]:\n rad = 34\n neq = np.nan if windcode == '' else int(neq)\n seq = np.nan if windcode in ['', 'AAA'] else int(seq)\n swq = np.nan if windcode in ['', 'AAA'] else int(swq)\n nwq = np.nan if windcode in ['', 'AAA'] else int(nwq)\n except:\n rad = 34\n neq = np.nan\n seq = np.nan\n swq = np.nan\n nwq = np.nan\n else:\n rad = 34\n neq = np.nan\n seq = np.nan\n swq = np.nan\n nwq = np.nan\n\n # Add forecast data to dict if forecast hour isn't already there\n if fhr not in forecasts[model][run_init]['fhr']:\n if model in ['OFCL', 'OFCI'] and fhr > 120:\n pass\n else:\n if lat == 0.0 and lon == 0.0:\n continue\n forecasts[model][run_init]['fhr'].append(fhr)\n forecasts[model][run_init]['lat'].append(lat)\n forecasts[model][run_init]['lon'].append(lon)\n forecasts[model][run_init]['vmax'].append(vmax)\n forecasts[model][run_init]['mslp'].append(mslp)\n 
forecasts[model][run_init]['windrad'].append(\n                        {rad: [neq, seq, swq, nwq]})\n\n                    # Get storm type, if it can be determined\n                    if stype in ['', 'DB'] and vmax != 0 and not np.isnan(vmax):\n                        stype = get_storm_type(vmax, False)\n                    forecasts[model][run_init]['type'].append(stype)\n            else:\n                ifhr = forecasts[model][run_init]['fhr'].index(fhr)\n                forecasts[model][run_init]['windrad'][ifhr][rad] = [\n                    neq, seq, swq, nwq]\n\n        # Save dict locally\n        self.forecast_dict = forecasts\n\n        # Return dict\n        return forecasts", "def print_weather_summary(weather_data: dict, days: int):\n    for day in range(days):\n        print_daily_summary(weather_data[\"daily\"][day])", "def get_data_yoshinoya(storeid):\n\n    store_details = {'storeid': storeid}\n\n    url = 'https://stores.yoshinoya.com/{}'.format(storeid)\n\n    try:\n        r = requests.get(url, allow_redirects=False)\n\n        # Catch responses > 300\n        if r.status_code >= 300:\n            raise HTTPError\n\n    except HTTPError:\n        print(\"Page not found. Status code is not 200.\")\n        return None\n\n    soup = BeautifulSoup(r.text, 'html.parser')\n\n    # Get brand and name of the store\n    LocationNamebrand = soup.find('span', {'class': \"LocationName-brand\"})\n\n    if 'ysn' in storeid:\n        store_details['brand'] = '吉野家'\n    elif 'hnmr' in storeid:\n        store_details['brand'] = '吉野家×はなまるうどん'\n\n    name = LocationNamebrand.text.strip(store_details['brand'])\n\n    store_details['name'] = name.strip(' ')\n\n    # Get lat and lon\n    geo = soup.find('meta', {'name': 'geo.position'})\n\n    latlon = geo['content'].split(';')\n\n    store_details['lat'] = latlon[0]\n    store_details['lon'] = latlon[1]\n\n    # Get location attributes\n    location = soup.find('address')\n\n    # First span in the address class is the postalCode\n    postalCode_class = location.span.extract()\n\n    store_details['postalCode'] = postalCode_class.text\n\n    # Remove the prefix of the address\n    store_details['address'] = location.text.strip('〒 ')\n\n    # Get day-specific opening hours\n    hours = soup.find('div',\n                      {'class': 'c-location-hours-details-wrapper js-location-hours-table'})\n\n    # Convert special characters in list (string format) from js to python\n    for old, new in [('true', 'True'), ('false', 'False')]:\n        hours['data-days'] = hours['data-days'].replace(old, new)\n\n    # Convert the opening hours' list from string to array\n    openinghours_list = eval(hours['data-days'])\n\n    for day in openinghours_list:\n\n        dayOfTheWeek = day['day']\n\n        try:\n            openingTime = day['intervals'][0]['start']\n            closingTime = day['intervals'][0]['end']\n\n        # Attribute 'intervals' is empty if the store is closed on that day\n        except IndexError:\n            openingTime = None\n            closingTime = None\n\n        # Create key in the form of 'MON_open', 'MON_close', 'TUE_open', etc.\n        store_details[f'{dayOfTheWeek[:3]}_open'] = openingTime\n        store_details[f'{dayOfTheWeek[:3]}_close'] = closingTime\n\n    return store_details", "def get_weather_info(forecast):\n    day_forecast = {}\n    try:\n        day_forecast['condition_text'] = forecast['day']['condition']['text']\n        # this icon is a url to an image that describes the weather condition\n        day_forecast['condition_icon_url'] = forecast['day']['condition']['icon']\n        day_forecast['max_temp'] = forecast['day']['maxtemp_c']\n        day_forecast['min_temp'] = forecast['day']['mintemp_c']\n        day_forecast['avg_temp'] = forecast['day']['avgtemp_c']\n        day_info = forecast['date']\n    except KeyError:\n        raise Exception(\"Could not parse weather data accurately, check out valid response fields at \"\n                        \"{valid_response_fields_url} and modify the code as necessary\"\n                        
.format(valid_response_fields_url=valid_response_fields_url))\n day_info = arrow.get(day_info).format('MMM-DD:dddd').split(':')\n day_forecast['day'] = day_info[0]\n day_forecast['weekday'] = day_info[1]\n return day_forecast", "def get_energy_demand_values_day(weather_data, houses_list, houses_dict,\n energy_factor_types, energy_demands_types,\n load_curve_houses, load_profile_df,\n daily_energy_demand_houses):\n start = weather_data.index[0]\n while start < weather_data.index[-1]:\n end = start + pd.Timedelta('1 days')\n if logger.isEnabledFor(logging.INFO):\n print('\\rProgress: '+str(start), end='\\r') # print progress\n typtag = weather_data.loc[start]['typtag']\n for house_name in houses_list:\n house_type = houses_dict[house_name]['house_type']\n for i, energy_factor_type in enumerate(energy_factor_types):\n energy_demand_type = energy_demands_types[i]\n # Example: Q_Heiz_TT(t) = F_Heiz_TT(t) * Q_Heiz_TT\n load_curve_houses.loc[start:end, (house_name,\n energy_demand_type)] =\\\n load_profile_df.loc[start:end, (energy_factor_type,\n house_type)] *\\\n daily_energy_demand_houses.loc[(house_name,\n energy_demand_type), typtag]\n# print(load_curve_houses.loc[start:end])\n start = end\n\n if logger.isEnabledFor(logging.INFO):\n # overwrite last status with empty line\n print('\\r', end='\\r')\n\n return load_curve_houses", "def get_weather_data() -> dict:\n # Creating the url for the api call\n api_key = \"96bba64ba34672da132c1a987ad2fee6\"\n lat = 49.24\n long = -123.15\n config = '&units=metric'\n url = f'https://api.openweathermap.org/data/2.5/onecall?lat={lat}&lon={long}&appid={api_key}{config}'\n\n # Querying and JSON parsing\n api_return = requests.get(url)\n weather_data = api_return.json()\n return weather_data", "def get_weather_data(lat, lon):\n\n # Get weather\n filedata = pvtoolslib.get_s3_filename_df()\n filedata_closest = nsrdbtools.find_closest_datafiles(float(lat), float(lon),\n filedata)\n\n filename = filedata_closest['filename'].iloc[0]\n\n if filename == '124250_37.93_-122.3.npz':\n weather, info = nsrdbtools.get_local_weather_data(filename)\n else:\n weather, info = pvtoolslib.get_s3_weather_data(filename)\n\n return weather, info", "def get_hourly(location_list):\n location, human_location = location_list\n query = location\n url = \"http://api.wunderground.com/auto/wui/geo/WXCurrentObXML/index.xml?query=%s\" % query\n f = urllib2.urlopen(url)\n xml = f.read()\n root = ET.XML(xml)\n \n current = {'location': location, 'human_location': human_location}\n current['observation_time'] = parser.parse(root.find('observation_time').text.replace('Last Updated on',''))\n current['temperature'] = root.find('temp_f').text\n current['humidity'] = root.find('relative_humidity').text.strip('%') #Remove %\n current['wind_speed'] = root.find('wind_mph').text\n current['wind_direction'] = root.find('wind_dir').text\n current['icon'] = root.find('icon').text\n current['conditions'] = root.find('weather').text\n try:\n f = Forecast(**current)\n f.save()\n except:\n logging.info(\"Hourly Forecast Data missing or no new data available\")", "def _get_information(self):\n weather_dict = {}\n table_body = self.climate_table\n\n rows = table_body.find_all('tr')\n months = [col.get_text() for col in rows[0].find_all('td')[1:]]\n\n for row in rows[1:]:\n cols = row.find_all('td')\n key = cols[0].get_text()\n value_getter = self._value_getters_by_key.get(key, self._get_remote_workers)\n\n weather_dict.update({key: [(months[i],) + value_getter(col) for i, col in enumerate(cols[1:])]})\n\n 
return weather_dict", "def get_data(table_name, end, num, start=None):\n if start == None:\n if table_name == \"days\": start = end - timedelta(days=num-1) \n if table_name == \"weeks\": start = end - timedelta(weeks=num-1) \n if table_name == \"months\": start = end - relativedelta(months=+num-1) \n if table_name == \"years\": start = end - relativedelta(years=+num-1) \n else: \n start = days.get_entry(table_name, start).date\n \n dates = []\n data = []\n weather = []\n density = []\n \n while start <= end:\n entry = days.get_entry(table_name, start)\n data.append(entry.sentiment)\n \n if table_name == \"days\": \n dates.append(entry.date.strftime(\"%B %d, %Y\"))\n start = start + timedelta(days=1)\n if table_name == \"weeks\": \n dates.append(entry.date.strftime(\"%B %d, %Y\"))\n start = start + timedelta(weeks=1) \n if table_name == \"months\": \n dates.append(entry.date.strftime(\"%B %Y\"))\n start = start + relativedelta(months=+1) \n if table_name == \"years\": \n dates.append(entry.date.strftime(\"%Y\"))\n start = start + relativedelta(years=+1) \n\n # 7/15/15 is the last entry in the current weather dictionary\n num_days = (min(start, date(2015,7,15)) - entry.date).days\n temp = {entry.date + timedelta(days=i): weather_dict[entry.date + timedelta(days=i)] for i in range(num_days)}\n weather.append(float(sum(temp.values()))/float(len(temp)))\n\n if density_dict != None:\n d = max(entry.date, date(2014,7,1))\n num_days = (min(start, date(2015,7,28)) - d).days\n rho = {d + timedelta(days=i): density_dict[d + timedelta(days=i)] for i in range(num_days)}\n density.append(float(sum(rho.values()))/float(len(rho)))\n\n return dates, data, weather, density", "def main():\r\n country = str(input(\"\\nPlease, enter a country, where you want to ski: \"))\r\n date = str(input(\"Please, input a data in format yyyy-MM-dd: \"))\r\n if get_all_data(country, date) == \"!\":\r\n return \r\n get_all_data(country, date)\r\n all_data_end = convert_in() # list with SkiWeather items\r\n \r\n print(\"\\nIn {0}'s ski resorts during {1} the next weather is predicted:\\n\".format(country, date))\r\n \r\n for res in all_data_end:\r\n if res.name() != None:\r\n if res.whether() == [False] * 3:\r\n print(\"\\tIn {0}: the weather will not be suitable for skiing during all day.\".format(res.name()))\r\n elif res.whether() == [True] * 3:\r\n print(\"\\tIn {0}: the weather will be suitable for skiing during all day.\".format(res.name()))\r\n else:\r\n print(\"\\tIn {0}:\".format(res.name()))\r\n if res.whether()[0] == True:\r\n print(\"\\t\\tin the morning the weather will be suitable for skiing;\")\r\n if res.whether()[0] == False:\r\n print(\"\\t\\tin the morning the weather will not be suitable for skiing;\")\r\n if res.whether()[1] == True:\r\n print(\"\\t\\tin the afternoon the weather will be suitable for skiing;\")\r\n if res.whether()[1] == False:\r\n print(\"\\t\\tin the afternoon the weather will not be suitable for skiing;\")\r\n if res.whether()[2] == True:\r\n print(\"\\t\\tin the eveningg the weather will be suitable for skiing.\")\r\n if res.whether()[2] == False:\r\n print(\"\\t\\tin the evening the weather will not be suitable for skiing.\")\r\n \r\n return \"\\n Have a good rest!!!\\n\"", "def findWetWeatherDays(self, dbsession, today):\n wetDays = dbsession.query(self.dt).filter(or_(self.weather_description == \"light rain\", self.weather_description == \"moderate rain\")).all()\n # if one of those days is today return it.\n # else just return a wet day.\n for i in range(len(wetDays)):\n if today 
== wetDays[i][0].weekday():\n return wetDays[i][0]\n else:\n return wetDays[0][0]", "def get_weather(station_id):\n latitude, longitude = helper.get_station_coordinate(db, station_id)\n return jsonify(scrape(latitude, longitude))", "def get_forecast():\n json_data = json.loads(make_forecastio_request())\n daily = json_data['daily']['data']\n forecast = []\n for day in daily:\n forecast.append(day['icon'])\n if len(forecast) < 4:\n giveup()\n return forecast", "def get_city_weather_and_dispatch(city):\n # accumulate weather record for city\n check_path(exp_data_path + os.sep + 'weather')\n check_path(exp_data_path + os.sep + 'weather' + os.sep + city)\n time_index = np.load(exp_data_path + os.sep + 'station_list' + os.sep + 'time_index.npy', allow_pickle=True)\n time_index = dict(time_index.tolist())\n with open(exp_data_path + os.sep + 'weather' + os.sep + city + os.sep + 'whole_{}.csv'.format(city), \"w\") as f:\n writer = csv.writer(f)\n res = []\n for file in os.listdir(spider_data_path + os.sep + 'weather' + os.sep + city):\n with open(spider_data_path + os.sep + 'weather' + os.sep + city + os.sep + file) as day:\n reader = csv.reader(day)\n for line in reader:\n line[0] = line[0].replace(\"\\ufeff\", \"\")\n if line[0] in time_index['rev_index']:\n line.insert(0, time_index['rev_index'][line[0]])\n res.append(line)\n res = sorted(res, key=lambda x : x[0])\n writer.writerows(res)\n\n # replace index and divide into concrete matter\n name_pos = {'weather_type': 2, 'temperature': 3, 'air': 4, 'wind': 6}\n con_res = {}\n with open(exp_data_path + os.sep + 'weather' + os.sep + city + os.sep + 'whole_{}.csv'.format(city)) as f:\n reader = csv.reader(f)\n for line in reader:\n for name, index in name_pos.items():\n if name not in con_res:\n con_res[name] = []\n con_res[name].append([line[0], line[index]])\n\n for name, index in name_pos.items():\n with open(exp_data_path + os.sep + 'weather' + os.sep + city + os.sep + '{}_{}.csv'.format(city, name), \"w\") as f:\n writer = csv.writer(f)\n for line in con_res[name]:\n writer.writerow(line)", "def get_forecasts(api_key, lat, lng):\n current_time = datetime.datetime.now()\n forecast = forecastio.load_forecast(api_key, lat, lng, time=current_time)\n result = {}\n for day in forecast.daily().data:\n sunrise = pytz.utc.localize(day.sunriseTime)\n sundown = pytz.utc.localize(day.sunsetTime)\n print('Sun up: {}, sun down: {}, moon phase: {}'.format(sunrise, sundown, day.moonPhase))\n day = forecast.daily().data[0]\n result['sunrise'] = pytz.utc.localize(day.sunriseTime).replace(tzinfo=datetime.timezone.utc).astimezone(tz=None)\n result['sunset'] = pytz.utc.localize(day.sunsetTime).replace(tzinfo=datetime.timezone.utc).astimezone(tz=None)\n result['moonphase'] = day.moonPhase\n return result", "def get_temperature(self, day):\r\n return self.temperatures[day]", "def get_zakopane_hourly_weather():\n zakopane = TwelveHoursWeatherForecast(location.get(\"zakopane\", \"\"))\n zakopane_weather_detail = zakopane.get_hourly_weather_details()\n zakopane_hourly_weather_detail = []\n for data in zakopane_weather_detail:\n zakopane_hourly_weather_detail.append(data)\n return zakopane_hourly_weather_detail", "def full_broadcast(city_name):\n today_weather(city_name)\n for i in range(1, 4):\n output = (\n (datetime.date.today() + datetime.timedelta(days=i)).strftime(\"%d/%m/%Y\")\n + \"Temperature: \"\n + \"{}\".format(\n get_weather(\n city_name,\n (datetime.date.today() + datetime.timedelta(days=i)).strftime(\n \"%d/%m/%Y\"\n ),\n )\n )\n )\n print(output)", 
"def getHourlyWind(self, keyword):\n\n\t\tweather_data = self.getHourlyWeatherFromCSV(keyword, \"f\", \"wind\")\n\t\twind_values = [] # Array that will contain all the wind data\n\t\twind_data = {} # Dictionary of wind data\n\n\t\t# Getting humidity data\n\t\tfor data in weather_data:\n\t\t\twind_data[\"x\"] = self.helper.getDateInEpoch(data[\"date\"])\n\t\t\twind_data[\"y\"] = float(data[\"wind\"].split(\" \")[1])\n\t\t\twind_values.append(wind_data)\n\t\t\twind_data = {}\n\n\t\treturn wind_values", "def process(self, entity):\n\n\t\tresp = ''\n\n\t\t# last time forecast.io was called\n\t\tnow = time.localtime()\n\t\tdiff = (time.mktime(now) - time.mktime(self.weather_time)) / 60\n\t\t# print 'diff', diff\n\n\t\t# grab the json info\n\t\tj = self.forecast.json\n\n\t\t# update if it has been too long ... 5 mins\n\t\tif diff > 5:\n\t\t\tself.forecast.update()\n\t\t\tj = self.forecast.json\n\t\t\tself.weather_time = now\n\t\t\tprint 'update'\n\n\t\t# get weather asked for: today, tomorrow, monday, sunday, etc\n\t\tif 'datetime' in entity:\n\t\t\tt = entity['datetime'][0]['value']\n\t\t\tasked = time.strptime(t.split('.')[0],'%Y-%m-%dT%H:%M:%S')\n\n\t\t\t# get how many days in future\n\t\t\tw_time = asked.tm_mday - now.tm_mday\n\t\t\tif w_time >= 0 or w_time < 7: resp = self.grabWeatherDay( int( w_time ) )\n\n\t\telse:\n\t\t\ttemp = j['currently']['apparentTemperature']\n\t\t\train = j['currently']['precipProbability']*100.0\n\t\t\tresp = 'The weather is currently {0:d} degrees with {1:d} percent chance of rain'.format(temp, rain)\n\n\t\treturn resp", "def get_all_station_feature(city):\n poi_frequency = np.load(exp_data_path + os.sep + 'poi_frequency' + os.sep + 'poi_frequency_{}.npy'.format(city),\n allow_pickle=True) # .tolist()\n poi_num = np.load(exp_data_path + os.sep + 'poi' + os.sep + 'poi_{}.npy'.format(city), allow_pickle=True)\n poi_entropy = np.load(exp_data_path + os.sep + 'poi_entropy' + os.sep + 'poi_entropy_{}.npy'.format(city),\n allow_pickle=True)\n road = np.load(exp_data_path + os.sep + 'roadnet' + os.sep + 'roadnet_{}.npy'.format(city), allow_pickle=True)\n transportation = np.load(exp_data_path + os.sep + 'transportation' + os.sep + 'transportation_{}.npy'.format(city),\n allow_pickle=True)\n commerce = np.load(exp_data_path + os.sep + 'commerce' + os.sep + 'commerce_{}.npy'.format(city), allow_pickle=True)\n\n file_name = exp_data_path + os.sep + 'station' + os.sep + 'all_demand_{}.npy'.format(city)\n demand_data = np.load(file_name, allow_pickle=True)\n num = demand_data[:, 0, -2, np.newaxis] # todo check meaning here, get quick and slow feature\n\n raw_data = np.concatenate((num, poi_frequency, poi_num, poi_entropy, road, transportation, commerce), axis=1)\n csv_data = pd.DataFrame(raw_data, columns=GENERAL_HEADER)\n\n file_path = exp_data_path + os.sep + 'static' + os.sep + 'static_feature_{}.csv'.format(city)\n if os.path.exists(file_path):\n os.remove(file_path)\n csv_data.to_csv(file_path)\n pass", "def print_daily_forecast(update, context, day='today'):\n city = context.user_data['city']\n provider = context.user_data['provider']\n data = context.bot_data['forecast_data']\n for d in data: \n if d['city'] == city and d['provider'] == provider:\n forecast = d['forecast'][day]\n\n message = f\"ะŸั€ะพะณะฝะพะท ะฟะพะณะพะดั‹ ะฝะฐ {'ัะตะณะพะดะฝั' if day == 'today' else 'ะทะฐะฒั‚ั€ะฐ'} ({(datetime.date.today() if day == 'today' else datetime.date.today() + datetime.timedelta(days=1)).strftime('%A, %e %B')}):\\n\"\n\n for f in forecast:\n if f['time'] in [\"9:00\", 
\"15:00\", \"21:00\"]:\n message += f\"\"\"\n*{f['time']}* {f['temperature']} {f['description']} {f['emoji']}\n{'ะžัะฐะดะบะธ: ' + f['precipitation'] + ' ะผะผ' if provider == 'gismeteo' else 'ะ’ะตั€ะพัั‚ะฝะพัั‚ัŒ ะพัะฐะดะบะพะฒ: ' + f['precipitation_chance'] + '%'}\nะ’ะตั‚ะตั€: {f['windspeed'] + ' ะผ/c'}\n\"\"\"\n context.bot.send_message(chat_id=update.effective_chat.id, text=message, parse_mode='markdown')", "def get_forecast(location_list):\n #Might need to munge location to get a query out of it\n location, human_location = location_list\n date = datetime.datetime.today()\n query = location\n url = \"http://api.wunderground.com/auto/wui/geo/ForecastXML/index.xml?query=%s\" % query\n f = urllib2.urlopen(url)\n xml = f.read()\n root = ET.XML(xml)\n \n forecast = {'location': location, 'human_location': human_location}\n #Find forecast\n simple = root.find('simpleforecast')\n for day in simple.findall('forecastday'):\n forecast['forecast_date'] = parser.parse(day.find('date').find('pretty').text)\n forecast['high_temp'] = day.find('high').find('fahrenheit').text\n forecast['low_temp'] = day.find('low').find('fahrenheit').text\n forecast['conditions'] = day.find('conditions').text\n forecast['icon'] = day.find('icon').text\n forecast['skyicon'] = day.find('skyicon').text\n try:\n f, created = ForecastDay.objects.get_or_create(**forecast)\n if created:\n f.save()\n except:\n logging.info(\"Long Range Forecast Data missing or already created\")\n \n \n #Find Moon\n moon = root.find('moon_phase')\n illuminated = moon.find('percentIlluminated')\n age = moon.find('ageOfMoon')\n sun_rise = datetime.datetime(date.year, date.month, date.day, **_hour_minute(moon.find('sunrise')))\n sun_set = datetime.datetime(date.year, date.month, date.day, **_hour_minute(moon.find('sunset'))) \n #It doesn't error, so it appears to be doing what it should.\n f = ForecastDay.objects.get(forecast_date=date)\n f.sun_rise = sun_rise\n f.sun_set = sun_set\n f.moon_illuminated = illuminated.text\n f.moon_age = age.text\n try:\n f.save()\n except:\n logging.info(\"Moon Data missing or no new data available\")", "def get_weather(phenny, input):\n import wunderground\n \n report_type = 'conditions'\n\n unicode_input = unicode(input)\n if unicode_input[1:8] == 'weather':\n location_str = unicode_input[9:]\n elif unicode_input[1:3] == 'w ':\n location_str = unicode_input[3:]\n try:\n json_data = wunderground.format_json(location_str, input.weather_API, report_type)\n output_results(phenny, json_data)\n except Exception, e:\n print e\n phenny.say('Could not find results for \"%s\", please reword the search and try again.' 
% location_str)", "def GetWeatherByLocation():\n Location = GetLocation()\n WeatherUrl =\"http://api.openweathermap.org/data/2.5/weather?\"+ Location +\"&appid=b4bacbe2dc824431289800439f1ec3df&units=metric\"\n WeatherRequest = requests.get(WeatherUrl)\n WeatherInfo = WeatherRequest.json()\n pprint(WeatherInfo)\n WindSpeed = WeatherInfo['wind']['speed']\n pprint(WindSpeed)\n Temp = WeatherInfo['main']['temp']\n Humidity = WeatherInfo['main']['humidity']\n Description = WeatherInfo['weather'][0]['description']\n print(type(Humidity))\n return(Temp, Humidity, Description)", "def combine_weather(weather):\n\n weather1 = weather[weather[\"Station\"] == 1]\n weather2 = weather[weather[\"Station\"] == 2]\n\n\n pass", "def temperatures():\n\n return station_9281", "def test_get_ny_daily_data(self):\n dag = self.dagbag.get_dag(self.dag_id)\n extract_task = dag.get_task('extract')\n resp = self.extract.getDailyNyDataFromAPI()\n self.assertIsNotNone(resp)\n self.assertEqual(type(resp), list)", "def get_weather(lat, lon):\r\n\r\n # API key, retrieved from configure.py\r\n api_key = configure.WEATHER_KEY\r\n\r\n # API endpoint\r\n url = f'https://api.openweathermap.org/data/2.5/onecall?lat={lat}&lon={lon}&appid={api_key}'\r\n\r\n # API call\r\n response = requests.get(url)\r\n\r\n # Collect response in json format\r\n weather = response.json()\r\n\r\n # Interpret Current Weather\r\n current_weather = weather['current']\r\n\r\n # By default, the API returns all requested times in unix format\r\n current_weather['dt'] = epoch_to_human_readable_date(current_weather['dt'])\r\n current_weather['sunrise'] = epoch_to_human_readable_date(current_weather['sunrise'])\r\n current_weather['sunset'] = epoch_to_human_readable_date(current_weather['sunset'])\r\n\r\n # By default, the API returns all temperature values in Kelvin\r\n current_weather['dew_point'] = {'kelvin': current_weather['dew_point'], \r\n 'fahrenheit': round(kelvin_to_fahrenheit(current_weather['dew_point']), 2),\r\n 'celsius': round(kelvin_to_celsius(current_weather['dew_point']), 2)}\r\n\r\n current_weather['feels_like'] = {'kelvin': current_weather['feels_like'], \r\n 'fahrenheit': round(kelvin_to_fahrenheit(current_weather['feels_like']), 2),\r\n 'celsius': round(kelvin_to_celsius(current_weather['feels_like']), 2)}\r\n\r\n current_weather['temp'] = {'kelvin': current_weather['temp'], \r\n 'fahrenheit': round(kelvin_to_fahrenheit(current_weather['temp']), 2),\r\n 'celsius': round(kelvin_to_celsius(current_weather['temp']), 2)}\r\n\r\n # Change icon value to image url to be used in html img tag as src\r\n current_weather['weather'][0]['icon'] = 'http://openweathermap.org/img/wn/' + current_weather['weather'][0]['icon'] + '@2x.png'\r\n\r\n # Interpret Daily Weather\r\n daily_forcast = weather['daily']\r\n\r\n for day in daily_forcast:\r\n # Get readable dates and times\r\n day['dt'] = epoch_to_human_readable_date(day['dt'])\r\n day['sunrise'] = epoch_to_human_readable_date(day['sunrise'])\r\n day['sunset'] = epoch_to_human_readable_date(day['sunset'])\r\n\r\n # Change icon value to image url to be used in html img tag as src\r\n day['weather'][0]['icon'] = 'http://openweathermap.org/img/wn/' + day['weather'][0]['icon'] + '@2x.png'\r\n\r\n\r\n # Convert temperatures in 'feels_like' dictionary from Kelvin to Fahrenheit and Celsius\r\n\r\n for temp in day['feels_like']:\r\n day['feels_like'][temp] = {'kelvin': day['feels_like'][temp], \r\n 'fahrenheit': round(kelvin_to_fahrenheit(day['feels_like'][temp]), 2),\r\n 'celsius': 
round(kelvin_to_celsius(day['feels_like'][temp]), 2)}\r\n\r\n\r\n # Convert temperatures in 'temp' dictionary from Kelvin to Fahrenheit\r\n\r\n for temp in day['temp']:\r\n day['temp'][temp] = {'kelvin': day['temp'][temp], \r\n 'fahrenheit': round(kelvin_to_fahrenheit(day['temp'][temp]), 2),\r\n 'celsius': round(kelvin_to_celsius(day['temp'][temp]), 2)}\r\n\r\n # Interpret Hourly Weather\r\n hourly_weather = weather['hourly']\r\n\r\n # Only manipulating data for hours of the current date, rest will be ommitted\r\n\r\n curr_date = epoch_to_human_readable_date(hourly_weather[0]['dt']).split(\",\", 1)[1][:3]\r\n\r\n last_hour = 0\r\n\r\n for index, hour in enumerate(hourly_weather):\r\n # Get date in relation to the hour\r\n date = epoch_to_human_readable_date(hour['dt']).split(\",\", 1)[1][:3]\r\n if date != curr_date:\r\n last_hour = index\r\n break\r\n \r\n # Convert temperatures in 'dew_point' dictionary from Kelvin to Fahrenheit and Celsius\r\n hour['dew_point'] = {'Kelvin':hour['dew_point'],\r\n 'fahrenheit': round(kelvin_to_fahrenheit(hour['dew_point']), 2),\r\n 'celsius': round(kelvin_to_celsius(hour['dew_point']), 2)}\r\n\r\n # Get readable dates and times\r\n hour['dt'] = epoch_to_human_readable_date(hour['dt'])\r\n\r\n # Convert temperatures in 'feels_like' dictionary from Kelvin to Fahrenheit and Celsius\r\n hour['feels_like'] = {'kelvin': hour['feels_like'], \r\n 'fahrenheit': round(kelvin_to_fahrenheit(hour['feels_like']), 2),\r\n 'celsius': round(kelvin_to_celsius(hour['feels_like']), 2)}\r\n\r\n # Convert temperatures in 'temp' dictionary from Kelvin to Fahrenheit\r\n hour['temp'] = {'kelvin': hour['temp'], \r\n 'fahrenheit': round(kelvin_to_fahrenheit(hour['temp']), 2),\r\n 'celsius': round(kelvin_to_celsius(hour['temp']), 2)}\r\n\r\n hour['weather'][0]['icon'] = 'http://openweathermap.org/img/wn/' + hour['weather'][0]['icon'] + '@2x.png'\r\n\r\n\r\n return current_weather, daily_forcast, hourly_weather[:last_hour]", "def fetch_sundata(self, date: datetime) -> Sundata:\n pass", "def testWeatherFetch(self):\n\n timeCol = 'timestamp'\n rows = []\n for row in self.aggregator.rawData(dataType = 'weather',\n orderBy = [timeCol],\n timestampCol = timeCol,\n startDate = self.testStart,\n endDate = self.testEnd):\n rows.append(row)\n self.assertIsNotNone(rows, 'Rows are present.')", "def print_daily_summary(weather_data_day: dict):\n date_time = time.localtime(weather_data_day['dt'])\n sunrise_time = time.localtime(weather_data_day['sunrise'])\n sunset_time = time.localtime(weather_data_day['sunset'])\n print(f\"\\nForecast for Vancouver on {date_time[2]}/{date_time[1]}/{date_time[0]}, at {date_time[3]}:00 local time.\")\n print(f\"Temperature low: {weather_data_day['temp']['min']}C. 
Temperature high: {weather_data_day['temp']['max']}C.\")\n print(f\"Time of sunrise is: {sunrise_time[3]}:{sunrise_time[4]:02} local time.\", end=\" \")\n print(f\"Time of sunset is: {sunset_time[3]}:{sunset_time[4]:02} local time.\")\n print(f\"Humidity (rh) is {weather_data_day['humidity']}%.\")\n print(f\"Wind speed is {weather_data_day['wind_speed']}m/s.\")\n print(f\"The weather is expected to be {weather_data_day['weather'][0]['description']}.\")", "def fetch_weather():\n\n # Fetch the current weather.\n response = requests.get(f'https://api.openweathermap.org/data/2.5/weather?q=Manchester,UK&units=metric&APPID={WEATHER_API_KEY}')\n\n # Return the data.\n return response.json()", "def get_last_location_weather(location=\"Ljubljana\"):\n address = \"http://www.arso.gov.si/vreme/napovedi%20in%20podatki/vreme_avt.html\"\n page = requests.get(address)\n html = page.content\n\n parsed_html = BeautifulSoup(html)\n\n station_data = parsed_html.find(\"table\", attrs={\"class\": \"online\"})\n station_names = [x.string for x in station_data.findAll(\"td\", attrs={\"class\": \"onlineimena\"})]\n station_data = [x for x in station_data.findAll(\"tr\")]\n print station_names\n datad = namedtuple(\"location_weather\", (\"temperature\",\n \"humidity\",\n \"wind_direction\",\n \"wind_speed\",\n \"wind_gusts\",\n \"air_pressure\",\n \"rain\",\n \"sun_radiation\"))\n for i, station_name in enumerate(station_names):\n if station_name.lower() == location.lower():\n print station_name\n data = [x.string for x in station_data[i + 2].findAll(\"td\")]\n data = data[1:]\n d = datad(data[0], data[1], slo_to_eng_compass(data[2]), data[3], data[5], *data[7:])\n return d", "def download():\r\n reader = GSODDataReader()\r\n year_list = range(2001, 2012)\r\n austin = reader.collect_data(year_list, exact_station=True,\r\n station_name='AUSTIN CAMP MABRY', state='TX', country='US')\r\n houston = reader.collect_data(year_list, exact_station=True,\r\n station_name='HOUSTON/D.W. 
HOOKS', state='TX', country='US')\r\n new_york = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEW YORK/LA GUARDIA', state='NY', country='US')\r\n newark = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEWARK INTL AIRPORT', state='NJ', country='US')\r\n punta_arenas = reader.collect_data(year_list, exact_station=True,\r\n station_name='PUNTA ARENAS', country='CH')\r\n wellington = reader.collect_data(year_list, exact_station=True,\r\n station_name='WELLINGTON AIRPORT', country='NZ')\r\n store = HDFStore('weather.h5')\r\n store['austin'] = austin\r\n store['houston'] = houston\r\n store['nyc'] = new_york\r\n store['newark'] = newark\r\n store['punta_arenas'] = punta_arenas\r\n store['wellington'] = wellington\r\n store.close()", "def get_hourly_weather_details(self, hours: int = None):\n if hours is None:\n hours = 11\n forecast = super().get_weather_forecast(self.BASE_URL)\n headers = [\"date_time\",\n \"temp\",\n \"real_feel_temp\",\n \"wind_speed\",\n \"rain_probability\",\n \"cloud_cover\",\n ]\n for number in range(hours):\n data = []\n date_time = forecast[number]['DateTime']\n date_time = date_time[:16]\n date_time = date_time.replace('T', ' ')\n data.append(date_time)\n temp = round((int(\n forecast[number][\"Temperature\"][\"Value\"]) - 32) / 1.8)\n data.append(temp)\n real_feel_temp = round((int(\n forecast[number][\"RealFeelTemperature\"][\"Value\"]) - 32) / 1.8)\n data.append(real_feel_temp)\n wind_speed = forecast[number][\"Wind\"][\"Speed\"][\"Value\"]\n data.append(wind_speed)\n rain_probability = forecast[number][\"RainProbability\"]\n data.append(rain_probability)\n cloud_cover = forecast[number][\"CloudCover\"]\n data.append(cloud_cover)\n yield dict(zip(headers, data))", "def meteo(station='caqc0177'):\r\n long=getLongForecast(station)\r\n return dict(\r\n title= long[0] + \" - \" + station,\r\n message=datetime.now(),\r\n year=datetime.now().year,\r\n longTerm=long[1],\r\n shortTerm=getShortForecast(station)\r\n )", "def test():\n temp_data = fetch_temp_data(\n (\"https://opendata-download-metobs.smhi.se/api/version/\" +\n \"latest/parameter/1/station/52350/period/latest-day/data.json\"))\n data = temp_series(temp_data)\n print(data)", "def parse_weather(data: DataFrame) -> List[WeatherData]:\n parsed_results = []\n\n for index, row in data.iterrows():\n date = sqlite3.Date(index.year, index.month, index.day)\n item = WeatherData(\n date=date,\n average_temp=celsius_to_fahr(row.get('tavg', 0)),\n precipitation=row.get('prcp', 0),\n )\n parsed_results.append(item)\n return parsed_results", "def ping_darksky(time, key):\n boston = forecast(key, *BOSTON, time=time.isoformat())\n\n fetch = {\n 'day': time,\n 'tempMin': boston[\"daily\"][\"data\"][0].get('temperatureMin', np.nan),\n 'tempMax': boston[\"daily\"][\"data\"][0].get('temperatureMax', np.nan),\n 'summary': boston[\"daily\"][\"data\"][0].get('summary', np.nan),\n 'desc': boston[\"daily\"][\"data\"][0].get('icon', np.nan),\n 'cloudCover': boston[\"daily\"][\"data\"][0].get('cloudCover', np.nan)}\n return fetch", "def get_weather_report(takeoff,weather):\n # HINT: Looping through the dictionary is VERY slow because it is so large\n # You should convert the takeoff time to an ISO string and search for that first.\n # Only loop through the dictionary as a back-up if that fails.\n \n # Search for time in dictionary\n # As fall back, find the closest time before takeoff\n \n from dateutil.parser import parse\n \n result = []\n takeofftime = takeoff.isoformat()\n \n if 
takeofftime in weather.keys():\n result = weather[takeofftime]\n \n elif takeofftime not in weather.keys():\n weatherlist = list(weather.keys())\n count = len(weatherlist)\n for m in weatherlist[::-1]:\n if m < takeofftime:\n result = weather[m]\n \n else: \n result = None\n \n \n return result", "def weather():\r\n def weather_api_call():\r\n with open('config.json', 'r') as conf:\r\n conf = json.load(conf)\r\n # Gets the API key from the config.json file\r\n weather_api_key = conf[\"weather_api_key\"]\r\n weather_city_name = conf['weather_city_name']\r\n response = requests.get(\r\n 'http://api.openweathermap.org/data/2.5/weather?'\r\n 'q=' + weather_city_name + '&units=metric&appid=' + weather_api_key)\r\n resp_json = response.json()\r\n with open('weather.json', 'w') as outfile:\r\n # Uses the data from the API to overwrite the weather data\r\n json.dump(resp_json, outfile)\r\n outfile.close()\r\n\r\n def weather_data_extractor():\r\n with open('weather.json', 'r') as weather_json:\r\n weather_json = json.load(weather_json)\r\n temp = weather_json[\"main\"]\r\n weather_item = weather_json[\"weather\"]\r\n desc = weather_item[0]\r\n current_temperature = \"The current temperature is: \" + \\\r\n str(int(temp[\"temp\"])) + \"C\"\r\n current_feels_like = \"Feels like: \" + \\\r\n str(int(temp[\"feels_like\"])) + \"C\"\r\n forecast = desc[\"main\"]\r\n return current_feels_like, current_temperature, forecast\r\n\r\n weather_api_call()\r\n return weather_data_extractor()", "def get_day_query_info():\n all_rt_heat_metric_list = get_rt_day_query_count()\n day_query_rt_dict = {}\n for each_rt in all_rt_heat_metric_list:\n query_list = []\n dataset_id = each_rt[\"key\"]\n query_count = each_rt[\"doc_count\"]\n for each_appcode in each_rt[\"app_count\"][\"buckets\"]:\n app_code = each_appcode[\"key\"]\n app_query_count = each_appcode[\"doc_count\"]\n for each_day in each_appcode[\"agg_by_day\"][\"buckets\"]:\n timestamp = each_day[\"key\"] / 1000\n time_str = each_day[\"key_as_string\"]\n day_query_count = each_day[\"doc_count\"]\n query_list.append(\n {\n \"dataset_id\": dataset_id,\n \"app_code\": app_code,\n \"timestamp\": timestamp,\n \"time_str\": time_str,\n \"day_query_count\": day_query_count,\n \"app_query_count\": app_query_count,\n }\n )\n day_query_rt_dict[dataset_id] = {\n \"query_list\": query_list,\n \"query_count\": query_count,\n }\n # ๆœ‰ๆŸฅ่ฏข้‡็š„rt\n day_query_rt_list = list(day_query_rt_dict.keys())\n return day_query_rt_dict, day_query_rt_list", "def import_weather(keys):\n # imports weather and cleans\n df_all_weather = get_weather_as_df(keys)\n return clean_weather_df(df_all_weather)", "def getHourlyWeather(self, keyword, temp, last_hour):\n\n\t\t# Variables\n\t\tconditions = []\n\t\tweather = {}\n\n\t\tfio = self.helper.getFio(keyword, temp) # Getting fio object\n\n\t\tif fio.has_hourly() is True:\n\t\t\thourly = FIOHourly.FIOHourly(fio)\n\n\t\t\t# Getting weather forecast for next 12 hours\n\t\t\tfor hour in xrange(1, last_hour):\n\t\t\t\tfor item in hourly.get_hour(hour).keys():\n\t\t\t\t\t# Parsing data from hourly fio object and adding it to weather dictionary\n\t\t\t\t\tif item == \"icon\":\n\t\t\t\t\t\tweather[item] = unicode(hourly.get_hour(hour)[item])\n\t\t\t\t\tif item == \"summary\":\n\t\t\t\t\t\tweather[item] = unicode(hourly.get_hour(hour)[item])\n\t\t\t\t\tif item == \"temperature\":\n\t\t\t\t\t\tif temp == \"f\":\n\t\t\t\t\t\t\tweather[item] = str(hourly.get_hour(hour)[item]).split(\".\")[0] + \"ยฐ F\"\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tweather[item] = 
str(hourly.get_hour(hour)[item]).split(\".\")[0] + \"ยฐ C\"\n\t\t\t\t\tif item == \"humidity\":\n\t\t\t\t\t\tweather[item] = str(hourly.get_hour(hour)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\t\tif item == \"time\":\n\t\t\t\t\t\tweather[item] = self.helper.getDateForWeather(hourly.get_hour(hour)[item])\n\t\t\t\t\tif item == \"precipProbability\":\n\t\t\t\t\t\tweather[item] = str(hourly.get_hour(hour)[item] * 100).split(\".\")[0] + \"%\"\n\t\t\t\t\tif item == \"windSpeed\":\n\t\t\t\t\t\twindSpeed = unicode(hourly.get_hour(hour)[item])\n\t\t\t\t\tif item == \"windBearing\":\n\t\t\t\t\t\twindBearing = unicode(hourly.get_hour(hour)[item])\n\t\t\t\t\t\twindBearing = self.helper.convertWindBearing(windBearing)\n\t\t\t\t\t\tweather[\"wind\"] = windBearing + \" \" + windSpeed + \" mph\"\n\t\t\t\t\tif item == \"cloudCover\":\n\t\t\t\t\t\tweather[item] = str(hourly.get_hour(hour)[item] * 100).split(\".\")[0] + \"%\"\n\n\t\t\t\t# Populating conditions array with weather dicitonary\n\t\t\t\tconditions.append(weather)\n\t\t\t\tweather = {}\n\t\telse:\n\t\t\treturn 'No hourly data'\n\t\treturn conditions", "def fetch_weather(y):\r\n # request parameter(s): Start with '?'\r\n # separate name and value with '='\r\n # multiple parameter name value pairs are separate with '&'\r\n query_string = \"?id={}&units=imperial&APIKEY={}\".format(y, API_KEY)\r\n request_url = WS_URL + query_string\r\n print(\"Request URL: \", request_url)\r\n response = requests.get(request_url)\r\n if response.status_code == 200:\r\n city_name = response.json()[\"city\"][\"name\"]\r\n lst = response.json()[\"list\"]\r\n tmp_list = []\r\n for i in range(len(lst) // 8):\r\n li = [x for x in range(len(lst)) if x // 8 == i]\r\n tmp_list.append(max([lst[j][\"main\"][\"temp_max\"] for j in li]))\r\n return City(city_name, tmp_list)\r\n else:\r\n print(\"How should I know?\")\r\n return None", "def update_weather(location_request, db):\n with open(expanduser(\"~/bin/my_utilities/config/darksky-key\")) as f:\n ds_key = f.readline().strip()\n current = []\n current_day = 0\n with forecast(ds_key, *location_request, units=\"uk2\") as location:\n raw = location['hourly']['data'][0]\n current.append(datetime.datetime.now().hour)\n current.append(day_relative_to_absolute(current_day))\n current.append(raw[\"temperature\"])\n current.append(raw[\"apparentTemperature\"])\n current.append(raw[\"precipIntensity\"])\n current.append(raw[\"precipProbability\"] * 100)\n current.append(raw[\"humidity\"] * 100)\n current.append(raw[\"dewPoint\"])\n current.append(raw[\"windSpeed\"])\n current.append(raw[\"windBearing\"])\n current.append(raw[\"windGust\"])\n current.append(raw[\"pressure\"])\n current.append(raw[\"cloudCover\"] * 100)\n current.append(raw[\"uvIndex\"])\n current.append(raw[\"visibility\"])\n current = format_list_for_db(current)\n\n columns = [\"hour\", \"day\", \"temp\", \"apptemp\", \"precipint\", \"precipprob\",\n \"humidity\", \"dewpoint\", \"windspeed\", \"windbearing\",\n \"windgust\", \"pressure\", \"cloudcover\", \"uvindex\", \"visibility\"]\n columns = format_list_for_db(columns)\n statement = f\"INSERT INTO WEATHER {columns} VALUES {current}\"\n print(statement)\n cursor = db.cursor()\n cursor.execute(statement)\n cursor.close()", "def get_current(location_list):\n import re\n import feedparser\n location, human_location = location_list\n city, state = human_location.split(',')\n url = \"http://rss.wunderground.com/auto/rss_full/%s/%s.xml\" % (state.strip(), city.strip())\n feed = feedparser.parse(url)\n s = 
feed.entries[0].summary\n current = {'location': location, 'human_location': human_location}\n \n current['observation_time'] = parser.parse(feed.entries[0].updated)\n temperature = re.compile('Temperature: ([\\d\\.]+)')\n current['temperature'] = temperature.search(s).group(1)\n humidity = re.compile('Humidity: (\\d+)')\n current['humidity'] = humidity.search(s).group(1)\n conditions = re.compile('Conditions: ([\\w\\s]+)')\n current['conditions'] = conditions.search(s).group(1)\n windspeed = re.compile('Wind Speed: ([\\d\\.]+)')\n current['wind_speed'] = windspeed.search(s).group(1)\n winddirection = re.compile('Wind Direction: (\\w+)')\n current['wind_direction'] = winddirection.search(s).group(1)\n try:\n f = Forecast(**current)\n f.save()\n except:\n logging.info(\"Current Forecast Data missing or no new data available\")", "def test():\n #Our API key\n api_key = \"5c5a1a440d3b0e89239368a9a8fb251b\"\n #latitude - The latitude of the location for the forecast\n lat = 13.7522222\n #longitude - The longitude of the location for the forecast\n lng = 100.4938889\n #datetime.datetime(year, month, day[, hour[, minute[, second[, microsecond[, tzinfo]]]]])\n year = 2015\n month = 11\n day = 8\n hour = 1\n minute = 22\n second = 33\n current_time = datetime.datetime.now()\n time = current_time\n #specific date\n #time = datetime.datetime(year, month, day, hour, minute, second, 44, pytz.UTC)\n forecast = forecastio.load_forecast(api_key, lat, lng, time=time)\n day = time.day\n the_dict = dict()\n the_list = list()\n #number of the day needs\n for i in range(2):\n forecast2 = forecastio.load_forecast(api_key, lat, lng, time=time)\n by_hour2 = forecast2.hourly()\n print(\"DAY:\",day,time.strftime(\"%B\"), file=myfile)\n for hourly_data_point in by_hour2.data:\n the_weather = \"\"+str(hourly_data_point)[30:].split(' at')[0].strip()\n the_list.append(the_weather)\n d = dict()\n for c in the_list:\n if c not in d:\n d[c] = 1\n else:\n d[c] = d[c] + 1\n sorted_d = sorted(d.items(), key=operator.itemgetter(0))\n sorted_d.reverse()\n print (sorted_d, file=myfile)\n #myfile.write(sorted_d)\n forecast2 = forecastio.load_forecast(api_key, lat, lng, time=datetime.datetime(2015, 11, day, 1, 22, 33, 44, pytz.UTC))\n print(forecast2.hourly().summary, file=myfile)\n #myfile.write(forecast2.hourly().summary)\n time += datetime.timedelta(days=1)\n day = time.day\n the_list = list()\n myfile.close()\n #Reset day for next method\n day = time.day\n print(\"===========Currently Data=========\")\n print(forecast.currently())\n #print(\"===========Hourly Data=========\")\n by_hour = forecast.hourly()\n #print(\"Hourly Summary: %s\" % (by_hour.summary))\n #print(\"bhs\",by_hour.summary)\n for hourly_data_point in by_hour.data:\n break\n #print(hourly_data_point)\n #print(str(hourly_data_point)[30:].split(' at')[0])\n #print(\"===========Daily Data=========\")\n by_day = forecast.daily()\n #API no longer support this point\n #print(\"Daily Summary: %s\" % (by_day.summary))\n for daily_data_point in by_day.data:\n break\n print(daily_data_point)", "def _get_dict_weather_data(self, weather_current):\n\n returned_dict = dict()\n returned_dict[\"weather_status\"] = weather_current.get_detailed_status()\n\n time_format = '%H:%M'\n if self.am_pm_time:\n time_format = '%I:%M %p'\n\n returned_dict[\"sunset\"] = datetime.fromtimestamp(weather_current.get_sunset_time()).strftime(time_format)\n returned_dict[\"sunrise\"] = datetime.fromtimestamp(weather_current.get_sunrise_time()).strftime(time_format)\n\n 
returned_dict[\"temperature\"] = int(round(weather_current.get_temperature(unit=self.temp_unit)[\"temp\"]))\n returned_dict[\"temperature_min\"] = int(round(weather_current.get_temperature(unit=self.temp_unit)[\"temp_min\"]))\n returned_dict[\"temperature_max\"] = int(round(weather_current.get_temperature(unit=self.temp_unit)[\"temp_max\"]))\n\n returned_dict[\"pressure\"] = weather_current.get_pressure()[\"press\"]\n returned_dict[\"sea_level_pressure\"] = weather_current.get_pressure()[\"sea_level\"]\n\n returned_dict[\"humidity\"] = weather_current.get_humidity()\n\n wind = weather_current.get_wind()\n wind_deg = wind.get(\"deg\", None)\n wind_speed = wind.get(\"speed\", None)\n returned_dict[\"wind_deg\"] = wind_deg\n returned_dict[\"wind_speed\"] = wind_speed\n\n snow_current = weather_current.get_snow()\n snow_current = snow_current.get('all', None)\n rain_current = weather_current.get_rain()\n rain_current = rain_current.get('all', None)\n returned_dict[\"rainfall\"] = rain_current\n returned_dict[\"snow\"] = snow_current\n\n returned_dict[\"clouds_coverage\"] = weather_current.get_clouds()\n\n return returned_dict", "def daily_search(term, state='', property=''):\n geo = \"\"\n if state == '':\n geo = state = 'US'\n else:\n geo = 'US-' + state\n\n out = service.getGraph(terms=term,\n restrictions_startDate='2008-01',\n restrictions_endDate='2008-07',\n restrictions_geo=geo,\n restrictions_property=property).execute().get('lines')[0].get('points')\n\n next = service.getGraph(terms=term,\n restrictions_startDate='2008-07',\n restrictions_endDate='2009-01',\n restrictions_geo=geo,\n restrictions_property=property).execute().get('lines')[0].get('points')\n\n #out['orig_value'] = out['value']\n #next['orig_value'] = next['value']\n\n multiplier = 1\n if next[30].get('value') != 0:\n multiplier = out[-1].get('value')/next[30].get('value')\n if multiplier == 0:\n multiplier = 1\n\n for i in next:\n i['value'] = i['value']*multiplier\n\n out = out + next[31:]\n\n for i in range(2009,2019):\n print(term + ', ' + property + ', ' + str(i))\n n = i + 1\n for j in range(1,3):\n if j == 1:\n s = str(i) + '-01'\n e = str(i) + '-07'\n else:\n s = str(i) + '-07'\n e = str(i + 1) + '-01'\n\n next = service.getGraph(terms=term,\n restrictions_startDate=s,\n restrictions_endDate=e,\n restrictions_geo=geo,\n restrictions_property=property).execute().get('lines')[0].get('points')\n\n #next['orig_value'] = next['value']\n\n multiplier = 1\n if next[30].get('value') != 0:\n multiplier = out[-1].get('value')/next[30].get('value')\n if multiplier == 0:\n multiplier = 1\n\n for k in next:\n k['value'] = k['value']*multiplier\n\n out = out + next[31:]\n\n\n\n\n if property == '':\n property = 'web'\n\n df1 = pd.DataFrame(out)\n\n df1['term'] = term\n df1['state'] = state\n df1['property'] = property\n\n return df1", "def predict_energy_consumption(buildings):\n forecasts = [forecast_for_building(building) for i, building in buildings.iterrows()]\n df = pd.concat(forecasts)\n df.drop(columns=\"id\", inplace=True)\n df = buildings.merge(df, left_on=\"id\", right_on=\"building_id\")\n df[\"meter\"] = 0\n df[\"floor_count\"] = df[\"floorcount\"]\n df[\"air_temperature\"] = df[\"temp\"]\n df[\"relative_humidity\"] = df[\"humidity\"]\n df[\"dew_temperature\"] = df[\"air_temperature\"] - ((100 - df[\"relative_humidity\"]) / 5)\n df[\"precip_depth_1_hr\"] = np.nan\n df[\"timestamp\"] = pd.to_datetime(df[\"date\"])\n df[\"wind_direction\"] = df[\"deg\"]\n df[\"wind_speed\"] = df[\"speed\"]\n\n df.drop(columns=[\"id\", 
\"name\", \"floorcount\", \"latitude\", \"longitude\", \"user_id\", \"temp\", \"feels_like\", \"temp_min\",\n \"temp_max\", \"pressure\", \"sea_level\", \"grnd_level\", \"humidity\", \"temp_kf\", \"main\", \"description\",\n \"icon\", \"speed\", \"deg\", \"date\"], inplace=True)\n\n df_temp = df.copy(deep=True)\n for i in range(1, 4):\n df_temp[\"meter\"] += 1\n df = pd.concat([df, df_temp])\n del df_temp\n\n cfg = {\n 'circular_timestamp_encoding': False,\n 'log_transform_square_feet': True,\n 'log_transform_area_per_floor': True,\n 'label_square_feet_outlier': True,\n 'label_area_per_floor_outlier': True,\n 'encode_wind_direction': False,\n 'include_feels_like': True,\n 'fill_na_with_zero': False,\n 'add_lag_features': True,\n 'lag_columns': ['air_temperature', 'dew_temperature', 'cloud_coverage'],\n 'lag_windows': [6, 24],\n }\n [df] = build_features(df, cfg=cfg)\n\n df.reset_index(inplace=True, drop=True)\n building_ids = df[\"building_id\"]\n timestamps = df[\"timestamp\"]\n df.drop(columns=[\"timestamp\", \"month\", \"wind_direction\", \"wind_speed\", \"building_id\"], inplace=True)\n\n model_endpoint = \"http://model:5001/predict\"\n data = df.to_json()\n response = requests.get(model_endpoint, json=data).json()\n\n predictions = pd.DataFrame({\"reading\": response[\"prediction\"],\n \"building_id\": building_ids,\n \"meter\": df[\"meter\"],\n \"timestamp\": timestamps,\n \"air_temperature\": df[\"air_temperature\"]})\n return predictions", "def weather_of_wind(city):\n pattern = re.compile(r'.*(\\d+).*')\n\n time_index = np.load(exp_data_path + os.sep + 'station_list' + os.sep + 'time_index.npy', allow_pickle=True)\n time_index = dict(time_index.tolist())\n numpy_res = np.empty((len(time_index['index']),))\n with open(exp_data_path + os.sep + 'weather' + os.sep + city + os.sep + '{}_wind.csv'.format(city)) as f:\n reader = csv.reader(f)\n for line in reader:\n if 'ๅพฎ' in line[1]:\n line[1] = 0\n else:\n line[1] = pattern.match(line[1]).group(1)\n numpy_res[int(line[0])] = int(line[1])\n\n file_name = exp_data_path + os.sep + 'weather' + os.sep + city + os.sep + '{}_wind'.format(city)\n if os.path.exists(file_name):\n os.remove(file_name)\n np.save(file_name, numpy_res)\n pass", "def get_weather_forecast_comparison(user_city=\"lagos\", explored_city=\"london\", days=7):\n user_city_forecast = weather_client.getForecastWeather(q=user_city, days=days)['forecast']['forecastday']\n explored_city_forecast = weather_client.getForecastWeather(q=explored_city, days=days)['forecast']['forecastday']\n weather_forecast_comparison = zip(map(get_weather_info, user_city_forecast), map(get_weather_info, explored_city_forecast))\n return weather_forecast_comparison", "def fetch_data(self, table_name: str, year: int) -> list:\n with DBOperations(self.name) as dbcm:\n dbcm.execute(f\"select * from {table_name} where sample_date like '{year}%';\")\n fetch_weather = dbcm.fetchall()\n\n return fetch_weather", "def populate_weather(connection):\n metadata = load_metadata('weather')\n cursor = connection.cursor()\n water_defs = get_water_definitions()\n\n # Check if tables are already populated.\n cursor.execute('SELECT count(*) FROM weather')\n weather_count = cursor.fetchone()[0]\n\n if weather_count:\n print('Weather tables already populated!')\n return\n\n print('WEATHER:')\n\n # Darksky data\n for dir_name, location in metadata.items():\n print(f'\\tPopulating weather: \"{location[\"name\"]}\".')\n\n # Insert location.\n cursor.execute(f'''INSERT INTO locations(name, lat, lng)\n VALUES 
('{location['name']}', {location['lat']}, {location['lng']})''')\n location_id = cursor.lastrowid\n\n # Set weather locations for watercourses/aquifers.\n for water_body in [d['body'] for d in water_defs.values()]:\n if water_body in location:\n cursor.execute(f'''UPDATE {water_body}s\n SET location_id = {location_id}\n WHERE name IN ('{\"','\".join(location[water_body])}')''')\n break\n\n dir_path = get_data_path('weather', 'raw', dir_name)\n for json_file_name in os.listdir(dir_path):\n json_path = os.path.join(dir_path, json_file_name)\n with open(json_path, 'r', encoding='utf-8') as json_file:\n print(f'\\t\\tPopulating year: {json_file_name[0:-5]}')\n year_forecasts = json.load(json_file)\n for date, date_forecast in year_forecasts.items():\n hourly_forecasts = date_forecast['hourly']\n\n if not hourly_forecasts:\n print(f'\\t\\tNo hourly forecasts for {date}!')\n continue\n\n daily_forecast = {\n 'location_id': location_id,\n 'time': date_forecast['time'],\n 'day_time': date_forecast['sunset_time'] - date_forecast['sunrise_time'],\n 'precipitation': 0,\n 'snow_accumulation': 0\n }\n # List of value names with `avg`, `min` and `max` values\n value_names = {\n 'temperature': 'temperature',\n 'cloud_cover': 'cloudCover',\n 'dew_point': 'dewPoint',\n 'humidity': 'humidity',\n 'pressure': 'pressure',\n 'uv_index': 'uvIndex',\n 'precipitation_probability': 'precipProbability',\n 'precipitation_intensity': 'precipIntensity'\n }\n # Value name counters, which indicate how many times (out of 24)\n # certain value appears in hourly data.\n value_counts = {k: 0 for k in value_names.keys()}\n\n for value_name in value_names.keys():\n daily_forecast[f'{value_name}_avg'] = 0.0\n daily_forecast[f'{value_name}_min'] = float('inf')\n daily_forecast[f'{value_name}_max'] = float('-inf')\n\n # Calculate daily forecast values from hourly forecasts.\n for hourly_forecast in hourly_forecasts:\n for value_name in value_names.keys():\n orig_value_name = value_names[value_name]\n if is_forecast_number(orig_value_name, hourly_forecast):\n daily_forecast[f'{value_name}_avg'] += hourly_forecast[orig_value_name]\n daily_forecast[f'{value_name}_min'] = min(\n hourly_forecast[orig_value_name],\n daily_forecast[f'{value_name}_min']\n )\n daily_forecast[f'{value_name}_max'] = max(\n hourly_forecast[orig_value_name],\n daily_forecast[f'{value_name}_max']\n )\n value_counts[value_name] += 1\n\n if is_forecast_number('precipAccumulation', hourly_forecast) \\\n and hourly_forecast['precipType'] == 'snow':\n daily_forecast['snow_accumulation'] += hourly_forecast['precipAccumulation']\n elif is_forecast_number('precipIntensity', hourly_forecast) \\\n and is_forecast_number('precipProbability', hourly_forecast):\n daily_forecast['precipitation'] += \\\n hourly_forecast['precipIntensity'] * hourly_forecast['precipProbability']\n\n for value_name, value_count in value_counts.items():\n if value_count:\n # Calculate average.\n daily_forecast[f'{value_name}_avg'] = daily_forecast[f'{value_name}_avg'] / value_count\n else:\n # If value never appeared\n daily_forecast[f'{value_name}_avg'] = 'NULL'\n daily_forecast[f'{value_name}_min'] = 'NULL'\n daily_forecast[f'{value_name}_max'] = 'NULL'\n\n cursor.execute(f'''INSERT INTO weather({', '.join(daily_forecast.keys())})\n VALUES ({', '.join([str(v) for v in daily_forecast.values()])})''')\n\n # IOT data:\n for location in SETTINGS['weather_locations_iot']:\n print(f'\\tPopulating weather: \"{location[\"name\"]}\".')\n\n # Insert location.\n cursor.execute(f'''INSERT INTO 
locations(name, lat, lng)\n VALUES ('{location['name']}', {location['lat']}, {location['lng']})''')\n location_id = cursor.lastrowid\n\n # Set weather locations for watercourses/aquifers.\n for water_body in [d['body'] for d in water_defs.values()]:\n if water_body in location:\n cursor.execute(f'''UPDATE {water_body}s\n SET location_id = {location_id}\n WHERE name IN ('{\"', '\".join(location[water_body])}')''')\n\n # Set locations for all stations on given water body to match its location.\n cursor.execute(f'''SELECT id\n FROM {water_body}s\n WHERE location_id = {location_id}''')\n ids = [row[0] for row in cursor.fetchall()]\n if len(ids):\n cursor.execute(f'''UPDATE {water_body}_stations\n SET location_id = {location_id}\n WHERE {water_body}_id IN ({', '.join([str(v) for v in ids])})''')\n\n break \n \n file_name = f'''{location['lat']}-{location['lng']}.json'''\n json_path = get_data_path('weather', 'raw', file_name)\n\n # If data file doesn't exist, download it first.\n if not os.path.isfile(json_path):\n with open(json_path, 'wb', encoding=\"utf-8\") as file:\n file.write(read_from_url(location['url'], decode=False))\n \n with open(json_path, 'r', encoding='utf-8') as json_file:\n row_names = {\n \"Sun_duration\": \"sun_duration\",\n \"CloudCover\": \"cloud_cover_avg\",\n \"Percipitation\": \"precipitation\",\n \"New_snow_blanket\": \"snow_accumulation\",\n \"Snow_blanket\": \"snow_depth\",\n \"TemperatureAvg\": \"temperature_avg\",\n \"TemperatureMin\": \"temperature_min\",\n \"TemperatureMax\": \"temperature_max\"\n }\n forecasts = json.load(json_file)\n for forecast in forecasts:\n f = {row_names[k]: forecast[k] for k in row_names.keys()}\n f['location_id'] = location_id\n f['time'] = round(forecast['LastUpdatedEpoch'] / 1000)\n cursor.execute(f'''INSERT INTO weather({', '.join(f.keys())})\n VALUES ({', '.join([str(v) for v in f.values()])})''')", "def calculate_world_daywise(countries_daywise_df):", "def forecast_weekly():\n forecast = get_forecast()\n daily = forecast.daily()\n return daily.summary", "def read_weatherstations(path_to_data):\n namedict = read_weatherstationnames(path_to_data)\n stations = {}\n for i in namedict:\n filename = namedict[i].replace(' ', '_') + '.csv'\n print(\"Reading\", filename)\n ws = read_station_csv(os.path.join(path_to_data, filename))\n stations[i] = ws\n return stations", "def _do_checkWeather(self, mjd, w, config):\n # Convert mjd to the relevant time units of the weather dates.\n time = (mjd - config['sim_start'] + config['%s_start' %(w)]) * _day2sec\n # And wrap the time, if we need to. 
\n time = time % self.maxtime[w]\n # Find the observations which are closest in time to our requested time.\n time_order = (abs(self.dates[w] - time)).argsort()\n date1 = self.dates[w][time_order[0]]\n date2 = self.dates[w][time_order[1]]\n weather1 = self.weather[w][time_order[0]]\n weather2 = self.weather[w][time_order[1]]\n # Do interpolation for weather at this particular time.\n weather = (weather2 - weather1) / (date2 - date1) * (time - date1) + weather1\n return weather, weather1", "def details(weather):\n\treturn \"\"\"<table class=\"forecast bg-success\"><tr><th colspan=\"2\" class=\"text-center lead\">Weather for {location} at {time}<th></tr>\n\t<tr><td>Temp: {temperature}<i class=\"wi wi-celsius\"></i> Feels Like: {feelsLike}<i class=\"wi wi-celsius\"></i></td><td rowspan=\"9\"><img src=\"map.gif?{latitude},{longitude}\" width=\"600\" height=\"371\"/><td></tr>\n\t<tr><td>Low: {low}<i class=\"wi wi-celsius\"></i> High: {high}<i class=\"wi wi-celsius\"></i></td></tr>\n\t<tr><td>Sunrise <i class=\"wi wi-sunrise\"></i>: {sunrise} Sunset <i class=\"wi wi-sunset\"></i>: {sunset}</td></tr>\n\t<tr><td>Wind: {windSpeed} kph from {windBearing} <i class=\"wi wi-wind.towards-{windDirection}-deg\"></i></td></tr>\n\t<tr><td>Summary <i class=\"wi wi-{icon}\"></i>: {summary}</td></tr>\n\t<tr><td></td></tr>\n\t<tr><td></td></tr>\n\t<tr><td></td></tr>\n\t<tr><td></td></tr>\n\t<tr><td>&nbsp;</td><td>&nbsp;</td></tr>\n\t</table>\"\"\".format(**weather)", "def get_temperature_data(zone):\n\n zone = zone[1:len(zone)-1]\n temp_response = {}\n conn = sqlite3.connect(os.path.abspath('database.db'))\n\n # get temperatures data\n query = \"Select temp_date, temp_max From temperature Left join fire_danger_zone on temperature.temp_station=fire_danger_zone.fdz_station Where fire_danger_zone.fdz_station == '\" + zone + \"' and temperature.temp_date >= date('2010-01-01') Order by temperature.temp_date;\"\n dataframe = pd.read_sql_query(query, conn) \n temperatures = dataframe['temp_max'].values.tolist()\n\n # get dates\n dates = dataframe['temp_date'].values.tolist()\n \n # add data in dictionary \n data_name = 'temp_'+zone\n temp_response[data_name] = temperatures\n temp_response['labels'] = dates\n \n # return data\n response = jsonify(temp_response)\n response.headers.add('Access-Control-Allow-Origin', '*')\n \n # close database connection\n conn.close()\n return response", "def Wind_Prediction():\n model = pickle.load(open('wind_model.pkl','rb'))\n \n # Load DataFrame\n # Wind\n longitude = 53.556563\n latitude = 8.598084\n\n url = ('https://api.openweathermap.org/data/2.5/onecall?lat=8.598084&lon=53.556563&units=imperial&appid=43e49f2fb4d17b806dfff389f21f4d27')\n response = requests.get(url)\n\n weather = response.json()\n dailynorm = json_normalize(weather, 'daily')\n \n df = pd.DataFrame(dailynorm)\n wind_df = df[['dt', 'wind_speed', 'wind_deg']].copy()\n wind_df['date'] = pd.to_datetime(wind_df['dt'],unit='s')\n wind_df['day'] = wind_df['date'].dt.day\n wind_df['month'] = wind_df['date'].dt.month\n wind_df = wind_df.fillna(0)\n wind_df.rename(columns={'wind_speed':'wind speed', 'wind_deg':'direction'}, inplace=True)\n wind_df = wind_df.drop(['dt','date'], axis = 1)\n Xnew = wind_df.values\n p_pred = model.predict(Xnew)\n p_pred = pd.DataFrame(p_pred)\n p_pred.columns = ['Predicted Power']\n final_wind_df = pd.concat([wind_df, p_pred], axis = 1)\n dictionaryObject = final_wind_df.to_dict()\n \n return dictionaryObject", "def predict_bike_demand(weather_data):\n # TODO: connect to the real deal!\n return 
{s[\"extra\"][\"uid\"]: random.randint(0, 11) for s in MOCK_STATION_STATS}", "def getAllDataFromDirectory(prediction_directory, actual_directory, write_directory, cities_file, utc_offset = False):\n city_dictionary = getCities(cities_file)\n actualGetter = getActualWeather(actual_directory, city_dictionary, get_API_keys())\n #For each day and for each city, get all the data and put it into a spreadsheet.", "def getData(tme=currentTime):\n # attempts request 10 times\n for attempt in range(10):\n try:\n # make a request to the url and return it in json format\n url = \"https://api.darksky.net/forecast/%s/%s,%s,%s?exclude=minutely,hourly,daily,alerts,flags\" % (API_KEY, LAT, LNG, tme)\n return get(url).json()\n except:\n # Wait .05 seconds and try again\n sleep(.05)\n pass", "async def fetch_hourly_data(self, day=None):\n self._logger.info(\"Fetching hourly data for %s\", day)\n await self._client.select_customer(self.account_id, self.customer_id)\n await self._client.select_customer(self.account_id, self.customer_id)\n\n if day is None:\n # Get yesterday\n yesterday = datetime.now() - timedelta(days=1)\n day_str = yesterday.strftime(\"%Y-%m-%d\")\n elif hasattr(day, \"strftime\"):\n day_str = day.strftime(\"%Y-%m-%d\")\n else:\n try:\n datetime.strptime(day, \"%Y-%m-%d\")\n except ValueError:\n print(\"Start date bad format. It must match %Y-%m-%d\")\n return\n day_str = day\n\n params = {\"dateDebut\": day_str, \"dateFin\": day_str}\n res = await self._client.http_request(HOURLY_DATA_URL_2, \"get\",\n params=params, )\n # We can not use res.json() because the response header are not application/json\n json_res = json.loads(await res.text())\n\n if len(json_res.get('results')) == 0:\n self._hourly_data[day_str] = {\n 'day_mean_temp': None,\n 'day_min_temp': None,\n 'day_max_temp': None,\n 'hours': {},\n }\n tmp_hour_dict = dict((h, {'average_temperature':None}) for h in range(24))\n else:\n self._hourly_data[day_str] = {\n 'day_mean_temp': json_res['results'][0]['tempMoyJour'],\n 'day_min_temp': json_res['results'][0]['tempMinJour'],\n 'day_max_temp': json_res['results'][0]['tempMaxJour'],\n 'hours': {},\n }\n tmp_hour_dict = dict((h, {}) for h in range(24))\n for hour, temp in enumerate(json_res['results'][0]['listeTemperaturesHeure']):\n tmp_hour_dict[hour]['average_temperature'] = temp\n\n raw_hourly_weather_data = []\n if len(json_res.get('results')) == 0:\n # Missing Temperature data from Hydro-Quebec (but don't crash the app for that)\n raw_hourly_weather_data = [None]*24\n else:\n raw_hourly_weather_data = json_res['results'][0]['listeTemperaturesHeure']\n\n params = {\"date\": day_str}\n res = await self._client.http_request(HOURLY_DATA_URL_1, \"get\", params=params)\n # We can not use res.json() because the response header are not application/json\n json_res = json.loads(await res.text())\n for hour, data in enumerate(json_res['results']['listeDonneesConsoEnergieHoraire']):\n tmp_hour_dict[hour]['lower_price_consumption'] = data['consoReg']\n tmp_hour_dict[hour]['higher_price_consumption'] = data['consoHaut']\n tmp_hour_dict[hour]['total_consumption'] = data['consoTotal']\n self._hourly_data[day_str]['hours'] = tmp_hour_dict.copy()\n\n #Also copy the raw hourly data from hydroquebec (This can be used later for commercial accounts, mostly 15 minutes power data)\n self._hourly_data_raw[day_str] = {\n 'Energy': json_res['results']['listeDonneesConsoEnergieHoraire'],\n 'Power': json_res['results']['listeDonneesConsoPuissanceHoraire'],\n 'Weather': raw_hourly_weather_data\n }", "def 
generate_polynesian_weather_data():\n weather_path = os.path.dirname(os.path.realpath(__file__))\n low_fp = weather_path + \"/polynesia_weather/low/1976/\"\n med_fp = weather_path + \"/polynesia_weather/med/1985/\"\n high_fp = weather_path + \"/polynesia_weather/high/1982/\"\n low_name = \"polynesia_1976\"\n med_name = \"polynesia_1985\"\n high_name = \"polynesia_1982\"\n generate_year_weather_data(low_fp, low_name)\n generate_year_weather_data(med_fp, med_name)\n generate_year_weather_data(high_fp, high_name)", "def get_wind_data(zone):\n\n zone = zone[1:len(zone)-1]\n wind_response = {}\n conn = sqlite3.connect(os.path.abspath('database.db'))\n\n # get wind data\n query = \"Select wind_date, wind_speed From wind_velocity Left join fire_danger_zone on wind_velocity.wind_station=fire_danger_zone.fdz_station Where fire_danger_zone.fdz_station == '\" + zone + \"' and wind_velocity.wind_date >= date('2010-01-01') Order by wind_velocity.wind_date;\"\n dataframe = pd.read_sql_query(query, conn) \n wind = dataframe['wind_speed'].values.tolist()\n\n # get dates\n dates = dataframe['wind_date'].values.tolist()\n \n # add data in dictionary \n data_name = 'wind_'+zone\n wind_response[data_name] = wind\n wind_response['labels'] = dates\n \n # return data\n response = jsonify(wind_response)\n response.headers.add('Access-Control-Allow-Origin', '*')\n \n # close database connection\n conn.close()\n return response", "def __weather_api_call(\n self, time: datetime, location: tuple, index: int,\n ) -> Weather:\n URL = (\n 'https://weather.visualcrossing.com/VisualCrossingWebServices'\n + '/rest/services/weatherdata/history?'\n )\n time_start = time.strftime('%Y-%m-%dT%H:%M:%S')\n # time_end = (time + timedelta(hours=1, seconds=0)\n # ).strftime('%Y-%m-%dT%H:%M:%S')\n location0_str = f'{location[0]:.5f}'\n location1_str = f'{location[1]:.5f}'\n\n PARAMS = {\n 'aggregateHours': 1,\n 'combinationMethod': 'aggregate',\n 'startDateTime': time_start,\n 'endDateTime': time_start,\n 'maxStations': -1,\n 'maxDistance': -1,\n 'contentType': 'json',\n 'unitGroup': self.unit_group,\n 'locationMode': 'single',\n 'key': self.vc_api_key,\n 'dataElements': 'all',\n 'locations': f'{location0_str}, {location1_str}',\n }\n # sending get request and saving the response as response object\n r = requests.get(url=URL, params=PARAMS)\n # extracting data in json format\n response_data = r.json()\n data_values = response_data['location']['values'][0]\n return Weather(\n temperature=data_values['temp'],\n maximum_temperature=data_values['maxt'],\n minimum_temperature=data_values['mint'],\n wind_chill=data_values['windchill'],\n heat_index=data_values['heatindex'],\n precipitation=data_values['precip'],\n snow_depth=data_values['snowdepth'],\n wind_speed=data_values['wspd'],\n wind_direction=data_values['wdir'],\n sea_level_pressure=data_values['sealevelpressure'],\n visibility=data_values['visibility'],\n cloud_cover=data_values['cloudcover'],\n dew_point=data_values['dew'],\n solar_radiation=data_values['solarradiation'],\n relative_humidity=data_values['humidity'],\n weather_type=data_values['weathertype'],\n conditions=data_values['conditions'],\n date=time,\n location=location,\n index=index,\n )", "def read_FMI_weatherdata(forcfile, fyear,lyear, asdict=False):\n \n #OmaTunniste;OmaItรค;OmaPohjoinen;Kunta;siteid;vuosi;kk;paiva;longitude;latitude;t_mean;t_max;t_min;\n #rainfall;radiation;hpa;lamposumma_v;rainfall_v;lamposumma;lamposumma_cum\n #-site number\n #-date (yyyy mm dd)\n #-latitude (in KKJ coordinates, metres)\n 
#-longitude (in KKJ coordinates, metres)\n #-T_mean (degrees celcius)\n #-T_max (degrees celcius)\n #-T_min (degrees celcius)\n #-rainfall (mm)\n #-global radiation (per day in kJ/m2)\n #-H2O partial pressure (hPa)\n\n from datetime import datetime\n #forcfile='c:\\\\pyspace\\\\DATAT\\\\Topmodel_calibr\\\\FMI_saa_Porkkavaara.csv'\n\n #import forcing data\n dat=np.genfromtxt(forcfile,dtype=float,delimiter=';', usecols=(5,6,7,10,11,12,13,14,15,16))\n\n fi=np.where(dat[:,0]>=fyear); li=np.where(dat[:,0]<=lyear)\n ix=np.intersect1d(fi,li); #del fi, li\n #print min(ix), max(ix), np.shape(ix)\n tvec=dat[ix,0:3] #YYYY MM DD\n\n dat=dat[ix, 3:] \n\n time=[]; doy=[]\n for k in range(0,len(tvec)):\n time.append(datetime( int(tvec[k,0]), int(tvec[k,1]), int(tvec[k,2]), 0, 0) )\n doy.append(time[k].timetuple().tm_yday)\n \n time=np.array(time)\n doy=np.array(doy)\n \n Ta=dat[:,0];Tmax=dat[:,1]; Tmin=dat[:,2]; Prec=dat[:,3]; Rg=1e3*dat[:,4]/86400.0; Par=Rg*0.5 #from kJ/m2/d-1 to Wm-2 \n e=1e-1*dat[:,5]; #hPa-->kPa\n dds=dat[:,6] #temperature sum\n\n #saturated vapor pressure \n esa=0.6112*np.exp((17.67*Ta)/ (Ta +273.16 -29.66)) #kPa\n vpd=esa - e; #kPa \n vpd[vpd<0]=0.0\n rh=100.0*e/esa;\n rh[rh<0]=0.0; rh[rh>100]=100.0\n \n F={'Ta':Ta, 'Tmin':Tmin, 'Tmax':Tmax, 'Prec':Prec, 'Rg':Rg, 'Par': Par, 'VPD':vpd, 'RH':rh, 'esa':esa, 'h2o':e, 'dds':dds}\n\n F['time']=time\n F['doy']=doy\n \n ix=np.where(np.isnan(F['Prec'])); \n F['Prec'][ix]=0.0\n #del dat, fields, n, k, time\n \n if asdict is not True:\n #return pandas dataframe\n F=pd.DataFrame(F)\n cols=['time', 'doy', 'Ta', 'Tmin','Tmax', 'Prec', 'Rg', 'Par', 'VPD', 'RH', 'esa', 'h2o', 'dds']\n F=F[cols]\n return F", "def Fetch_station(long, lat, y):\r\n global ddf\r\n dmin = 1000000\r\n rs = 0\r\n i=0\r\n for i in range(len(ddf[y])):\r\n #Calculate the distance between zip code location and weather station location\r\n dnew = Distance_orthonormique(ddf[y]['LON'][i], ddf[y]['LAT'][i], long, lat)\r\n\r\n if(dmin > dnew):\r\n #If the last smaller distance is superior than the current distance :\r\n #the new smaller distance is the current distance\r\n dmin = dnew\r\n rs = i\r\n\r\n #rs = index dataframe weather station\r\n #ddf[y]['STATION NAME'][rs] = Weather station name\r\n #round(dmin, 2) = Distance between weather station and zip code\r\n \r\n return rs, ddf[y]['STATION NAME'][rs], round(dmin,2)", "def get_weather(self, time=None, location=None):\n req = requests.get(self.source_url)\n text = req.text\n moment = self.extract_datetime(text)\n met_data = self.parse_hms_data(text)\n met_data['time'] = moment\n met_data['text'] = text\n return self.source_label, met_data", "def collect(self, start_date=None, end_date=None):\n if start_date is None:\n start_date = self.default_start\n if end_date is None:\n end_date = self.default_end\n\n cur = self.conn.cursor()\n\n # Maximum return is 1000 entries\n num_days = 1000 // len(self.stations)\n # Maximum date-range is 1 year\n if num_days > 365:\n num_days = 365\n\n for interval in netzero.util.time_intervals(\n start_date, end_date, days=num_days\n ):\n netzero.util.print_status(\n \"Weather\",\n \"Collecting: {} to {}\".format(\n interval[0].strftime(\"%Y-%m-%d\"), interval[1].strftime(\"%Y-%m-%d\")\n ),\n )\n\n # TODO -- REMOVE ASSUMPTION THAT LEN(DATA) < LIMIT\n raw_data = self.query_api(interval[0], interval[1])\n\n if raw_data is None:\n print(\"ERROR QUERYING API\") # TODO exception here?\n continue\n\n for entry in raw_data.get(\"results\", []):\n # Insert the weather data to the table, to be averaged 
later\n date = datetime.datetime.strptime(\n entry[\"date\"], \"%Y-%m-%dT%H:%M:%S\"\n ).date()\n value = entry[\"value\"]\n station = entry[\"station\"]\n\n cur.execute(\n \"INSERT OR IGNORE INTO weather VALUES (?, ?, ?)\", (date, value, station)\n )\n\n self.conn.commit()\n\n cur.close()\n\n netzero.util.print_status(\"Weather\", \"Complete\", newline=True)", "def weather_of_weather_type(city):\n # get all weather type, and make a map relation\n check_all_type('weather_type')\n relation_type_map = {'้˜ด': '้˜ด', 'ๅฐ้›จ่ฝฌ้˜ด': 'ๅฐ้›จ', 'ไธญ้›จ': 'ไธญ้›จ', 'ๅฐ้›จ': 'ๅฐ้›จ',\n 'ๅคšไบ‘่ฝฌๅฐ้›จ': 'ๅฐ้›จ', 'ไธญ้›ช': '้›ช', 'ๅฐ้›จ่ฝฌ้›ช': 'ๅฐ้›จ',\n 'ๅคšไบ‘': 'ๅคšไบ‘', '้œพ': '้œพ', 'ๆ™ด': 'ๆ™ด', '้˜ด่ฝฌๅฐ้›จ': 'ๅฐ้›จ'}\n relation_index = {big_type: index for index, big_type in enumerate(sorted(set(relation_type_map.values())))}\n relation_index_map = {small_type: relation_index[big_type] for small_type, big_type in relation_type_map.items()}\n # use relation map to get a weather npy file\n\n time_index = np.load(exp_data_path + os.sep + 'station_list' + os.sep + 'time_index.npy', allow_pickle=True)\n time_index = dict(time_index.tolist())\n numpy_res = np.empty((len(time_index['index']),))\n with open(exp_data_path + os.sep + 'weather' + os.sep + city + os.sep + '{}_weather_type.csv'.format(city)) as f:\n reader = csv.reader(f)\n for line in reader:\n numpy_res[int(line[0])] = relation_index_map[line[1]]\n\n file_name = exp_data_path + os.sep + 'weather' + os.sep + city + os.sep + '{}_weather_type'.format(city)\n if os.path.exists(file_name):\n os.remove(file_name)\n np.save(file_name, numpy_res)", "def query(self, lon, lat):\n def distance(lon1, lat1, lon2, lat2):\n return (lon2 - lon1) ** 2 + (lat2 - lat1) ** 2\n\n min_distance = sys.maxint\n weather = {}\n for w in self._weather:\n d = distance(lon, lat, w['lon'], w['lat'])\n if d < min_distance:\n min_distance = d\n weather = w\n\n return dict(temp=weather['temp'],\n humidity=weather['humidity'],\n weather_code=weather['weather_code'])", "def get_typical_days(weather_data, cfg):\n settings = cfg['settings']\n # Flag to determine if any holidays have been found:\n interpolation_freq = pd.Timedelta(settings['intervall'])\n flag_holidays_found = False\n\n # --- Season --------------------------------------------------------------\n # The 'season' (transition, summer or winter) is defined by the daily\n # average of the ambient temperature.\n\n # Resample ambient temperatures in DataFrame to days and take mean\n tamb_avg_list = weather_data['TAMB'].resample('D', label='right',\n closed='right').mean()\n\n # Write the daily mean values to all original time steps\n tamb_avg_list = tamb_avg_list.reindex(weather_data.index)\n tamb_avg_list.fillna(method='backfill', inplace=True)\n\n season_list = []\n\n # The VDI 4655 default heat limit is 15ยฐC (definition of summer days).\n # For low- and zero-energy houses, the average daily temperatures have\n # to be adapted to the actual conditions. 
(see VDI 4655, page 15)\n Tamb_heat_limit = settings.get('Tamb_heat_limit', 15) # ยฐC\n\n # Read through list of temperatures line by line and apply the definition\n for tamb_avg in tamb_avg_list:\n if tamb_avg < 5:\n season_list.append('W') # Winter\n elif tamb_avg > Tamb_heat_limit:\n season_list.append('S') # Summer\n else:\n season_list.append('U') # รœbergang (Transition)\n\n # Alternative season determination method:\n # From 'BDEW Standardlastprofile':\n season_list_BDEW = get_season_list_BDEW(weather_data)\n\n # Save the results in the weather_data DataFrame\n weather_data['TAMB_d'] = tamb_avg_list\n if settings.get('use_BDEW_seasons', False) is False:\n weather_data['season'] = season_list\n elif settings.get('use_BDEW_seasons', False) is True:\n weather_data['season'] = season_list_BDEW\n weather_data['season'].replace(to_replace={'Winter': 'W',\n 'Sommer': 'S',\n 'รœbergangszeit': 'U'},\n inplace=True)\n\n # Store the BDEW seasons separately\n weather_data['season_BDEW'] = season_list_BDEW\n\n steps_per_day = 24 / (interpolation_freq.seconds / 3600.0)\n settings['steps_per_day'] = steps_per_day\n logger.debug('Number of days in winter: ' +\n str(season_list.count('W')/steps_per_day))\n logger.debug('Number of days in summer: ' +\n str(season_list.count('S')/steps_per_day))\n logger.debug('Number of days in transition: ' +\n str(season_list.count('U')/steps_per_day))\n\n # Use https://pypi.org/project/holidays/ for holiday-detection\n used_holidays = []\n if settings.get('holidays'):\n country = settings['holidays'].get('country', 'DE')\n province = settings['holidays'].get('province', None)\n used_holidays = holidays.country_holidays(country, subdiv=province)\n\n # Read through list of days line by line and see what kind of day they are.\n # Problem: In the weather data, the bins are labeled on the 'right'\n # (Each time stamp describes the interval before). Therefore the time stamp\n # midnight (00:00:00) describes the last interval of the day before.\n # However, asking for the weekday of a midnight time stamp gives the name\n # of the next day. Thus the resulting list of weekdays is shifted by one\n # time step.\n weekdays_list = []\n weekdays_list_BDEW = []\n for date_obj in weather_data.index:\n if date_obj.dayofweek == 6: # 6 equals Sunday\n weekdays_list.append('S')\n weekdays_list_BDEW.append('Sonntag')\n elif date_obj in used_holidays:\n weekdays_list.append('S')\n weekdays_list_BDEW.append('Sonntag')\n flag_holidays_found = True\n elif date_obj.dayofweek == 5: # 5 equals Saturday\n weekdays_list.append('W')\n weekdays_list_BDEW.append('Samstag')\n else:\n weekdays_list.append('W')\n weekdays_list_BDEW.append('Werktag')\n\n # Solution to problem: We take the first list entry, then add the rest of\n # the list minus the very last entry.\n weather_data['weekday'] = [weekdays_list[0]] + weekdays_list[:-1]\n weather_data['weekday_BDEW'] = [weekdays_list_BDEW[0]] + \\\n weekdays_list_BDEW[:-1]\n\n # Print a warning, if necessary\n if flag_holidays_found is False:\n logger.warning('Warning! 
No holidays were found for the chosen time!')\n\n # --- Cloud cover amount --------------------------------------------------\n ccover_avg_list = weather_data['CCOVER'].resample('D', label='right',\n closed='right').mean()\n ccover_avg_list = ccover_avg_list.reindex(weather_data.index)\n ccover_avg_list.fillna(method='backfill', inplace=True)\n # The interpolation to 15min may cause a slight difference of daily means\n # compared to 60min, in rare cases shifting from >5.0 to <5.0.\n # Rounding to the first decimal place may prevent this issue.\n ccover_avg_list = ccover_avg_list.round(decimals=1)\n\n # Read through list of cloud cover line by line and apply the definition\n cloudy_list = []\n for ccover_avg in ccover_avg_list:\n if (ccover_avg < 5.0):\n cloudy_list.append('H')\n else:\n cloudy_list.append('B')\n\n weather_data['cloudy'] = cloudy_list\n\n # Combine the gathered information from season, weekday and cloudyness\n # into one 'typtag' key\n weather_data['typtag'] = weather_data['season'] + \\\n weather_data['weekday'] + weather_data['cloudy']\n\n # For summer days, the VDI 4655 makes no distinction in terms of cloud\n # amount. So we need to replace 'heiter' and 'bewölkt' with 'X'\n typtage_replace = {'typtag':\n {'SWH': 'SWX', 'SWB': 'SWX', 'SSH': 'SSX', 'SSB': 'SSX'}\n }\n weather_data.replace(to_replace=typtage_replace, inplace=True)", "def tafs(station, hours_before_now=24, most_recent=True):\n return aviation_weather('tafs', station, hours_before_now, most_recent)" ]
[ "0.6687217", "0.65223783", "0.64728045", "0.64251363", "0.6395729", "0.63380027", "0.6210881", "0.61134636", "0.59816873", "0.59385264", "0.589162", "0.58533895", "0.5828796", "0.58262205", "0.58206254", "0.57987386", "0.576516", "0.5762254", "0.5755952", "0.57303375", "0.57172894", "0.5711101", "0.57051396", "0.569737", "0.56901556", "0.5689204", "0.56835365", "0.5657169", "0.5653262", "0.56502265", "0.5621427", "0.56165236", "0.55880034", "0.5578355", "0.5578136", "0.5566801", "0.55634546", "0.5556372", "0.5548798", "0.5536802", "0.5533617", "0.55232716", "0.5517435", "0.55069035", "0.55063576", "0.5505806", "0.5501975", "0.54965633", "0.54933304", "0.54924077", "0.547572", "0.546574", "0.5453769", "0.54531914", "0.5450091", "0.5430014", "0.54299194", "0.5422652", "0.5416368", "0.5403221", "0.53979546", "0.53935367", "0.53893113", "0.53886646", "0.53875685", "0.53875506", "0.5374534", "0.5374532", "0.53743356", "0.537375", "0.53655994", "0.5361482", "0.5360063", "0.53576314", "0.5354068", "0.5353699", "0.53447425", "0.5343971", "0.53362846", "0.5334201", "0.53261477", "0.5319985", "0.5318282", "0.53171676", "0.5315052", "0.5306196", "0.53027034", "0.530247", "0.53021854", "0.5297389", "0.5296729", "0.5296354", "0.52910966", "0.5286925", "0.5286561", "0.5284181", "0.52818614", "0.5277886", "0.5261175", "0.52607447" ]
0.6820833
0
This function gives us the sum of all sales values for a given product on a given date. It also returns the total quantity of products sold, expressed in weight
def get_volume_product_on_date(product_barcode, date, store_id, transactions):
    # Normalize both bounds to midnight so the filter spans the whole calendar
    # day. Comparing a datetime64 column against a plain datetime.date (the
    # original `.date()` call) warns or raises in recent pandas versions, and
    # it also left the two bounds inconsistent when `date` carried a time.
    day_start = pd.to_datetime(date).normalize()
    day_end = day_start + pd.DateOffset(1)
    transactions_day = transactions[(transactions['STO_EAN'] == store_id)
                                    & (transactions['BARCODE'] == product_barcode)
                                    & (transactions['TRX_DATETIME'] >= day_start)
                                    & (transactions['TRX_DATETIME'] < day_end)]

    # If no transaction exists for that product/store/day, return None
    if transactions_day.empty:
        return None

    return {"price": np.sum(transactions_day['SAL_AMT_WTAX'].values),
            "weight": np.sum(transactions_day['SAL_UNIT_QTY_WEIGHT'].values)}
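As a quick sanity check, here is a hypothetical usage sketch (not part of the dataset record): the imports, sample rows, and expected totals are invented for illustration; only the column names come from the function above, which must be in scope.

import pandas as pd
import numpy as np

# Three sample rows: two match the barcode/store/day, one is another product.
transactions = pd.DataFrame({
    'STO_EAN': [1001, 1001, 1001],
    'BARCODE': ['123', '123', '999'],
    'TRX_DATETIME': pd.to_datetime(
        ['2020-01-02 09:30', '2020-01-02 17:45', '2020-01-02 12:00']),
    'SAL_AMT_WTAX': [3.50, 2.10, 9.99],
    'SAL_UNIT_QTY_WEIGHT': [0.5, 0.3, 1.0],
})

print(get_volume_product_on_date('123', '2020-01-02', 1001, transactions))
# -> {'price': 5.6, 'weight': 0.8} (up to float repr)
print(get_volume_product_on_date('123', '2020-01-03', 1001, transactions))
# -> None (no transactions on that day)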
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_daily_product_sales(self):\n self.products['daily'] = self.products['session_start_date'].apply(lambda x: convert_dt_to_day_str(x))\n self.products = self.products.query(\"payment_amount == payment_amount\")\n self.products['payment_amount'] = self.products['payment_amount'].apply(lambda x: float(x))\n self.daily_products = self.products.reset_index().groupby([\"daily\", \"products\"]).agg(\n {\"payment_amount\": \"sum\", 'index': 'count'}).reset_index().rename(columns={\"index\": \"order_count\"})\n return self.daily_products", "def sum_amounts(product_df):\r\n\r\n summed_product_df = pd.DataFrame(columns=product_df.columns.values)\r\n product_names = product_df['product name'].unique() # get product names\r\n\r\n # check if product_df has a volume column\r\n if product_df.shape[1] > 3:\r\n has_volume = True\r\n else:\r\n has_volume = False\r\n\r\n for p in product_names:\r\n act_product_df = product_df[product_df['product name'] == p]\r\n actual_unit = act_product_df['unit'].values[0] # store unit of current product\r\n if act_product_df.shape[0] > 1: # check if there is more than one row for current product\r\n summed_amount = act_product_df['amount'].sum()\r\n if has_volume:\r\n summed_volume = act_product_df['volume'].sum()\r\n summed_product_df = summed_product_df.append(\r\n {'product name': p, 'amount': summed_amount, 'volume': summed_volume, 'unit': actual_unit},\r\n ignore_index=True)\r\n else: # no volume in product_df\r\n summed_product_df = summed_product_df.append(\r\n {'product name': p, 'amount': summed_amount, 'unit': actual_unit}, ignore_index=True)\r\n else:\r\n summed_product_df = summed_product_df.append(act_product_df, ignore_index=True)\r\n\r\n return summed_product_df", "def total_sales():\n data = []\n orders = Order.objects.all()\n for order in orders:\n data.append(order.get_total_cost())\n return sum(data)", "def weekly_sales(self):\n last_seven_day = timezone.now() - timedelta(days=7)\n items = self.item_set.filter(status=\"sold\", updated_at__gte=last_seven_day)\n total_sales = 0\n for item in items:\n total_sales += item.price\n return total_sales", "def view_total_sales_by_date(start_date, end_date=None):\n # Later will add the ability to sort by date and Category\n try:\n with session_scope() as db_session:\n if end_date is not None:\n if validate(start_date) and validate(end_date):\n pass\n else:\n return '', 404\n orders = db_session.query(Order).filter(Order.date.between(start_date, end_date)).all()\n else:\n if validate(start_date):\n pass\n else:\n return '', 404\n orders = db_session.query(Order).filter(Order.date == start_date).all()\n if len(orders) < 1:\n return {\n 'code': 404,\n 'message': 'There are no sales'\n }, 404\n\n nmbr_itm = 0\n for order in orders:\n for items in order.order_lines:\n nmbr_itm = nmbr_itm + items.quantity\n\n except DBAPIError as db_error:\n # Returns an error in case of a integrity constraint not being followed.\n return {\n 'code': 400,\n 'message': re.search('DETAIL: (.*)', db_error.args[0]).group(1)\n }, 400\n except NoResultFound:\n # Returns an error in case of a integrity constraint not being followed.\n return {\n 'code': 400,\n 'message': \"No sales have been registered\"\n }, 400\n return {\n 'numberItems': nmbr_itm\n }, 200", "def accumulate_prices(name,products,sales,types,add):\r\n return reduce(add,[get_prices_by_type(name,products,sales,types)[i] for i in get_prices_by_type(name,products,sales,types)])", "def total_sales(self):\n total_sales = 0\n items = self.item_set.filter(status=\"sold\")\n 
for item in items:\n total_sales += item.price\n return total_sales", "def tot(self, prop=\"基金现值\", date=yesterdayobj()):\n res = 0\n for fund in self.fundtradeobj:\n res += fund.dailyreport(date).iloc[0][prop]\n return res", "def prepare_date_for_all_warehouses_sheets(self,product,product_data_dict,opening_qty,last_sales,last_purchase_date,qty_purchase_in_duration,qty_sales_in_duration,scrap_location_qty,adjusted_qty_in_duration,warehouse_in_qty,warehouse_out_qty):\n if last_purchase_date: \n last_purchase_date = datetime.strptime(last_purchase_date, '%d-%m-%Y')\n if last_sales:\n last_sales = datetime.strptime(last_sales, '%d-%m-%Y')\n if product_data_dict.has_key(product):\n product_data = product_data_dict.get(product)\n old_opening_qty = product_data.get('opening_qty')\n new_opening_qty = product_data.get('opening_qty') + opening_qty \n \n new_last_sales = product_data.get('last_sales')\n new_last_sales.append(last_sales) \n \n new_last_purchase_date_lst = product_data.get('last_purchase_date')\n new_last_purchase_date_lst.append(last_purchase_date)\n \n old_qty_purchase_in_duration = product_data.get('qty_purchase_in_duration')\n new_qty_purchase_in_duration = old_qty_purchase_in_duration + qty_purchase_in_duration\n \n old_qty_sales_in_duration = product_data.get('qty_sales_in_duration')\n new_qty_sales_in_duration = old_qty_sales_in_duration + qty_sales_in_duration\n \n old_scrap_location_qty = product_data.get('scrap_location_qty')\n new_scrap_location_qty = old_scrap_location_qty + scrap_location_qty\n \n old_adjusted_qty_in_duration = product_data.get('adjusted_qty_in_duration')\n new_adjusted_qty_in_duration = old_adjusted_qty_in_duration + adjusted_qty_in_duration\n \n old_warehouse_in_qty = int(product_data.get('warehouse_in_qty') or 0)\n new_warehouse_in_qty = old_warehouse_in_qty + warehouse_in_qty or 0\n \n old_warehouse_out_qty = int(product_data.get('warehouse_out_qty') or 0)\n new_warehouse_out_qty = old_warehouse_out_qty + warehouse_out_qty or 0\n \n product_data.update({'opening_qty':new_opening_qty,'last_sales':new_last_sales,\n 'last_purchase_date':new_last_purchase_date_lst,'qty_purchase_in_duration':new_qty_purchase_in_duration,\n 'qty_sales_in_duration': new_qty_sales_in_duration,'scrap_location_qty':new_scrap_location_qty,\n 'adjusted_qty_in_duration':new_adjusted_qty_in_duration,\n 'warehouse_in_qty':new_warehouse_in_qty,'warehouse_out_qty':new_warehouse_out_qty\n })\n \n product_data_dict.update({product:product_data})\n return product_data_dict\n \n product_data_dict.update({product:{\n 'opening_qty':opening_qty or 0,'last_sales':[last_sales or ''],\n 'last_purchase_date':[last_purchase_date],'qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0,\n 'warehouse_in_qty':warehouse_in_qty or 0,'warehouse_out_qty':warehouse_out_qty or 0\n }})\n return product_data_dict", "def weighted_sum(self):\n return sum(self.wvalues)", "def SumCostByDay(dateOfPayment):\n\n logs.logger.debug(\n \"Start to adds all amount of Cost objects based on the payment date.\")\n try:\n searchedCostByDayFromDB = GetAllCostByDateOfPaymentFromDB(dateOfPayment)\n sumTotal = 0\n for item in searchedCostByDayFromDB:\n sumTotal += item.amount\n logs.logger.info(\n \"Based on the payment date adds all amount of Cost objects.\")\n return sumTotal\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def 
get_product_flow():\n return sum([i.get_total_flow('ton/day') for i in LAOs.products]) * LAOs.LAOs_tea.operating_days", "def view_total_sales():\n # Later will add the ability to sort by date and Category\n try:\n with session_scope() as db_session:\n orders = db_session.query(Order).all()\n\n if len(orders) < 1:\n return {\n 'code': 404,\n 'message': 'There are no sales'\n }, 404\n\n nmbr_itm = 0\n for order in orders:\n for items in order.order_lines:\n nmbr_itm = nmbr_itm + items.quantity\n\n except DBAPIError as db_error:\n # Returns an error in case of a integrity constraint not being followed.\n return {\n 'code': 400,\n 'message': re.search('DETAIL: (.*)', db_error.args[0]).group(1)\n }, 400\n except NoResultFound:\n # Returns an error in case of a integrity constraint not being followed.\n return {\n 'code': 400,\n 'message': \"No sales have been registered\"\n }, 400\n return {\n 'numberItems': nmbr_itm\n }, 200", "def running_total(date_list):\n return sum(d.price for d in date_list)", "def transactions_sum_of_date(self, request):\n user = User.objects.get(id=request.data[\"user\"])\n user_transactions = user.transactions.all()\n user_transaction_by_date = user_transactions.filter(date=request.data[\"date\"])\n spended = user_transaction_by_date.aggregate(Sum('amount'))\n result = Summ(request.data[\"date\"], spended.get('amount__sum'))\n serializer = SumSerializer(result)\n return Response(serializer.data)", "def daily_price():\n for item in data:\n if valid_date(item):\n yield data[item]['daily_value']", "def weekly_benefit(self):\n total_purchase_price = 0\n total_selling_price = 0\n last_seven_day = timezone.now() - timedelta(days=7)\n items = self.item_set.filter(status=\"sold\", updated_at__gte=last_seven_day)\n for item in items:\n total_purchase_price += item.price\n total_selling_price += item.selling_price\n benefit = total_selling_price - total_purchase_price\n return benefit", "def print_total_value():\n sum = 0.0\n for item in data:\n sum += (item['price'] * item['stock'])\n\n print(f\"total stock value = {sum}\")", "def prepare_data_with_warehouse(self,from_date,to_date,warehouses,all_products):\n data_dict = {}\n stock_quant_obj=self.env['stock.quant']\n for warehouse in warehouses:\n all_locations = self.get_all_locations(warehouse)\n if not all_locations:\n continue\n \n #here we are finding the opening stock for these we are using base query\n #of inventory at date v10\n result = self.get_product_qty(all_locations,from_date)\n qty_dict = dict((x,y) for x, y in result)\n \n for product in all_products:\n last_sales = ''\n qty_purchase_in_duration = 0\n qty_sales_in_duration = 0\n last_purchase_date = ''\n scrap_location_qty = 0\n adjusted_qty_in_duration = 0\n warehouse_out_qty = 0\n warehouse_in_qty = 0\n# here from result of inventory at date we are seaching for specific product.\n opening_product_qty = qty_dict.get(product.id)\n\n #finding last sales qty\n last_sales = self.find_last_sales_qty(from_date,to_date,warehouse,all_locations,product)\n #finding last purchase date of product\n last_purchase_date = self.find_last_purchase_date(from_date,to_date,all_locations,product)\n #fiding date purchase qty in duration for specific product\n qty_purchase_in_duration = self.find_purchase_qty_in_duration(from_date,to_date,all_locations,product)\n #fiding scrap qty of precific product\n scrap_location_qty = self.find_scap_location_qty(from_date,to_date,product,all_locations)\n #finding sales qty in duration\n qty_sales_in_duration = 
self.find_sale_qty_in_duration(from_date,to_date,warehouse,all_locations,product)\n #fidning adjusted qty in duration\n adjusted_qty_in_duration = self.find_adjusted_qty_in_duration(from_date, to_date, product, all_locations)\n \n dest_location_lst = self.get_other_wahouse_locations(warehouse)\n \n if any(all_locations) and any(dest_location_lst):\n #fidning warehouse in qty \n warehouse_in_qty = self.find_warehouse_transer_in_qty(product, all_locations, dest_location_lst,from_date,to_date)\n #fidning warehouse out qty for specific product.\n warehouse_out_qty = self.find_warehouse_transer_out_qty(product, all_locations, dest_location_lst,from_date,to_date)\n \n if warehouse_out_qty:\n warehouse_out_qty = warehouse_out_qty and warehouse_out_qty[0][0] or ''\n if warehouse_in_qty:\n warehouse_in_qty = warehouse_in_qty and warehouse_in_qty[0][0] or ''\n \n if adjusted_qty_in_duration:\n adjusted_qty_in_duration = adjusted_qty_in_duration and adjusted_qty_in_duration[0][0] or '' \n if scrap_location_qty:\n scrap_location_qty = scrap_location_qty and scrap_location_qty[0][0] or ''\n \n # if qty_sales_in_duration:\n # qty_sales_in_duration = qty_sales_in_duration and qty_sales_in_duration[0][0] or ''\n # if qty_purchase_in_duration:\n # qty_purchase_in_duration = qty_purchase_in_duration[0][0] or ''\n if last_sales:\n last_sales = datetime.strptime(last_sales and last_sales[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if last_purchase_date:\n last_purchase_date = datetime.strptime(last_purchase_date and last_purchase_date[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if data_dict.has_key(warehouse.id):\n data_lst=data_dict.get(warehouse.id)\n data_lst.append({'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,'last_sales':last_sales or '',\n 'last_purchase_date':last_purchase_date or '','qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0\n ,'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0 \n })\n data_dict.update({warehouse.id:data_lst})\n continue\n data_dict.update({warehouse.id:[{'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,\n 'last_sales':last_sales or '','last_purchase_date':last_purchase_date or '',\n 'qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,\n 'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0,\n 'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0\n }]})\n return data_dict", "def get_the_sum_of_prices_from_table(table, item_ids):\n\n # your code", "def monthly_sales(self):\n last_thirty_days = timezone.now() - timedelta(days=30)\n items = self.item_set.filter(status=\"sold\", updated_at__gte=last_thirty_days)\n total_sales = 0\n for item in items:\n total_sales += item.price\n return total_sales", "def compute_consumption_per_day(self):\n start = datetime.now()\n tz = pytz.timezone(self.env.user.tz)\n dt = pytz.utc.localize(start).astimezone(tz)\n schedule = 
self.env['ir.config_parameter'].get_param('calc_orderpoint.schedule', '0 6').split()\n run = False\n for begin, end in zip(schedule[::2], schedule[1::2]):\n if dt.hour >= int(begin) and dt.hour < int(end):\n run = True\n break\n if run:\n now = fields.Datetime.now()\n location_ids = eval(self.env['ir.config_parameter'].get_param('calc_orderpoint.location_ids', '[]'))\n limit = timedelta(minutes=float(self.env['ir.config_parameter'].get_param('calc_orderpoint.time_limit', '4')))\n _logger.warn('Starting compute_consumption_per_day.')\n products = self.env['product.template'].search(\n [\n '|',\n ('product_variant_ids.sale_ok', '=', True),\n ('sale_ok', '=', True),\n ('last_sales_count', '=', False),\n '|',\n ('earliest_sales_count', '=', False),\n ('earliest_sales_count', '<', now)\n ],\n limit=int(self.env['ir.config_parameter'].get_param(\n 'calc_orderpoint.product_limit', '30')))\n if not products:\n products = self.env['product.template'].search(\n [\n '|',\n ('product_variant_ids.sale_ok', '=', True),\n ('sale_ok', '=', True),\n '|',\n ('earliest_sales_count', '=', False),\n ('earliest_sales_count', '<', now)\n ],\n order='last_sales_count asc',\n limit=int(self.env['ir.config_parameter'].get_param(\n 'calc_orderpoint.product_limit', '30')))\n _logger.warn('Computing compute_consumption_per_day for the following products: %s' % products)\n for product in products:\n try:\n product._consumption_per_day()\n product.write({\n 'last_sales_count': fields.Datetime.now(),\n 'earliest_sales_count': False,\n })\n if (datetime.now() - start) > limit:\n break\n except:\n tb = traceback.format_exc()\n tomorrow = fields.Datetime.to_string(fields.Datetime.from_string(fields.Datetime.now()) + timedelta(1))\n subject = 'compute_consumption_per_day failed to compute %s (%s)' % (product.display_name, product.id)\n body = 'Earliest recompute attempt set to %s.\\n\\n%s' % (tomorrow, tb)\n _logger.warn('%s. 
%s' % (subject, body))\n product.earliest_sales_count = tomorrow\n product.message_post(body=body.replace('\\n', '<br/>'), subject=subject, type='notification')\n \n _logger.warn('Finished compute_consumption_per_day.')", "def get_sum_of_sales_per_customer_from_table(table):\n summed_sales_per_customer = {}\n for customer in {line[CUSTOMER_ID] for line in table}:\n sum_of_sales = common.szum_list([line[PRICE] for line in table if line[CUSTOMER_ID] == customer])\n summed_sales_per_customer[customer] = sum_of_sales\n return summed_sales_per_customer", "def print_sum():\n\n sum = 0\n for item in data:\n sum += item[\"price\"]\n\n print(f\"The sum is: {sum}\")", "def get_day_returns(self, stocks=None, date=None):\n if stocks is None:\n stocks = self.stocks\n\n if date is None:\n date = self.date\n\n if type(date) is not datetime.datetime and type(date) is not pd.tslib.Timestamp:\n date = datetime.datetime.strptime(date, \"%Y-%m-%d\")\n\n stock_money = 0\n for stock in stocks:\n stock_day = self.stock_data[stock]\n # TODO find a better way than avging open and cloase\n stock_money += stock_day.position['Position'][date] *\\\n (stock_day.market['Close'][date] + stock_day.market['Open'][date])/2\n\n return stock_money", "def sum_values(self):\n raise NotImplementedError", "def monthly_benefit(self):\n \"\"\"Calculate weekly benefit of this company from this day\"\"\"\n total_purchase_price = 0\n total_selling_price = 0\n last_thirty_days = timezone.now() - timedelta(days=30)\n items = self.item_set.filter(status=\"sold\", updated_at__gte=last_thirty_days)\n for item in items:\n total_purchase_price += item.price\n total_selling_price += item.selling_price\n benefit = total_selling_price - total_purchase_price\n return benefit", "def get_the_sum_of_prices(item_ids):\n\n table = data_manager.get_table_from_file(\"sales/sales.csv\")\n return get_the_sum_of_prices_from_table(table, item_ids)", "def get_the_sum_of_prices(item_ids):\n\n # your code", "def somme(self) -> Numeric:\n return query_sum(self.offres(), \"prix\", output_field=models.DecimalField())", "def get_sale_prices():\n\n r = requests.post(settings.qv_url, data=REQUEST_DATA)\n response = r.json()\n\n data_processed = [process_property(prop) for prop in response['LocalAreaSales']]\n\n return data_processed", "def onchange_quantity_sum(self,cr,uid,ids,lines,qty,context=None):\n if context is None:\n context = {}\n total = 0\n res = {'value':{}}\n for line in lines:\n total = total + round(line[2]['quantity'],4)\n diff = round(qty - total,4)\n if diff < 0 :\n diff = 0 \n res = {'value':{'qty_total':total,'qty_res':diff}}\n return res", "def total_profit(knapsack, items, weight):\n return knapsack[items][weight]", "def sales(self):\n\n # Scalar to range (consider creating function for this)\n if np.isscalar(self.demand_decr):\n self.demand_decr = np.full((1, self.num_years - 1), self.demand_decr)\n\n # Year 1 demand\n # This is the key to multi-period models - how to best do this?\n if np.isscalar(self.yr1_demand):\n sales_array = np.zeros((1, self.num_years))\n else:\n sales_array = np.zeros((len(self.yr1_demand), self.num_years))\n\n sales_array[:, 0] = self.yr1_demand\n\n # Compute sales in subsequent years\n for t in range(1, self.num_years):\n sales_array[:, t] = (1 - self.demand_decr[:, t - 1]) * sales_array[:, t - 1]\n\n return sales_array", "def weighted_sum(data, dim=None, weights=None):\n if isinstance(data, xr.DataArray):\n return weighted_sum_da(data, dim, weights)\n elif isinstance(data, xr.Dataset):\n return 
weighted_sum_ds(data, dim, weights)\n else:\n raise ValueError('Data must be an xarray Dataset or DataArray')", "def get_summary(self, df):\n results_df = pd.DataFrame({'Energy kWh': self.get_all_periods(df).sum()})\n results_df['Prices $/kWh'] = self.deliveryPrice + self.get_rates()\n results_df['Value $'] = results_df['Energy kWh'] * results_df['Prices $/kWh']\n return(results_df)", "def sum(self):\n return np.dot(self.data.T, self.weights)", "def get_total_price(self):\n i = self.get_copy_with_resolved_dependencies()\n total_price = Decimal(0)\n for product in i['products']:\n billed_price = Decimal(str(product.get('price', 0))) * Decimal(str(product.get('quantity')))\n total_price += billed_price\n return total_price", "def somme(self) -> Numeric:\n return query_sum(\n self.offre_set.filter(valide=True),\n \"prix\",\n output_field=models.DecimalField(),\n )", "def som(getallenlijst):\r\n total = sum(getallenlijst)\r\n return total", "def compute_price(self, date = None):\n\t\tif date is None:\n\t\t\tdate = datetime.now()\n\t\tself.price = 0\n\t\t# Getting list of product in cart\n\t\tcontent = self.cart.cart_content_set.all()\n\t\t# Dictionnary in order to compute minimum state of multi promotion\n\t\tstate = {\n\t\t\t'products':{},\n\t\t\t'promotions':{}\n\t\t}\n\t\trequirements = {}\n\n\t\tfor element in content:\n\t\t\tproduct = element.product\n\t\t\tquantity = element.quantity\n\n\t\t\t# First look for promotion\n\t\t\tsimple_promotions = product.promotion_set.filter(end__gte = date, type = 's').distinct('reference', 'end').order_by('-end', 'reference')\n\t\t\tmulti_promotions = product.promotion_set.filter(end__gte = date, type = 'm').distinct('reference', 'end').order_by('-end', 'reference')\n\t\t\tif len(simple_promotions)>0:\n\t\t\t\tpromotion = simple_promotions[0]\n\t\t\t\tself.price = self.price + quantity*promotion.after\n\t\t\t\n\t\t\telif len(multi_promotions)>0:\n\t\t\t\tfor promotion in multi_promotions:\n\t\t\t\t\tprice_before = promotion.before\n\t\t\t\t\tprice_after = promotion.after\n\t\t\t\t\tcontent = [ (p, 1) for p in promotion.content.all()]\n\t\t\t\t\tfound, requirement = self.get_promotion_requirement(content, price_before)\n\t\t\t\t\tif found and requirement is not None:\n\t\t\t\t\t\trequirements[promotion.id] = { p.id:q for p, q in requirement} # updating promotion multi requirements\n\n\t\t\t\t\t# Updating promotion multi state\n\t\t\t\t\tprod, price = self.get_simple_price([{'product':product, 'quantity':1}], date)[0]\n\t\t\t\t\t# print quantity\n\t\t\t\t\tstate['products'][product.id] = {'price': price, 'qte':quantity}\n\t\t\t\t\t# print state['products'][product.id]\n\t\t\t\t\tstate['promotions'][promotion.id] = {'price': price_after, 'qte':0}\n\t\t\telse:\n\t\t\t\thistory = product.history_set.filter(created__gte = date-timedelta(hours = 24)).order_by('-created')\n\t\t\t\tif len(history)>0:\n\t\t\t\t\tself.price = self.price + quantity*history[0].price\n\t\t\t\telse:\n\t\t\t\t\thistory = product.history_set.all().order_by('-created')\n\t\t\t\t\tif len(history)>0:\n\t\t\t\t\t\tself.price = self.price + quantity*history[0].price\n\n\t\t# Dealing with multi promotion:\n\t\tmin_state, min_price = self.get_min_state(state, requirements)\n\t\tself.price = self.price + min_price\n\n\t\tself.save()\n\n\t\treturn self.price", "def get_total(self):\n total = 0.00\n\n for _drink in self.drinks:\n total = total + _drink.get_price()\n\n for _food in self.food:\n total = total + _food.get_price()\n\n return total", "def sum(self):\n return sum(self.values)", "def 
doSummary(self):\n for name in self.stockList:\n tempVolume=0.\n for dateStr in self.listOfDates:\n rawTradeDataPath = FileNames.BinRTTradesDir + '/' + dateStr + '/' + name + '_trades.binRT'\n tradeReader = TAQTradesReader(rawTradeDataPath)\n tempVolume=tempVolume+np.nansum(tradeReader._s)/10000.0 # divide 10000 because otherwise the sum could exceed the range of int32\n self.dict[name]=tempVolume", "def calculate_profit(self):", "def sum_promos_per_product(self, product=None):\n if product is None:\n subprods = SubscriptionProduct.objects.filter(\n route=self, subscription__active=True, subscription__type='P').aggregate(Sum('copies'))\n else:\n subprods = SubscriptionProduct.objects.filter(\n route=self, product=product, subscription__active=True, subscription__type='P').aggregate(Sum('copies'))\n return subprods['copies__sum']", "def sum(self):\n return self._reduce_for_stat_function(F.sum, only_numeric=True)", "def weighted_by_sum(\n self, other):\n provenance = NQExprProvenance(\n operation='weighted_by_sum',\n inner=self.provenance,\n other=other.provenance)\n with tf.name_scope('weighted_by_sum'):\n return self.context.as_nql(\n self.tf * tf.reduce_sum(input_tensor=other.tf, axis=1, keepdims=True),\n self._type_name, provenance)", "def sum(self, values):\n return self.aggregate(values, \"sum\")", "def get_prices(name,products,sales):\r\n return tuple((products[0],products[1]*((1-tuple(filter(lambda x: x[0]==name, sales))[0][1]))) for products in products)", "def coproduct_sum(sys):\r\n \r\n streams=coproduct_finder(sys)\r\n BD_mass=0\r\n Glycerol_mass=0\r\n products = {}\r\n for i in streams:\r\n if i == 'Waste':\r\n pass\r\n else:\r\n mass = find(i).mass\r\n BD_index=find(i).get_index('Biodiesel')\r\n Glycerol_index=find(i).get_index('Glycerol')\r\n BD = mass[BD_index]\r\n Glycerol = mass[Glycerol_index]\r\n BD_mass = BD_mass+ BD\r\n Glycerol_mass = Glycerol_mass + Glycerol\r\n products = {'Biodiesel (kg/hr)':BD_mass, 'Glycerol (kg/hr)': Glycerol_mass}\r\n return products", "def find_purchase_qty_in_duration(self,from_date,to_date,location,product_id):\n # query=\"\"\"\n # select sum(product_uom_qty) from stock_move mv \n # Inner join stock_location sl on sl.id = mv.location_id and sl.usage='supplier'\n # and mv.location_dest_id in (%s) where state='done' and product_id = %s and date between '%s 00:00:00' and '%s 23:59:59'\n # \"\"\"\n query = \"\"\"select sum(product_uom_qty) as total,product_uom from stock_move mv \n Inner join stock_location sl on sl.id = mv.location_id and sl.usage='supplier' \n and mv.location_dest_id in (%s) where state='done' and product_id = %s and \n date between '%s 00:00:00' and '%s 23:59:59' group by product_uom\"\"\"%(\n ','.join(str(x) for x in location), product_id.id,from_date,to_date)\n self._cr.execute(query)\n result = self._cr.fetchall()\n uom_rec = self.env['product.uom']\n purchase_qty = 0\n for r in result:\n factor_inv = uom_rec.browse(r[1]).factor_inv\n purchase_qty += r[0] * factor_inv\n # Return Qty\n return_query = \"\"\"select sum(product_uom_qty) as total,product_uom \n from stock_move mv Inner join stock_location sl on sl.id = \n mv.location_dest_id and sl.usage='supplier' and mv.location_id in (\n %s) where state='done' and product_id = %s and date between '%s \n 00:00:00' and '%s 23:59:59' group by product_uom\"\"\" % (\n ','.join(str(x) for x in location), product_id.id, from_date,\n to_date)\n self._cr.execute(return_query)\n return_result = self._cr.fetchall()\n purchase_return_qty = 0\n for re in return_result:\n factor_inv = 
uom_rec.browse(re[1]).factor_inv\n purchase_return_qty += re[0] * factor_inv\n purchase_qty -= purchase_return_qty\n return purchase_qty", "def get_prices_dict(name,products,sales):\r\n return {x:(1-sales[name])*products[x] for x in products}", "def get_week_order_totals(date):\n query = sqla.text(\"\"\"\n SELECT bt.id, bt.name, COUNT(bod.id)\n FROM bread_order_date AS bod\n JOIN bread_order AS bo ON bod.id = bo.date_id\n JOIN bread_type AS bt ON bo.type_id = bt.id\n WHERE bod.id = :date\n GROUP BY(bt.id)\n ORDER BY bt.id\n \"\"\")\n return db.session.execute(query, {\"date\": date.id})", "def get_data_sales(self):\n return {\n 'search_type': SearchForm.SEARCH_TYPE_SALE,\n 'min_price': '40000',\n 'max_price': '50000',\n 'location':'Test, Test',\n 'min_bedrooms': '5',\n 'property_type': str(PropertyTypeFactory().slug)\n }", "def get_amount(data):\r\n data = json.loads(data)\r\n products = data.get(\"CartProduct\", {\"all\": []})\r\n\r\n # Make sure we get all products in the cart.\r\n if \"all\" in products: products = products[\"all\"]\r\n else : products = [products]\r\n\r\n amount = 0.0\r\n\r\n for p in products:\r\n try: amount += float(p[\"productPrice\"]) * float(p[\"productQuantity\"])\r\n except: pass\r\n \r\n return amount", "def find_sale_qty_in_duration(self,from_date,to_date,warehouse,location,product_id):\n if warehouse:\n query=\"\"\"select sum(product_uom_qty) as total, product_uom from stock_move mv \n Inner join stock_location sl on sl.id = mv.location_dest_id and sl.usage='customer'\n where state='done' and mv.location_id in (%s) and product_id = %s and \n warehouse_id= %s and date between '%s 00:00:00' and '%s 23:59:59' group by product_uom\n \"\"\"%(','.join(str(x) for x in location),product_id.id,warehouse.id,from_date,to_date)\n return_query=\"\"\"select sum(product_uom_qty) as total, product_uom from stock_move mv \n Inner join stock_location sl on sl.id = mv.location_id and sl.usage='customer'\n where state='done' and mv.location_dest_id in (%s) and product_id = %s and \n warehouse_id= %s and date between '%s 00:00:00' and '%s 23:59:59' group by product_uom\n \"\"\"%(','.join(str(x) for x in location),product_id.id,warehouse.id,from_date,to_date)\n else:\n query=\"\"\"select sum(product_uom_qty) as total,product_uom from stock_move mv \n Inner join stock_location sl on sl.id = mv.location_dest_id and sl.usage='customer'\n where state='done' and mv.location_id in (%s) and product_id = %s and \n date between '%s 00:00:00' and '%s 23:59:59' group by product_uom\n \"\"\"%(','.join(str(x) for x in location),product_id.id,from_date,to_date)\n return_query=\"\"\"select sum(product_uom_qty) as total,product_uom from stock_move mv \n Inner join stock_location sl on sl.id = mv.location_id and sl.usage='customer'\n where state='done' and mv.location_dest_id in (%s) and product_id = %s and \n date between '%s 00:00:00' and '%s 23:59:59' group by product_uom\n \"\"\"%(','.join(str(x) for x in location),product_id.id,from_date,to_date)\n self._cr.execute(query)\n result = self._cr.fetchall()\n uom_rec = self.env['product.uom']\n sale_qty = 0\n for r in result:\n factor_inv = uom_rec.browse(r[1]).factor_inv\n sale_qty += r[0] * factor_inv\n # Return Qty\n self._cr.execute(return_query)\n return_result = self._cr.fetchall()\n sale_return_qty = 0\n for re in return_result:\n factor_inv = uom_rec.browse(re[1]).factor_inv\n sale_return_qty += re[0] * factor_inv\n sale_qty -= sale_return_qty\n return sale_qty", "def sum_copies_per_product(self, product=None, new=False):\n if product is 
None:\n subprods = SubscriptionProduct.objects.filter(route=self, subscription__active=True)\n else:\n subprods = SubscriptionProduct.objects.filter(route=self, product=product, subscription__active=True)\n if new:\n subprods = subprods.filter(subscription__start_date__gte=date.today() - timedelta(7))\n subprods = subprods.aggregate(Sum('copies'))\n return subprods['copies__sum']", "def sum(self) -> float:\n return sum(self.values)", "def prepare_data_with_location(self,from_date,to_date,locations,all_products):\n data_dict = {}\n stock_quant_obj=self.env['stock.quant']\n for loc in locations:\n all_locations = self.get_all_locations(warehouse=False, location=loc)\n if not all_locations:\n continue\n #here we are finding the opening stock for these we are using base query\n #of inventory at date v10\n result = self.get_product_qty(all_locations,from_date)\n qty_dict = dict((x,y) for x, y in result)\n \n for product in all_products:\n last_sales = ''\n qty_purchase_in_duration = 0\n qty_sales_in_duration = 0\n last_purchase_date = ''\n scrap_location_qty = 0\n adjusted_qty_in_duration = 0\n warehouse_out_qty = 0\n warehouse_in_qty = 0\n# here from result of inventory at date we are seaching for specific product.\n opening_product_qty = qty_dict.get(product.id)\n\n #finding last sales qty\n last_sales = self.find_last_sales_qty(from_date,to_date,False,all_locations,product)\n #finding last purchase date of product\n last_purchase_date = self.find_last_purchase_date(from_date,to_date,all_locations,product)\n #fiding date purchase qty in duration for specific product\n qty_purchase_in_duration = self.find_purchase_qty_in_duration(from_date,to_date,all_locations,product)\n #fiding scrap qty of precific product\n scrap_location_qty = self.find_scap_location_qty(from_date,to_date,product,all_locations)\n #finding sales qty in duration\n qty_sales_in_duration = self.find_sale_qty_in_duration(from_date,to_date,False,all_locations,product)\n #fidning adjusted qty in duration\n adjusted_qty_in_duration = self.find_adjusted_qty_in_duration(from_date, to_date, product, all_locations)\n\n # dest_location_lst = self.get_other_wahouse_locations(warehouse)\n \n # if any(all_locations) and any(dest_location_lst):\n # #fidning warehouse in qty \n # warehouse_in_qty = self.find_warehouse_transer_in_qty(product, all_locations, dest_location_lst,from_date,to_date)\n # #fidning warehouse out qty for specific product.\n # warehouse_out_qty = self.find_warehouse_transer_out_qty(product, all_locations, dest_location_lst,from_date,to_date)\n \n # if warehouse_out_qty:\n # warehouse_out_qty = warehouse_out_qty and warehouse_out_qty[0][0] or ''\n # if warehouse_in_qty:\n # warehouse_in_qty = warehouse_in_qty and warehouse_in_qty[0][0] or ''\n \n if adjusted_qty_in_duration:\n adjusted_qty_in_duration = adjusted_qty_in_duration and adjusted_qty_in_duration[0][0] or '' \n if scrap_location_qty:\n scrap_location_qty = scrap_location_qty and scrap_location_qty[0][0] or ''\n \n # if qty_sales_in_duration:\n # qty_sales_in_duration = qty_sales_in_duration and qty_sales_in_duration[0][0] or ''\n # if qty_purchase_in_duration:\n # qty_purchase_in_duration = qty_purchase_in_duration or ''\n if last_sales:\n last_sales = datetime.strptime(last_sales and last_sales[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if last_purchase_date:\n last_purchase_date = datetime.strptime(last_purchase_date and last_purchase_date[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if data_dict.has_key(loc.id):\n 
data_lst=data_dict.get(loc.id)\n data_lst.append({'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,'last_sales':last_sales or '',\n 'last_purchase_date':last_purchase_date or '','qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0\n ,'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0 \n })\n data_dict.update({loc.id:data_lst})\n continue\n data_dict.update({loc.id:[{'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,\n 'last_sales':last_sales or '','last_purchase_date':last_purchase_date or '',\n 'qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,\n 'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0,\n 'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0\n }]})\n return data_dict", "def sum (self):\n return self.values.sum ()", "def sum (self):\n return self.values.sum ()", "def sum(self, event, from_date, to_date, on, unit=None, where=None):\r\n params = base.get_params(('event', 'from_date', 'to_date', 'on',\r\n 'unit', 'where'), locals(), serialize_param)\r\n\r\n request = http.Request('GET', 'segmentation/sum/', params)\r\n\r\n return request, parsers.parse_json", "def weightedAverage(requestContext, seriesListAvg, seriesListWeight, node):\n\n sortedSeries={}\n\n for seriesAvg, seriesWeight in izip(seriesListAvg , seriesListWeight):\n key = seriesAvg.name.split(\".\")[node]\n if key not in sortedSeries:\n sortedSeries[key]={}\n\n sortedSeries[key]['avg']=seriesAvg\n key = seriesWeight.name.split(\".\")[node]\n if key not in sortedSeries:\n sortedSeries[key]={}\n sortedSeries[key]['weight']=seriesWeight\n\n productList = []\n\n for key in sortedSeries.keys():\n if 'weight' not in sortedSeries[key]:\n continue\n if 'avg' not in sortedSeries[key]:\n continue\n\n seriesWeight = sortedSeries[key]['weight']\n seriesAvg = sortedSeries[key]['avg']\n\n productValues = [ safeMul(val1, val2) for val1,val2 in izip(seriesAvg,seriesWeight) ]\n name='product(%s,%s)' % (seriesWeight.name, seriesAvg.name)\n productSeries = TimeSeries(name,seriesAvg.start,seriesAvg.end,seriesAvg.step,productValues)\n productSeries.pathExpression=name\n productList.append(productSeries)\n\n sumProducts=sumSeries(requestContext, productList)[0]\n sumWeights=sumSeries(requestContext, seriesListWeight)[0]\n\n resultValues = [ safeDiv(val1, val2) for val1,val2 in izip(sumProducts,sumWeights) ]\n name = \"weightedAverage(%s, %s)\" % (','.join(set(s.pathExpression for s in seriesListAvg)) ,','.join(set(s.pathExpression for s in seriesListWeight)))\n resultSeries = TimeSeries(name,sumProducts.start,sumProducts.end,sumProducts.step,resultValues)\n resultSeries.pathExpression = name\n return resultSeries", "def get_product_qty(self,location,date):\n query = \"\"\"\n SELECT tmp.product_id,SUM(quantity) as quantity FROM( \n SELECT location_id,\n product_id,\n SUM(quantity) as quantity,\n date,\n COALESCE(SUM(price_unit_on_quant * quantity) / NULLIF(SUM(quantity), 0), 0) as price_unit_on_quant\n FROM\n ((SELECT\n 
stock_move.id AS id,\n stock_move.id AS move_id,\n dest_location.id AS location_id,\n dest_location.company_id AS company_id,\n stock_move.product_id AS product_id,\n product_template.id AS product_template_id,\n product_template.categ_id AS product_categ_id,\n quant.qty AS quantity,\n stock_move.date AS date,\n quant.cost as price_unit_on_quant,\n stock_move.origin AS source,\n stock_production_lot.name AS serial_number\n FROM\n stock_quant as quant\n JOIN\n stock_quant_move_rel ON stock_quant_move_rel.quant_id = quant.id\n JOIN\n stock_move ON stock_move.id = stock_quant_move_rel.move_id\n LEFT JOIN\n stock_production_lot ON stock_production_lot.id = quant.lot_id\n JOIN\n stock_location dest_location ON stock_move.location_dest_id = dest_location.id\n JOIN\n stock_location source_location ON stock_move.location_id = source_location.id\n JOIN\n product_product ON product_product.id = stock_move.product_id\n JOIN\n product_template ON product_template.id = product_product.product_tmpl_id\n WHERE quant.qty>0 AND stock_move.state = 'done' AND dest_location.usage in ('internal', 'transit')\n AND (\n not (source_location.company_id is null and dest_location.company_id is null) or\n source_location.company_id != dest_location.company_id or\n source_location.usage not in ('internal', 'transit'))\n ) UNION ALL\n (SELECT\n (-1) * stock_move.id AS id,\n stock_move.id AS move_id,\n source_location.id AS location_id,\n source_location.company_id AS company_id,\n stock_move.product_id AS product_id,\n product_template.id AS product_template_id,\n product_template.categ_id AS product_categ_id,\n - quant.qty AS quantity,\n stock_move.date AS date,\n quant.cost as price_unit_on_quant,\n stock_move.origin AS source,\n stock_production_lot.name AS serial_number\n FROM\n stock_quant as quant\n JOIN\n stock_quant_move_rel ON stock_quant_move_rel.quant_id = quant.id\n JOIN\n stock_move ON stock_move.id = stock_quant_move_rel.move_id\n LEFT JOIN\n stock_production_lot ON stock_production_lot.id = quant.lot_id\n JOIN\n stock_location source_location ON stock_move.location_id = source_location.id\n JOIN\n stock_location dest_location ON stock_move.location_dest_id = dest_location.id\n JOIN\n product_product ON product_product.id = stock_move.product_id\n JOIN\n product_template ON product_template.id = product_product.product_tmpl_id\n WHERE quant.qty>0 AND stock_move.state = 'done' AND source_location.usage in ('internal', 'transit')\n AND (\n not (dest_location.company_id is null and source_location.company_id is null) or\n dest_location.company_id != source_location.company_id or\n dest_location.usage not in ('internal', 'transit'))\n ))\n AS foo\n GROUP BY move_id, location_id, company_id, product_id, date) AS tmp\n WHERE tmp.location_id in (%s) AND tmp.date < '%s 00:00:00'\n GROUP BY tmp.product_id \n \"\"\"%(','.join(str(x) for x in location),date)\n self._cr.execute(query)\n result = self._cr.fetchall()\n return result", "def _get_sum_total(\n self, cr, uid, brw, operand, number_month=None,\n one_per=False, bag=None, context=None):\n context = context and dict(context) or {}\n res = 0\n\n # If the report is two or twelve columns, will choose the field needed\n # to make the sum\n if context.get('whole_fy', False) or one_per:\n field_name = 'ytd'\n else:\n field_name = 'period_%s' % str(number_month)\n\n # It takes the sum of the total_ids & operand_ids\n for ttt in getattr(brw, operand):\n res += bag[ttt.id].get(field_name, 0.0)\n return res", "def weighted_sum(self, inputs):\r\n weighted_sum = 0\r\n for 
i in range(self.num_inputs):\r\n weighted_sum += self.weights[i]*inputs[i]\r\n return weighted_sum", "def inventory_report(products):\n unique_names = []\n total_price = 0\n total_weight = 0\n total_flammability = 0\n num_products = len(products)\n for i in range(num_products):\n if products[i].name not in unique_names:\n unique_names.append(products[i].name) \n total_price += products[i].price\n total_weight += products[i].weight\n total_flammability += products[i].flammability\n mean_price = total_price / num_products\n mean_weight = total_weight / num_products\n mean_flammability = total_flammability / num_products\n print('ACME CORPORATION OFFICIAL INVENTORY REPORT')\n print(f'Unique product names: {len(unique_names)}')\n print(f'Average price: {mean_price}')\n print(f'Average weight {mean_weight}')\n print(f'Average flammabilitiy {mean_flammability}')\n return unique_names, mean_price, mean_weight, mean_flammability", "def get_average_sales(data):\n print(\"Calculating stock data...\\n\")\n avg_sales = []\n for list in data:\n int_list_avg = sum(int(item) for item in list) / len(list)\n avg_plus_extra = round(int_list_avg * 1.1)\n avg_sales.append(avg_plus_extra)\n\n return avg_sales", "def sum(self):\n return sum(self.items())", "def get_all_sales(self):\n all_sales = self.dbconn.get_all_sales()\n return all_sales", "def amount_gathering(user_recipe):\r\n #Forms Dictionary\r\n sales_stats = dictionary_formation()\r\n amount_list = []\r\n month_list = [\"Nov\", \"Dec\", \"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\",\r\n \"Jun\", \"Jul\", \"Aug\", \"Sep\", \"Oct\"]\r\n for month in month_list:\r\n bottles_amount = 0\r\n dicts_read = 2\r\n for dicts_read in sales_stats:\r\n analyse_dict = sales_stats[str(dicts_read)]\r\n if month in analyse_dict[\"date_required\"]:\r\n if analyse_dict[\"recipe\"] == user_recipe:\r\n bottles_amount += analyse_dict[\"quantity_ordered\"]\r\n amount_list.append(bottles_amount)\r\n return amount_list", "def get_weights_sum(self):\n return self.w_sum", "def yearly_sales(self):\n\n last_365_days = timezone.now() - timedelta(days=365)\n items = self.item_set.filter(status=\"sold\", updated_at__gte=last_365_days)\n total_sales = 0\n if item in items:\n total_sales += item.price\n return total_sales", "def get_simple_price(self, content, date = None):\n\t\tprices = []\n\t\tfor element in content:\n\t\t\tproduct = element['product']\n\t\t\tquantity = element['quantity']\n\n\t\t\thistory = product.history_set.filter(created__gte = date-timedelta(hours = 24)).order_by('-created')\n\t\t\tif len(history)>0:\n\t\t\t\tprices.append((element, quantity*history[0].price))\n\t\t\telse:\n\t\t\t\thistory = product.history_set.all().order_by('-created')\n\t\t\t\tif len(history)>0:\n\t\t\t\t\tprices.append((element, quantity*history[0].price))\n\t\t\t\telse:\n\t\t\t\t\tprices.append((element, None))\n\t\treturn prices", "def wsum(self):\n return reduce(operator.add, self.wvalues, 0.0)", "def SumCostByBetweenDates(startDate, endDate):\n\n logs.logger.debug(\n \"Start to adds all amount of Cost objects between two dates.\")\n try:\n searchedCostByBetweenDatesFromDB = GetAllCostByDateOfPaymentBandFromDB(\n startDate, endDate)\n sumTotal = 0\n for item in searchedCostByBetweenDatesFromDB:\n sumTotal += item.amount\n logs.logger.info(\n \"Between two dates adds all amount of Cost objects.\")\n return sumTotal\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def _line_example_2_data(data):\n price_by_date_and_country = (\n data.groupby([\"date\", 
\"fruit\"])[\"total_price\"]\n .sum()\n .reset_index() # Move 'date' and 'country' from index to column\n )\n print(price_by_date_and_country.head())\n \"\"\"Print break\"\"\"\n return price_by_date_and_country", "def total_purchase(self):\n\n total_amount = 0\n #grab all the item\n items = self.item_set.all()\n for item in items:\n total_amount += item.price\n return total_amount", "def execQ2():\n # Put columns together\n frame = pan.DataFrame(data, columns=['Product', 'Amount'] )\n amount = frame.groupby(['Product']).count()\n return amount", "def get_daily_gain(data, timezone=pytz.timezone('US/Pacific')):\n\n date_price_data = np.array([(d['completed_at'], d['price']) for d in data])\n\n price_series = pd.Series(date_price_data[:, 1].astype(float),\n index=pd.to_datetime(date_price_data[:, 0]))\n price_series = price_series.sort_index()\n\n # Convert timezone\n utc = pytz.utc\n price_series = price_series.tz_localize(utc).tz_convert(timezone)\n\n\n # Calculate the gain by day\n daily_gain = price_series.groupby(pd.TimeGrouper('D')).sum()\n\n return daily_gain", "def sum(self):\n return self.aggregate(np.sum)", "def total_qty(self):\n return sum(self.quantities)", "def sum(self):\n return sum(self._values.values())", "def daily_speed_sum_reduce(key, values):\n\tyield \"%s: %s, %s\\n\" % (key, sum([int(value) for value in values]), len(values))", "def get_daily_list(context, data_dict):\n # noinspection PyUnresolvedReferences\n\n output = []\n start_date_str = _get_or_bust(data_dict, 'startDate')\n try:\n dt.strptime(start_date_str, '%Y-%m-%d')\n except ValueError:\n raise _ValidationError(\n 'startDate \\'{0}\\' not in YYYY-MM-DD format'.format(start_date_str)\n )\n start_date = parse(start_date_str,\n default=default_release_date).astimezone(gettz('UTC'))\n\n if 'endDate' in data_dict:\n end_date_str = data_dict['endDate']\n try:\n dt.strptime(end_date_str, '%Y-%m-%d')\n except ValueError:\n raise _ValidationError(\n 'endDate \\'{0}\\' not in YYYY-MM-DD format'.format(end_date_str)\n )\n end_date = parse(end_date_str,\n default=default_release_date).astimezone(gettz('UTC'))\n days = (end_date - start_date).days + 1\n if days < 1:\n raise _ValidationError(_(\n 'endDate \\'{0}\\' must be greater '\n 'than startDate \\'{1}\\''.format(\n end_date_str,\n start_date_str\n )\n ))\n else:\n days = 1\n\n for day in range(days):\n single_date = (start_date + datetime.timedelta(days=day))\n single_date_str = single_date.replace(tzinfo=None).isoformat()\n q = {\n 'q': (\n 'product_type_code:24 AND '\n 'last_release_date:\"{release_date}Z\"'.format(\n release_date=single_date_str\n )\n )\n }\n\n results = _get_action('package_search')(context, q)\n\n count = results['count']\n if count > 1:\n raise _ValidationError(\n 'More than one Daily for date \\'{0}\\''.format(single_date_str)\n )\n\n for result in results['results']:\n children = []\n\n for child in result.get('child_list', []):\n children.append(\n get_product(context, {\n 'productId': child\n })\n )\n\n result['children'] = children\n output.append(result)\n\n return output", "def get_the_sum_of_prices_from_table(table, item_ids):\n return common.szum(table, PRICE, lambda row: row[ID] in item_ids)", "def calc_total_price(price_per_day, date_from, date_to):\n date_from = datetime.strptime(date_from, '%Y-%m-%d')\n date_to = datetime.strptime(date_to, '%Y-%m-%d')\n n_days = date_to - date_from\n n_days = n_days.days + 1\n return price_per_day * n_days", "def get_sales_data():\n print(\"Retrieving all the sales information...\")\n data = 
SHEET.worksheet('sales')\n print(\"Compilation complete!\\n\")\n return data", "def _total_price(self, cr, uid, ids, field_name, arg, context={}):\n res = {}\n for record in self.browse(cr, uid, ids, context=context):\n val = 0.0\n for line in record.item_ids:\n val += line.price\n res[record.id] = val \n return res", "def sum(self):\n return self.vsum", "def current_energy_produced(self):\n return self.df.exp.sum()", "def weighted_sum_da(da, dim=None, weights=None):\n if weights is None:\n warn('Computing sum using equal weights for all data points')\n return da.sum(dim)\n else:\n weights, _ = validate_weights(da, dim, weights)\n return (da * weights).sum(dim)", "def leverage(self):\n return self.weights.sum(axis=1).dropna().apply(float)", "def get_total_amount(self):\n total_price = 0.00\n\n for k, v in self.order.product_orders.items():\n total_price += v.quantity * v.product.price\n\n return total_price", "def sumSet(weightedSet):\n\tsum = 0\n\tfor example in weightedSet:\n\t\tsum += example.weight\n\treturn sum", "def find_last_sales_qty(self,from_date,to_date,warehouse,location,product_id):\n if warehouse:\n query=\"\"\"\n select date from stock_move mv \n Inner join stock_location sl on sl.id = mv.location_dest_id and sl.usage='customer' and mv.location_id in (%s)\n where state='done' and product_id = %s and warehouse_id= %s and date between '%s 00:00:00' and '%s 23:59:59'\n order by date desc\n limit 1\n \"\"\"%(','.join(str(x) for x in location),product_id.id,warehouse.id,from_date,to_date)\n else:\n query=\"\"\"\n select date from stock_move mv \n Inner join stock_location sl on sl.id = mv.location_dest_id and sl.usage='customer' and mv.location_id in (%s)\n where state='done' and product_id = %s and date between '%s 00:00:00' and '%s 23:59:59'\n order by date desc\n limit 1\n \"\"\"%(','.join(str(x) for x in location),product_id.id,from_date,to_date)\n self._cr.execute(query)\n result = self._cr.fetchall()\n return result", "def _weighted_sum(self, data, sum_func):\n if self.weights.shape != data.shape:\n # Add extra axes to the weights for broadcasting\n weights = np.reshape(self.weights, [len(self.weights), 1, 1])\n else:\n weights = self.weights\n\n # Turns out bn.nansum has an implementation that is not\n # precise enough for float32 sums. Doing this should\n # ensure the sums are carried out as float64\n weights = weights.astype('float64')\n weighted_sum = sum_func(data * weights, axis=0)\n return weighted_sum, weights", "def sum_value(self, lv, rv):", "def get_all_sales_for_user(self, username):\n all_sales = self.dbconn.get_all_sales_for_user(username=username)\n return all_sales" ]
[ "0.68591684", "0.6359811", "0.6157366", "0.60738873", "0.6023802", "0.5834238", "0.58238226", "0.5762609", "0.5700813", "0.56557745", "0.5614207", "0.5598102", "0.5577538", "0.55420864", "0.5539422", "0.55292135", "0.5523316", "0.54981434", "0.54949695", "0.5454806", "0.5439196", "0.5438103", "0.5416838", "0.5407052", "0.5371839", "0.5357793", "0.53428096", "0.53339607", "0.5330621", "0.5308541", "0.5289461", "0.5281291", "0.52669686", "0.52661836", "0.52644527", "0.5262526", "0.52581704", "0.5245099", "0.5244529", "0.5238931", "0.5215617", "0.5197052", "0.51893586", "0.5179003", "0.517682", "0.5158649", "0.51562077", "0.5151569", "0.51497626", "0.5141067", "0.51388353", "0.50968003", "0.5093133", "0.50875866", "0.508712", "0.5078272", "0.50749105", "0.5062757", "0.50609267", "0.5054996", "0.5051942", "0.5051942", "0.50506955", "0.50451595", "0.5034872", "0.50149524", "0.5014901", "0.50126386", "0.5001962", "0.4992013", "0.4987372", "0.49843785", "0.49836537", "0.4977106", "0.49769473", "0.49728787", "0.49712023", "0.49673626", "0.4965885", "0.4965259", "0.49646592", "0.49595773", "0.49574468", "0.49489188", "0.49471188", "0.4946671", "0.49393943", "0.49288642", "0.49282807", "0.492478", "0.49230936", "0.49166116", "0.49115062", "0.4897586", "0.4891917", "0.48850232", "0.48813844", "0.48782596", "0.48531136", "0.48523757" ]
0.6372925
1
This function converts a date to the corresponding day type (working day vs. holiday/weekend)
def generate_day_type(date):
    # France() is a holiday calendar exposing is_holiday(); in the original
    # source this presumably comes from the workalendar package.
    cal = France()
    if cal.is_holiday(date):
        if date.weekday() in range(5):  # Mon-Fri holiday -> non-working day
            return 0.
        else:  # note: as written, a holiday falling on a weekend scores 1.
            return 1.
    else:
        if date.weekday() in range(5):  # ordinary Mon-Fri -> working day
            return 1.
        else:  # Saturday/Sunday -> non-working day
            return 0.
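For orientation, a hypothetical call sketch (not part of the dataset record): it assumes generate_day_type above is in scope and that France is workalendar's French calendar (from workalendar.europe import France); the dates are illustrative.

import datetime

print(generate_day_type(datetime.date(2025, 7, 14)))  # Bastille Day, a Monday -> 0.0
print(generate_day_type(datetime.date(2025, 7, 15)))  # ordinary Tuesday -> 1.0
print(generate_day_type(datetime.date(2025, 7, 12)))  # ordinary Saturday -> 0.0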
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def date_to_day_of_week(date):\n return date.weekday()", "def number_to_day(n):\n if n == 1:\n return \"Sunday\"\n elif n == 2:\n return \"Monday\"\n elif n == 3:\n return \"Tuesday\"\n elif n == 4:\n return \"Wednesday\"\n elif n == 5:\n return \"Thursday\"\n elif n == 6:\n return \"Friday\"\n elif n == 7:\n return \"Saturday\"", "def WEEKDAY(date, return_type=1):\n if return_type not in _weekday_type_map:\n raise ValueError(\"Invalid return type %s\" % (return_type,))\n (first, index) = _weekday_type_map[return_type]\n return (_make_datetime(date).weekday() - first) % 7 + index", "def get_the_weekday(self,date):\n date_convert = date.split('-')\n week_days = (\"Monday\", \"Tuesday\", \"Wednesday\", \"Thursday\", \"Friday\", \"Saturday\", \"Sunday\")\n date_list = [int(i) for i in date_convert]\n day = datetime.date(date_list[0], date_list[1], date_list[2])\n # convert weekday into digit (eg Mon -> 0,)\n num_day = day.weekday()\n day_as_string = week_days[num_day]\n return day_as_string", "def assign_numeric_day(x):\n\n if x == 'Sunday':\n return 0\n elif x == 'Monday':\n return 1\n elif x == 'Tuesday':\n return 2\n elif x == 'Wednesday':\n return 3\n elif x == 'Thursday':\n return 4\n elif x == 'Friday':\n return 5\n elif x == 'Saturday':\n return 6", "def WEEKDAY(\n serial_number: func_xltypes.XlNumber,\n return_type: func_xltypes.XlNumber = None\n) -> func_xltypes.XlNumber:\n\n date = utils.number_to_datetime(int(serial_number))\n\n if return_type is None:\n # Numbers 1 (Sunday) through 7 (Saturday)\n weekDays = (2, 3, 4, 5, 6, 7, 1)\n return weekDays[date.weekday()]\n\n elif int(return_type) == 1:\n # Numbers 1 (Sunday) through 7 (Saturday)\n weekDays = (2, 3, 4, 5, 6, 7, 1)\n return weekDays[date.weekday()]\n\n # weekday() is 0 based, starting on a Monday\n elif int(return_type) == 2:\n # Numbers 1 (Monday) through 7 (Sunday)\n weekDays = (1, 2, 3, 4, 5, 6, 7)\n return weekDays[date.weekday()]\n\n elif int(return_type) == 3:\n # Numbers 0 (Monday) through 6 (Sunday)\n weekDays = (0, 1, 2, 3, 4, 5, 6)\n return weekDays[date.weekday()]\n\n elif int(return_type) == 11:\n # Numbers 1 (Monday) through 7 (Sunday)\n weekDays = (1, 2, 3, 4, 5, 6, 7)\n return weekDays[date.weekday()]\n\n elif int(return_type) == 12:\n # Numbers 1 (Tuesday) through 7 (Monday)\n weekDays = (7, 1, 2, 3, 4, 5, 6)\n return weekDays[date.weekday()]\n\n elif int(return_type) == 13:\n # Numbers 1 (Wednesday) through 7 (Tuesday)\n weekDays = (6, 7, 1, 2, 3, 4, 5)\n return weekDays[date.weekday()]\n\n elif int(return_type) == 14:\n # Numbers 1 (Thursday) through 7 (Wednesday)\n weekDays = (5, 6, 7, 1, 2, 3, 4)\n return weekDays[date.weekday()]\n\n elif int(return_type) == 15:\n # Numbers 1 (Friday) through 7 (Thursday)\n weekDays = (4, 5, 6, 7, 1, 2, 3)\n return weekDays[date.weekday()]\n\n elif int(return_type) == 16:\n # Numbers 1 (Saturday) through 7 (Friday)\n weekDays = (3, 4, 5, 6, 7, 1, 2)\n return weekDays[date.weekday()]\n\n elif int(return_type) == 17:\n # Numbers 1 (Sunday) through 7 (Saturday)\n weekDays = (2, 3, 4, 5, 6, 7, 1)\n return weekDays[date.weekday()]\n\n else:\n raise xlerrors.NumExcelError(\n f\"return_type needs to be omitted or one of 1, 2, 3, 11, 12, 13,\\\n 14, 15, 16 or 17. 
You supplied {return_type}\")", "def get_date_day(date):\n cut_date = date.split('-')\n return cut_date[2]", "def date_day_of_week(date):\n day_of_week = date.strftime('%A')\n return day_of_week", "def convertSODate(datenum):\n #Date numbers seem to start with 0 = 2001-01-01\n base_date = datetime.date(2001, 1, 1)\n #add key from the spot on object to this base date to get the date\n record_date = base_date + datetime.timedelta(days=int(datenum))\n record_date = record_date.isoformat()\n return record_date", "def dow(values, feature, parent): \r\n input_date = values[0]\r\n \r\n # Return dayOfWeek() % 7 so that values range from 0 (sun) to 6 (sat)\r\n # to match Postgresql behaviour\r\n if type(input_date) == QDateTime:\r\n return input_date.date().dayOfWeek() % 7\r\n elif type(input_date) == QDate:\r\n return input_date.dayOfWeek() % 7\r\n elif type(input_date) in (str, unicode): \r\n # Convert string to qdate\r\n input_qdate = QDate.fromString(input_date, 'yyyy-MM-dd')\r\n if input_qdate.isValid():\r\n return input_qdate.dayOfWeek() % 7 \r\n else:\r\n return None", "def dow(values, feature, parent): \r\n input_date = values[0]\r\n \r\n # Return dayOfWeek() % 7 so that values range from 0 (sun) to 6 (sat)\r\n # to match Postgresql behaviour\r\n if type(input_date) == QDateTime:\r\n return input_date.date().dayOfWeek() % 7\r\n elif type(input_date) == QDate:\r\n return input_date.dayOfWeek() % 7\r\n elif type(input_date) in (str, unicode): \r\n # Convert string to qdate\r\n input_qdate = QDate.fromString(input_date, 'yyyy-MM-dd')\r\n if input_qdate.isValid():\r\n return input_qdate.dayOfWeek() % 7 \r\n else:\r\n return None", "def insure_date(d):\n if isinstance(d, BeautifulDate):\n return date(year=d.year, month=d.month, day=d.day)\n else:\n return d", "def day_num(x):\r\n if x==\"Sunday\":\r\n return 0\r\n elif x==\"Monday\":\r\n return 1\r\n elif x==\"Tuesday\":\r\n return 2\r\n elif x==\"Wednesday\":\r\n return 3\r\n elif x==\"Thursday\":\r\n return 4\r\n elif x==\"Friday\":\r\n return 5\r\n elif x==\"Saturday\":\r\n return 6", "def date_form(day):\r\n new_day = \"\"\r\n if day == \"Monday\":\r\n new_day = \"Poniedziaล‚ek\"\r\n elif day == \"Tuesday\":\r\n new_day = \"Wtorek\"\r\n elif day == \"Wednesday\":\r\n new_day = \"ลšroda\"\r\n elif day == \"Thursday\":\r\n new_day = \"Czwartek\"\r\n elif day == \"Friday\":\r\n new_day = \"Piฤ…tek\"\r\n elif day == \"Saturday\":\r\n new_day = \"Sobota\"\r\n elif day == \"Sunday\":\r\n new_day = \"Niedziela\"\r\n return new_day", "def convert_day(day):\n\n day_list = []\n\n if 'M' in day:\n day_list.append(1)\n if 'T' in day:\n day_list.append(2)\n if 'W' in day:\n day_list.append(3)\n if 'R' in day:\n day_list.append(4)\n if 'F' in day:\n day_list.append(5)\n if 'S' in day:\n day_list.append(6)\n if 'U' in day:\n day_list.append(7)\n\n return day_list", "def DAY(date):\n return _make_datetime(date).day", "def test_jd2dow():\n\tjd = [2434923.5,2458130.5]\n\tdnum_true = [3,5]\n\tdnam_true = np.array(['Wed','Fri'],dtype='|S3')\n\tdnum_test, dnam_test = date_functions.jd2dow( jd )\n\t\n\tassert dnum_test[0] == dnum_true[0]\n\tassert dnum_test[1] == dnum_true[1]\n\tassert dnam_test[0] == dnam_true[0]\n\tassert dnam_test[1] == dnam_true[1]", "def weekday(day):\n return (day % 7) - 1", "def convert_date(date):\n date = get_nummeric_only(date) \n \n \n if len(date) == 8:\n\n year = int(date[:4]) \n month = int(date[4:6])\n day = int(date[6:8])\n \n date_time = dt.datetime(year,month,day)\n \n return date_time\n \n if len(date) == 12 or len(date) == 
14:\n\n year = int(date[:4]) \n month = int(date[4:6])\n day = int(date[6:8])\n hour = int(date[8:10])\n minute = int(date[10:12])\n \n date_time = dt.datetime(year,month,day, hour, minute)\n \n return date_time\n else:\n return 0", "def day_of_the_week(arg):", "def make_tuesday(date):\n offset = (date.weekday() - 1) % 7\n tuesday = date - datetime.timedelta(days=offset)\n # Ensure that the database has this date\n with get_dbconn(\"postgis\") as conn:\n cursor = conn.cursor()\n cursor.execute(\"SELECT max(valid) from usdm\")\n maxdate = cursor.fetchone()[0]\n if maxdate is not None:\n tuesday = min([tuesday, maxdate])\n return tuesday", "def doomsday(y):", "def dayweek_clean(fecha):\n\n try:\n lista = fecha.split(sep = '/')\n fecha = '-'.join(reversed(lista))\n temp = pd.Timestamp(fecha)\n dia_semana = (temp.dayofweek, temp.day_name())\n return dia_semana[1]\n \n except:\n #print ('hola')\n return None", "def date_day(date):\n return date_day_of_month(date)", "def convert_date_type(dates):\n try:\n return datetime.strptime(dates, '%Y-%m-%d')\n except ValueError:\n return datetime.strptime(dates, '%d/%m/%Y')", "def get_day(x):\n return x[\"SALE DATE\"].day", "def naturalday(value, format='%b %d'):\r\n try:\r\n value = date(value.year, value.month, value.day)\r\n except AttributeError:\r\n # Passed value wasn't date-ish\r\n return value\r\n except (OverflowError, ValueError):\r\n # Date arguments out of range\r\n return value\r\n delta = value - date.today()\r\n if delta.days == 0:\r\n return _('today')\r\n elif delta.days == 1:\r\n return _('tomorrow')\r\n elif delta.days == -1:\r\n return _('yesterday')\r\n return value.strftime(format)", "def preprocess_date(date_):\n if 'JAN' in date_:\n date_ = date_.replace('JAN', '01')\n elif 'FEB' in date_:\n date_ = date_.replace('FEB', '02')\n elif 'MAR' in date_:\n date_ = date_.replace('MAR', '03')\n elif 'APR' in date_:\n date_ = date_.replace('APR', '04')\n elif 'MAY' in date_:\n date_ = date_.replace('MAY', '05')\n elif 'JUN' in date_:\n date_ = date_.replace('JUN', '06')\n elif 'JUL' in date_:\n date_ = date_.replace('JUL', '07')\n elif 'AUG' in date_:\n date_ = date_.replace('AUG', '08')\n elif 'SEP' in date_:\n date_ = date_.replace('SEP', '09')\n elif 'OCT' in date_:\n date_ = date_.replace('OCT', '10')\n elif 'NON' in date_:\n date_ = date_.replace('NON', '11')\n elif 'DEC' in date_:\n date_ = date_.replace('DEC', '12')\n if date_[-2:] > '17':\n date_ = date_[:6] + '19' + date_[-2:]\n else:\n date_ = date_[:6] + '20' + date_[-2:]\n return datetime.strptime(date_, '%d-%m-%Y')", "def dayofweek(day, month, year, formatresult=True):\n if formatresult is False:\n return calendar.weekday(year, month, day) + 1\n days = {\n 0: 'Monday',\n 1: \"Tuesday\",\n 2: \"Wednesday\",\n 3: \"Thursday\",\n 4: \"Friday\",\n 5: \"Saturday\",\n 6: \"Sunday\"\n }\n return days[calendar.weekday(year, month, day)]", "def day(sym, date):\n return get(sym, date, date)[0][1]", "def str_day(s):\n # TODO: Fix the -06:00 time zone offset\n if s:\n d = convert_from_iso(s)\n return datetime.datetime.strftime(d, \"%d\").strip(\" \")\n else:\n # Couldn't parse, return original.\n return s", "def day_of_week(date: datetime) -> str:\n weekday = date.weekday()\n return calendar.day_name[weekday]", "def nth_dow_to_day(tupel, y):\r\n m = tupel[0]\r\n dow = tupel[1]\r\n n = tupel[2]\r\n\r\n if dow == 7:\r\n dow = 0\r\n\r\n first_dow = date_to_dow(y, m, 1) # the dow of the first of the month\r\n shift = dow - first_dow\r\n if shift < 0:\r\n shift += 7\r\n\r\n return shift + (7 * 
n) - 6", "def day_of_week_for_start_day(self):\n import calendar\n\n day = self.idfobjects[\"RUNPERIOD\"][0][\"Day_of_Week_for_Start_Day\"]\n\n if day.lower() == \"sunday\":\n return calendar.SUNDAY\n elif day.lower() == \"monday\":\n return calendar.MONDAY\n elif day.lower() == \"tuesday\":\n return calendar.TUESDAY\n elif day.lower() == \"wednesday\":\n return calendar.WEDNESDAY\n elif day.lower() == \"thursday\":\n return calendar.THURSDAY\n elif day.lower() == \"friday\":\n return calendar.FRIDAY\n elif day.lower() == \"saturday\":\n return calendar.SATURDAY\n else:\n return 0", "def format_day(value):\n if value:\n day = dateutil.parser.parse(value)\n # pylint: disable=E1101\n return day.strftime(\"%B %d %Y\")\n else:\n return 'N/A'", "def case_event_date_day_represent(value):\n\n return S3DateTime.date_represent(value, utc=True)", "def date_to_dow(y, m, d):\r\n # Python uses Monday week start, so wrap around\r\n w = calendar.weekday(y, m, d) + 1\r\n if w == 7:\r\n w = 0\r\n return w", "def get_weekday_number(date):\n return date.strftime('%w')", "def convert(v):\n\n if type(v) is str and rexp.match(v):\n return as_date(v)\n return v", "def fed_holiday(df):\n\n if (df[\"Date\"].month == 1) & (df[\"Date\"].day == 1):\n return \"New Year's Day\"\n elif (df[\"Date\"].month == 1) & (15 <= df[\"Date\"].day <= 21) & (df[\"Date\"].dayofweek == 1):\n return \"Martin Luther King Day\"\n elif (df[\"Date\"].month == 2) & (df[\"Date\"].day == 18):\n return \"President's Day\"\n elif (df[\"Date\"].month == 5) & (25 <= df[\"Date\"].day <= 31) & (df[\"Date\"].dayofweek == 1):\n return \"Memorial Day\"\n elif (df[\"Date\"].month == 7) & (df[\"Date\"].day == 4):\n return \"Independence Day\"\n elif (df[\"Date\"].month == 9) & (1 <= df[\"Date\"].day <= 7) & (df[\"Date\"].dayofweek == 1):\n return \"Labor Day\"\n elif (df[\"Date\"].month == 10) & (8 <= df[\"Date\"].day <= 14) & (df[\"Date\"].dayofweek == 1):\n return \"Columbus Day\"\n elif (df[\"Date\"].month == 11) & (df[\"Date\"].day == 11):\n return \"Veterans Day\"\n elif (df[\"Date\"].month == 11) & (22 <= df[\"Date\"].day <= 28) & (df[\"Date\"].dayofweek == 4):\n return \"Thanksgiving Day\"\n elif (df[\"Date\"].month == 12) & (df[\"Date\"].day == 25):\n return \"Christmas Day\"\n else:\n return \"Non-holidays\"", "def naturaldate(value):\r\n try:\r\n value = date(value.year, value.month, value.day)\r\n except AttributeError:\r\n # Passed value wasn't date-ish\r\n return value\r\n except (OverflowError, ValueError):\r\n # Date arguments out of range\r\n return value\r\n delta = abs_timedelta(value - date.today())\r\n if delta.days >= 365:\r\n return naturalday(value, '%b %d %Y')\r\n return naturalday(value)", "def day(dt: datetime.datetime) -> str:\n day: str = dt.strftime(\"%A\")\n return day", "def weekday(self, dt):\n days = {\n 0: self.MONDAY,\n 1: self.TUESDAY,\n 2: self.WEDNESDAY,\n 3: self.THURSDAY,\n 4: self.FRIDAY,\n 5: self.SATURDAY,\n 6: self.SUNDAY\n }\n return days.get(dt.weekday())", "def get_day(today: Day, weekday_number: int) -> Day:\n assert type(today) is Day\n assert type(weekday_number) is int\n\n today = today.to_date_time()\n date_list = list(rrule(DAILY, count=1, wkst=MO, byweekday=weekday_number, dtstart=today))\n if date_list:\n return Day(date_list[0])", "def _to_ceiling_busi_day(date):\n try:\n date = parse(date)\n except TypeError:\n date = date\n\n date = date.replace(day=1)\n ceiling_busi_day = pd.date_range(date, periods=1, freq='BM').strftime('%Y-%m-%d')[0]\n return ceiling_busi_day", "def meetup_day(year, month, 
day_of_week, day_occurrence):\n \n cal = calendar.monthcalendar(year, month)\n day_of_week_index = days_of_week[day_of_week]\n \n not_teenth = day_occurrence != 'teenth'\n day_is_in_first_week = cal[0][day_of_week_index] != 0\n \n if not_teenth and day_is_in_first_week:\n week_index = week_indices[day_occurrence]\n \n elif not_teenth and not day_is_in_first_week:\n week_index = week_indices[day_occurrence] + 1\n \n else:\n for i in range(len(cal)):\n if cal[i][day_of_week_index] >= 10:\n week_index = i\n break\n\n date = cal[week_index][day_of_week_index]\n return datetime.date(year, month, date)", "def holiday_type() -> Holiday:\n return Holiday.CHRISTMAS", "def isoweekday(self):\n return 0", "def isoweekday(self):\n return 0", "def test_mjd2dow():\n\tmjd = [34923.0,58130.5]\n\tdnum_true = [3,5]\n\tdnam_true = np.array(['Wed','Fri'],dtype='|S3')\n\tdnum_test, dnam_test = date_functions.mjd2dow( mjd )\n\t\n\tassert dnum_test[0] == dnum_true[0]\n\tassert dnum_test[1] == dnum_true[1]\n\tassert dnam_test[0] == dnam_true[0]\n\tassert dnam_test[1] == dnam_true[1]", "def _to_floor_busi_day(date):\n try:\n date = parse(date)\n except TypeError:\n date = date\n\n date = date + relativedelta(months=-1)\n date = date.replace(day=1)\n floor_busi_day = pd.date_range(date, periods=1, freq='BM').strftime(\"%Y-%m-%d\")[0]\n\n return floor_busi_day", "def holiday_type() -> Holiday:\n return Holiday.EASTER", "def day_name(x):\r\n if x==0:\r\n return \"Sunday\"\r\n elif x==1:\r\n return \"Monday\"\r\n elif x==2:\r\n return \"Tuesday\"\r\n elif x==3:\r\n return \"Wednesday\"\r\n elif x==4:\r\n return \"Thursday\"\r\n elif x==5:\r\n return \"Friday\"\r\n elif x==6:\r\n return \"Saturday\"", "def Day_of_week(day, month, year):\r\n if year % 4 == 0 and (year % 400 == 0 or year % 100 != 0):\r\n doomsday = [11, 29, 21, 4, 9, 6, 11, 8, 5, 10, 7, 12]\r\n else:\r\n doomsday = [10, 28, 21, 4, 9, 6, 11, 8, 5, 10, 7, 12]\r\n exact_day = ((day - doomsday[month-1]) + Dooms_day(year)) % 7\r\n character_day = [\"Sunday\", \"Monday\", \"Tuesday\", \"Wednesday\", \"Thursday\", \r\n \"Friday\", \"Saturday\"]\r\n return character_day[exact_day]", "def convert_date(date):\n if isinstance(date, datetime.date):\n return date\n elif isinstance(date, str):\n match = DATE_PATTERN.match(date)\n if match:\n groups = match.groups()\n if len(groups) == 3:\n return datetime.date(\n year=int(\n groups[0]), month=int(\n groups[1]), day=int(\n groups[2]))\n return None", "def admission_dow(df, claims=False):\n if claims:\n df[\"dow\"] = df[\"first_service_date\"].dt.weekday\n else:\n df[\"dow\"] = df[\"admission_date\"].dt.weekday\n\n df[\"dow\"].replace(\n {\n 0: \"Monday\",\n 1: \"Tuesday\",\n 2: \"Wednesday\",\n 3: \"Thursday\",\n 4: \"Friday\",\n 5: \"Saturday\",\n 6: \"Sunday\",\n },\n inplace=True,\n )\n\n return df", "def date_with_day_of_week_appended(mydate): \n import datetime\n month, day, year = (int(x) for x in mydate.split('/')) \n shortened_year = abs(year) % 100 \n day_of_week = datetime.date(year, month, day).strftime(\"%A\")\n return \"%s/%s/%s %s\" % (month,day,shortened_year, day_of_week)", "def create_date_feature_daytime(df = None, date = None):\n df[date] = pd.to_datetime(df[date])\n df['dayOfWeek'] = df[date].dt.dayofweek\n df['dayOfMonth'] = df[date].dt.day #???\n df['year'] = df[date].dt.year\n df['month'] = df[date].dt.month\n return df", "def case_event_date_day(self, row):\n\n if hasattr(row, \"dvr_case_event\"):\n row = row.dvr_case_event\n\n try:\n date = row.date\n except AttributeError:\n date = None\n\n if 
date:\n # Get local hour\n date = date.replace(tzinfo=self.UTC).astimezone(self.LOCAL)\n hour = date.time().hour\n\n # Convert to date\n date = date.date()\n if hour <= 7:\n # Map early hours to previous day\n return date - datetime.timedelta(days=1)\n else:\n date = None\n return date", "def day_of_week(dt):\n cday = dt\n mday = 2\n uday = cday.isocalendar()[2] + mday\n try:\n if uday > 7:\n CURRDAY = uday - 7\n log.debug(\"1;EME;RUNNING;000;Scheduler.py;Setting customized day of week>7 : \", CURRDAY)\n else:\n CURRDAY = uday\n log.debug(\"1;EME;RUNNING;000;Scheduler.py;Setting customized day of week : \", CURRDAY)\n return CURRDAY\n except Exception as e:\n log.exception(\"1;EME;FAILURE;700;SCHEDULE ERROR \" + str(e), exc_info=False)\n sys.exit(0)", "def _DayNumToWeekdayNum(daynum):\n return (daynum + _WEEKDAY_BASE) % NUM_WEEKDAYS", "def interpret_date( text ):\n try:\n as_arrow = arrow.get(text, \"MM/DD/YYYY\").replace(\n tzinfo=tz.tzlocal())\n except:\n flask.flash(\"Date '{}' didn't fit expected format 12/31/2001\")\n raise\n return as_arrow.isoformat()", "def get_day_of_week_string(date_string):\n\n # Split on / string, and feed to a datetime object, to use weekday function\n date_strings = date_string.split(\"/\")\n update_date = datetime.datetime(int(date_strings[2]), int(date_strings[1]), int(date_strings[0]))\n weekDays = (\"Mon\", \"Tue\", \"Wed\", \"Thur\", \"Fri\", \"Sat\", \"Sun\")\n day_of_week = str(weekDays[update_date.weekday()])\n return day_of_week", "def convertDate(indate):\n a = datetime.datetime.fromtimestamp(indate / 1000.0)\n a_str = a.strftime('%m/%d/%y')\n return datetime.datetime.strptime(a_str, '%m/%d/%y').date()", "def day_of_week():\n return calendar.day_name[datetime.date.today().weekday()]", "def get_day_string(self, date_obj):\n return date_obj.strftime('%A')[:3].upper()", "def dow(self):\n comparator = Date(11, 12, 2014) # known to be a 'Wednesday'\n DOW = ['Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday', 'Monday', 'Tuesday']\n diff = self.diff(comparator)\n return DOW[diff % 7]", "def date_to_python_date(date):\n try:\n ret_date = datetime.datetime.strptime(date, \"%d/%m/%y\")\n except ValueError:\n #another format -- a year can also have four digits\n ret_date = datetime.datetime.strptime(date, \"%d/%m/%Y\")\n return ret_date", "def set_day_state(self, day_or_night):\r\n\t\tif day_or_night == 1: return 'a.m'\r\n\t\tif day_or_night == 0: return 'p.m'", "def convert_str_to_date(date_str):\n if date_str.lower() == 'tomorrow':\n return datetime.date.today() + datetime.timedelta(days=1)\n elif date_str.lower() == 'today':\n return datetime.date.today()\n elif date_str.lower() == 'yesterday':\n return datetime.date.today() + datetime.timedelta(days=-1)\n elif date_str.lower() in day_values:\n return next_weekday(date_str)\n # Otherwise, process as a three-part date\n part_list = date_str.split()\n day = part_list[1].replace('th', '').replace('rd', '').replace('st', '')\n processed_date_str = ' '.join([part_list[0], day, part_list[2]])\n return datetime.datetime.strptime(processed_date_str, DATE_STR_FMT).date()", "def normalize_date(__date, type='arrival'):\n\n if isinstance(__date, datetime.datetime):\n # If type is arrival pass RESERVATION_START_TIME as tup else RESERVATION_END_TIME as tup\n if type == 'arrival':\n tup = RESERVATION_START_TIME\n else:\n tup = RESERVATION_END_TIME\n\n __date = datetime.datetime(__date.year, __date.month, __date.day,\n tup[0], tup[1], tup[2])\n\n return __date\n return None", "def day_from_string(date_str):\n date = 
dateutil.parser.parse(date_str)\n if date.tzinfo:\n date = pytz.utc.normalize(date).replace(tzinfo=None)\n return date.replace(microsecond=0, second=0, minute=0, hour=0)", "def get_date_of_weekday(weekday, week_number=-1, week_delta=0):\n days_to_add = days_to_add_to_monday(weekday)\n if week_number > -1:\n monday = get_monday_date_from_week_number(week_number)\n # result = monday + timedelta(days=days_to_add) + timedelta(weeks=week_delta)\n else:\n monday = get_monday_date(week_delta)\n # result = get_date(monday, weekday)\n result = monday + timedelta(days=days_to_add) + timedelta(weeks=week_delta)\n return result", "def fetch_day(day, timezone):\n\n try:\n if day == 'today':\n return pendulum.now(tz=timezone)\n elif day == 'tomorrow':\n return pendulum.tomorrow(tz=timezone)\n elif day == 'yesterday':\n return pendulum.yesterday(tz=timezone)\n elif re.match(\"^(0?[1-9]|[12][0-9]|3[01])/(0?[1-9]|1[0-2])/\\d\\d$\", day):\n return pendulum.from_format(day, 'DD/MM/YY', tz=timezone)\n except Exception as e:\n logging.warning(e)\n return \"Seems like there's some problem with the date input.\"", "def get_day():\n return handle_invalid_inputs(question_4, days)", "def turn2type(value,t):\n k = None\n if (t==\"str\"):\n\tk = value\n if (t==\"int\"):\n\tk = int(value)\n if (t==\"float\"):\n\tk = float(value)\n if (t==\"date\"):\n\tk = time.mktime(datetime.datetime.strptime(value, \"%m/%d/%Y\").timetuple())\n return k", "def format_dow(value):\n if value:\n return [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday',\n ][value]\n else:\n return 'N/A'", "def getDay(self):\n return _libsbml.Date_getDay(self)", "def day_fromordinal(cls, ordinal):\n return (ordinal - cls.EPOCH) % 210", "def get_date(date):\n return date", "def _calculate_date(day_of_year):\n date = datetime.datetime.strptime(str(day_of_year), '%j')\n return date.strftime('%d-%b')", "def isoweekday(self, *args, **kwargs): # real signature unknown\r\n pass", "def get_weekday():\n result = datetime.today().weekday() + 1\n return result", "def str_2_date( sdate ):\r\n if isinstance( sdate, str ):\r\n for fmt in ( \"%Y-%m-%d\", \"%m/%d/%Y\" ):\r\n try:\r\n return datetime.strptime( sdate, fmt ).date()\r\n except ValueError:\r\n pass\r\n else:\r\n return sdate", "def date_to_operate_format(self, date):\n date = date.replace(\" \", \"\")\n date = date.split(',')\n day = date[1]\n month = date[2]\n\n day = self.check_and_repair_right_format(day)\n month = self.check_and_repair_right_format(month)\n\n right_format = date[0] + month + day\n return right_format", "def find_date(startdate, weekday, weeknumber):\n import datetime\n # The +1 makes this match up with linux times (day 1 = Monday)\n daysahead = weekday - (startdate.weekday() + 1)\n if daysahead < 0:\n # Target day already happened this week\n daysahead += 7\n # Add 7 days for each Week Of Month we want - but 'This' week is week 1\n daysahead += 7 * (weeknumber - 1)\n return startdate + datetime.timedelta(daysahead)", "def isoweekday(self):\n # 1-Jan-0001 is a Monday\n return self.toordinal() % 7 or 7", "def unify_date_format(date):\n if type(date) == str:\n try:\n date = dateutil.parser.parse(date) \n except:\n pass\n return date", "def set_week_day(self, wday):\r\n\t\twdays = ['Domingo', 'Lunes', 'Martes', 'Miercoles',\r\n\t\t\t\t 'Jueves', 'Viernes', 'Sabado']\r\n\t\tfor i in range(7):\r\n\t\t\tif wday == i: \r\n\t\t\t\treturn wdays[i]", "def check_weekday_of_date(self, date):\n return date.isoweekday() % 7", "def 
_read_sansculottide_date(match):\n day_string = match.group(1)\n d = None\n\n for n, candidate in enumerate(names.sans_culottides):\n if candidate.sanitized == day_string:\n d = n\n break\n else:\n return\n\n y = roman_to_decimal(match.group(2))\n\n return (y, 13, d)", "def convert_week_number_to_date(week_number, first_monday, weekday=0):\n assert(1 <= week_number <= 52)\n assert(0 <= weekday <= 6)\n first_gehol_year_day = datetime.strptime(first_monday, \"%d/%m/%Y\")\n num_days = (week_number-1) * 7 + weekday\n dt = timedelta(days = num_days)\n return first_gehol_year_day + dt", "def interpret_date(text):\n try:\n as_arrow = arrow.get(text, \"MM/DD/YYYY\").replace(\n tzinfo=tz.tzlocal())\n except:\n flask.flash(\"Date '{}' didn't fit expected format 12/31/2001\")\n raise\n return as_arrow.isoformat()", "def convert_date(self, date=None):\n if date is not None:\n format_str = '%d/%m/%Y'\n converted_date = datetime.strptime(date, format_str)\n return converted_date.date()", "def get_day_today() -> str:\n day = datetime.now().strftime(\"%w\")\n if day == '0': # Sunday\n return '6'\n elif day == '6': # Saturday\n return '5'\n elif day == '1': # Monday\n return '0'\n elif day == '2': # Tuesday\n return '1'\n elif day == '3': # Wednesday\n return '2'\n elif day == '4': # Thursday\n return '3'\n elif day == '5': # Friday\n return '4'", "def get_day(self):\n\n # First we get the first 8 bits stored in the day register\n # and translate it to an integer\n day_bcd = self.__read_register(_REGISTER_DAY)\n\n # Then we extract the digits and the tens\n tens = (day_bcd & 0x30) >> 4 # 0x30 = 0b00110000\n digit = (day_bcd & 0x0F) # 0x0F = 0b00001111\n\n # End return the last value\n return 10 * (tens) + digit", "def convert_date_of_attendance(attendance):\n if isinstance(attendance,list):\n for a in attendance:\n a.date_of_att = datetime.datetime.strptime(a.DATE_OF_ATTENDANCE,'%d/%m/%Y').date()\n elif isinstance(attendance,models.AttendanceModel):\n attendance.date_of_att = datetime.datetime.strptime\\\n (attendance.DATE_OF_ATTENDANCE, '%d/%m/%Y').date()", "def WEEKNUM(date, return_type=1):\n if return_type == 21:\n return ISOWEEKNUM(date)\n if return_type not in _weekday_type_map:\n raise ValueError(\"Invalid return type %s\" % (return_type,))\n (first, index) = _weekday_type_map[return_type]\n date = _make_datetime(date)\n jan1 = datetime.datetime(date.year, 1, 1)\n week1_start = jan1 - datetime.timedelta(days=(jan1.weekday() - first) % 7)\n return (date - week1_start).days // 7 + 1", "def _format_date(input_date, day_flag, sep_char=\"-\"):\n date_iso = input_date[6:10] + sep_char + input_date[0:2]\n if day_flag:\n date_iso = date_iso + sep_char + input_date[3:5]\n return date_iso", "def meetup_day(year, month, dow, wom):\n first_dow = monthrange(year, month)[0]\n days_in_month = monthrange(year, month)[1]\n possible_dates = []\n print str(year) + str(month) + dow + wom\n\n \"\"\"Build dictionary of possible dates based on dow\"\"\"\n for day in range(1, days_in_month+1):\n if datetime.date(year, month, day).strftime(\"%A\") == dow:\n print day\n possible_dates.extend([day])\n\n \"\"\"Perform logic on wom constraint\"\"\"\n if wom == \"teenth\":\n for day in possible_dates:\n if day > 12 and day < 20:\n return datetime.date(year, month, day)\n elif wom == \"last\":\n return datetime.date(year, month, possible_dates[-1])\n else:\n return datetime.date(year, month, possible_dates[ int(wom[:1]) - 1 ])" ]
[ "0.65448165", "0.6300779", "0.61504966", "0.61332077", "0.612294", "0.6048355", "0.60400957", "0.6037049", "0.60039747", "0.5941737", "0.5941737", "0.5905311", "0.5904082", "0.58850664", "0.5838948", "0.58364326", "0.58314884", "0.580064", "0.5735178", "0.5730053", "0.5729248", "0.57268554", "0.57084054", "0.5691262", "0.5684406", "0.56818694", "0.56695473", "0.56529087", "0.5635871", "0.5627405", "0.5612051", "0.55971324", "0.557004", "0.55655885", "0.5562843", "0.5558236", "0.5550678", "0.5546964", "0.55377346", "0.55370724", "0.552282", "0.55145484", "0.5511881", "0.55023015", "0.55020934", "0.5501638", "0.549292", "0.54914963", "0.54914963", "0.5464919", "0.5464347", "0.5458034", "0.5457369", "0.5454665", "0.5452623", "0.5428693", "0.54235107", "0.54201937", "0.5397793", "0.5388257", "0.53876317", "0.53706807", "0.5358964", "0.5356815", "0.53399444", "0.5339289", "0.533319", "0.53331447", "0.5324092", "0.5321155", "0.53207994", "0.53177327", "0.53161424", "0.5314768", "0.5306661", "0.5301909", "0.52997845", "0.52800256", "0.5274151", "0.5270926", "0.52705777", "0.5270248", "0.52609473", "0.5258276", "0.5254075", "0.5249743", "0.52457696", "0.52440566", "0.5240796", "0.52385634", "0.5237417", "0.5236338", "0.5226252", "0.5223298", "0.5218928", "0.5214445", "0.5198958", "0.51968515", "0.51913536", "0.518577" ]
0.73416775
0
This function generates the weather conditions using a trapezium-shaped distribution, given the temperature and its type (MIN/AVG/MAX)
def generate_weather_conditions(temperature, temp_type):
    # Piecewise-linear (trapezium-shaped) membership functions, one per
    # temperature type; any temperature below 5 is handled by the MIN branch.
    if temp_type == "MIN" or temperature < 5:
        if temperature > 10:
            return 0
        elif temperature >= 0:
            return (10. - temperature) / 10.
        else:
            return 1
    elif temp_type == "AVG":
        if temperature > 25:
            return 0
        elif temperature >= 15:
            return (25. - temperature) / (25. - 15)
        elif temperature >= 5:
            return (temperature - 5.) / (15 - 5.)
    elif temp_type == "MAX":
        if temperature > 40:
            return 1
        elif temperature >= 20:
            return (temperature - 20) / (40. - 20)
        else:
            return 0
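# Worked examples (hypothetical calls; the values follow directly from the
# piecewise formulas above):
# >>> generate_weather_conditions(7.5, "MIN")   # (10 - 7.5) / 10
# 0.25
# >>> generate_weather_conditions(20, "AVG")    # (25 - 20) / (25 - 15)
# 0.5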
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def genWeather():\n\n weather = random.choice(weather_conditions.keys())\n condition = weather_conditions[weather]\n (tMax, tMin) = condition[\"temperature\"]\n (pMax, pMin) = condition[\"pressure\"]\n (hMax, hMin) = condition[\"humidity\"]\n\n return weather + \"|\" + str(round(random.uniform(tMax, tMin), 1)) + \"|\" + \\\n str(round(random.uniform(pMax, pMin), 1)) + \"|\" + \\\n str(random.randrange(hMax, hMin, -1))", "def generate_condition_data(self):\n # set 'Conditions' column to NA\n self.output['Conditions'] = 'NA'\n\n # instantiate new MarkovChain object\n MC = MarkovChain()\n\n # apply forecast function on 'Conditions' column based on temperature\n # and humidity values for each observation period\n params = self.output[[\"Temperature\", \"Humidity\"]]\n self.output[['Conditions']] = params.apply(\n lambda x: MC.forecast_weather(x.values[0], x.values[1]), axis=1)", "def weather_of_weather_type(city):\n # get all weather type, and make a map relation\n check_all_type('weather_type')\n relation_type_map = {'้˜ด': '้˜ด', 'ๅฐ้›จ่ฝฌ้˜ด': 'ๅฐ้›จ', 'ไธญ้›จ': 'ไธญ้›จ', 'ๅฐ้›จ': 'ๅฐ้›จ',\n 'ๅคšไบ‘่ฝฌๅฐ้›จ': 'ๅฐ้›จ', 'ไธญ้›ช': '้›ช', 'ๅฐ้›จ่ฝฌ้›ช': 'ๅฐ้›จ',\n 'ๅคšไบ‘': 'ๅคšไบ‘', '้œพ': '้œพ', 'ๆ™ด': 'ๆ™ด', '้˜ด่ฝฌๅฐ้›จ': 'ๅฐ้›จ'}\n relation_index = {big_type: index for index, big_type in enumerate(sorted(set(relation_type_map.values())))}\n relation_index_map = {small_type: relation_index[big_type] for small_type, big_type in relation_type_map.items()}\n # use relation map to get a weather npy file\n\n time_index = np.load(exp_data_path + os.sep + 'station_list' + os.sep + 'time_index.npy', allow_pickle=True)\n time_index = dict(time_index.tolist())\n numpy_res = np.empty((len(time_index['index']),))\n with open(exp_data_path + os.sep + 'weather' + os.sep + city + os.sep + '{}_weather_type.csv'.format(city)) as f:\n reader = csv.reader(f)\n for line in reader:\n numpy_res[int(line[0])] = relation_index_map[line[1]]\n\n file_name = exp_data_path + os.sep + 'weather' + os.sep + city + os.sep + '{}_weather_type'.format(city)\n if os.path.exists(file_name):\n os.remove(file_name)\n np.save(file_name, numpy_res)", "def buildWeatherFromType(type, environment, turns=-1, forever=True):\r\n weatherClass = WeatherFactory.weatherTypeDictionary[type]\r\n return weatherClass(overCallbackFunction=environment.clearWeather, turns=turns, forever=forever)", "def thermal(isatom, freq, scalfac,linnonlin,T):\n if isatom != \"true\":\n nfreq = len(freq)\n\n vib_temp = []\n for ifreq in range(nfreq):\n freq[ifreq] = float(freq[ifreq]) * float(scalfac)\n vib_temp_new = c * 100.0 * h * float(freq[ifreq]) / kB\n vib_temp.append(vib_temp_new)\n\n dE_vib = 0\n for ifreq in range(nfreq):\n dE_vib = dE_vib + kB * vib_temp[ifreq] * j2au * ( 0.5 + 1 / ( np.exp(vib_temp[ifreq]/T) - 1) )\n\n dE_ZPE = 0.5 * sum(freq) * cmi2au\n\n if linnonlin == \"L\":\n dE_rot = kB * T * j2au\n elif linnonlin == \"NL\":\n dE_rot = kB * T * j2au * (3.0/2.0)\n else:\n with open(\"Thermochemistry.out\", \"a\") as ther_chem:\n ther_chem.write(\"ERROR: unknown entry for linear/nonlinear\")\n else:\n dE_ZPE = 0\n dE_vib = 0\n dE_rot = 0\n\n dE_tra = kB * T * j2au * (3.0/2.0)\n dE_thermal = (dE_vib - dE_ZPE) + dE_rot + dE_tra\n\n return(dE_ZPE, dE_vib, dE_rot, dE_tra, dE_thermal)", "def generate_heterogeneous_wind_map(self):\n speed_multipliers = self.heterogenous_inflow_config['speed_multipliers']\n x = self.heterogenous_inflow_config['x']\n y = self.heterogenous_inflow_config['y']\n z = self.heterogenous_inflow_config['z']\n\n if z 
is not None:\n # Compute the 3-dimensional interpolants for each wind direction\n # Linear interpolation is used for points within the user-defined area of values,\n # while the freestream wind speed is used for points outside that region\n in_region = [\n LinearNDInterpolator(list(zip(x, y, z)), multiplier, fill_value=1.0)\n for multiplier in speed_multipliers\n ]\n else:\n # Compute the 2-dimensional interpolants for each wind direction\n # Linear interpolation is used for points within the user-defined area of values,\n # while the freestream wind speed is used for points outside that region\n in_region = [\n LinearNDInterpolator(list(zip(x, y)), multiplier, fill_value=1.0)\n for multiplier in speed_multipliers\n ]\n\n self.het_map = in_region", "def T_c(I, T_amb, V, D, R_list, N_cond=1, T_range=[298,323,348], a_s=0.9, e_s=0.9, I_sun=900.0, temp_factor=1, wind_factor=1, n_iter=10):\n\n # def Q_gen(I, R):\n # w = I * I * R\n # return w\n\n # def Q_rad_in(I_sun, A_s, a_s):\n # w = I_sun * D * a_s\n # return w\n\n # def Q_conv(htcoeff, A_s, T_lin, T_amb):\n # w = htcoeff * A_s * (T_line - T_amb)\n # return w\n\n # def Q_rad_out(e_s, A_s, sigma, T_line, T_amb):\n # w = e_s * D * sigma * (T_line**4 - T_amb**4)\n # return w\n\n def reynolds(V, D, v, Mair=1.103):\n r = V * D / v\n return r\n\n def nusselt(Re, Pr):\n a = 0.62 * ( (Re) ** (1.0/2.0) ) * ( Pr ** (1.0/3.0) )\n b = (1 + (0.4/(Pr**(2.0/3.0) ) ) ) ** (1.0/4.0)\n c = (Re / 282000) ** (5.0/8.0)\n n = 0.3 + (a/b) * ( (1 + c) ** (4.0/5.0) )\n return n\n\n def air_prop(T_amb):\n # temp v k Pr\n air_prop = np.array([[200, 7.59e-6, 18.1e-3, 0.737],\n [250, 11.44e-6, 22.3e-3, 0.720],\n [300, 15.89e-6, 26.3e-3, 0.707],\n [350, 20.92e-6, 30.0e-3, 0.700],\n [400, 26.41e-6, 33.8e-3, 0.690],\n [450, 32.39e-6, 37.3e-3, 0.686],\n [500, 38.79e-6, 40.7e-3, 0.684],\n [550, 45.57e-6, 43.9e-3, 0.683],\n [600, 52.69e-6, 46.9e-3, 0.685]])\n\n v, k, Pr = np.apply_along_axis(lambda x: np.interp(T_amb, air_prop[:,0], x),\n 0, air_prop[:,1:])\n return v, k, Pr\n\n def R_T(R_lo, R_mid, R_hi, T_line, N_cond, T_range=T_range):\n if 273 <= T_line <= 323:\n R = ((R_lo + \n ((R_lo - R_mid)/(T_range[0] - T_range[1]))\n *(T_line - T_range[0]))/N_cond)\n elif T_line > 323:\n R = ((R_mid + \n ((R_mid - R_hi)/(T_range[1] - T_range[2]))\n *(T_line - T_range[1]))/N_cond)\n else:\n R = R_lo\n print('Out of bounds')\n return R\n\n R_lo, R_mid, R_hi = R_list[0], R_list[1], R_list[2]\n temp_factor = 1\n wind_factor = 1\n sigma = 5.6703e-8 # Stefan-Boltzmann constant\n\n T_amb = T_amb*temp_factor\n V = V*wind_factor\n\n v, k, Pr = air_prop(T_amb)\n Re = reynolds(V, D, v)\n htcoeff = nusselt(Re, Pr) * k / D\n\n def T_line(T_init):\n \n R = R_T(R_lo, R_mid, R_hi, T_init, N_cond)\n print R\n\n C4 = e_s * sigma * D * math.pi\n C3 = 0.0\n C2 = 0.0\n C1 = htcoeff * D * math.pi\n C0 = - ( I ** 2 * R\n + I_sun * a_s * D\n + htcoeff * D * math.pi * T_amb\n + e_s * D * math.pi * sigma * (T_amb ** 4))\n\n return np.roots([C4, C3, C2, C1, C0])\n\n T_c = T_amb\n \n for i in range(n_iter):\n T_arr = T_line(T_c)\n T_c = np.real(T_arr[np.where((np.real(T_arr) > 0) & ~(np.iscomplex(T_arr)))]).mean()\n print T_c\n\n return T_c", "def make_demand_tensor(city):\n # remove file\n # filter_station(city)\n\n\n # calculate each time unit for each city, and get a average count map\n # 0. get station number map {station: [num1, num2, num3]}(num3 == num1 + num2)\n # 1. get all record into a map {time unit: {station: [amount1, count1, amount2, count2, amount3, count3]}}\n # 2. 
combine two maps, then change into {time unit: {station: [emp1, num1, r1, emp2, num2, r2, emp3, num3, r3]}}\n number_map = {}\n for file in os.listdir(exp_data_path + os.sep + 'station' + os.sep + city):\n with open(exp_data_path + os.sep + 'station' + os.sep + city + os.sep + file) as f:\n reader = csv.reader(f)\n for line in reader:\n if line[0] not in number_map:\n number_map[line[0]] = [line[2],line[5],line[8]]\n if len(number_map) == station_count[city]:\n break\n\n count_map = {}\n for file in os.listdir(exp_data_path + os.sep + 'station' + os.sep + city):\n time_unit = file[8:12]\n if time_unit not in count_map:\n count_map[time_unit] = {}\n with open(exp_data_path + os.sep + 'station' + os.sep + city + os.sep + file) as f:\n reader = csv.reader(f)\n for line in reader:\n if len(line) < 10:\n continue\n station_id = line[0]\n amount1, amount2, amount3 = float(line[3]), float(line[6]), float(line[9])\n if station_id not in count_map[time_unit]:\n count_map[time_unit][station_id] = [0, 0, 0, 0, 0, 0]\n count_map[time_unit][station_id][0] += amount1\n count_map[time_unit][station_id][2] += amount2\n count_map[time_unit][station_id][4] += amount3\n count_map[time_unit][station_id][1] += 1\n count_map[time_unit][station_id][3] += 1\n count_map[time_unit][station_id][5] += 1\n\n average_map = {}\n for time_unit, station_map in count_map.items():\n average_map[time_unit] = {}\n for station_id, info in station_map.items():\n rate1 = count_map[time_unit][station_id][0] / count_map[time_unit][station_id][1]\n rate2 = count_map[time_unit][station_id][2] / count_map[time_unit][station_id][3]\n rate3 = count_map[time_unit][station_id][4] / count_map[time_unit][station_id][5]\n count1 = int(number_map[station_id][0])\n count2 = int(number_map[station_id][1])\n count3 = int(number_map[station_id][2])\n emp1, emp2, emp3 = math.ceil(rate1 * count1), math.ceil(rate2 * count2), math.ceil(rate3 * count3)\n average_map[time_unit][station_id] = [station_id, emp1, count1, rate1, emp2, count2, rate2, emp3, count3, rate3]\n\n\n # fill the missing record according to the above map\n num = 0\n for file in os.listdir(exp_data_path + os.sep + 'station' + os.sep + city):\n with open(exp_data_path + os.sep + 'station' + os.sep + city + os.sep + file) as f:\n count = len(f.readlines())\n if count == station_count[city]:\n continue\n num += 1\n print('fill for', file)\n print('before', count)\n\n with open(exp_data_path + os.sep + 'station' + os.sep + city + os.sep + file) as f:\n reader = csv.reader(f)\n a = [int(line[0]) for line in reader]\n\n with open(exp_data_path + os.sep + 'station' + os.sep + city + os.sep + file, \"a\") as f:\n writer = csv.writer(f)\n diff_set = set(range(0, station_count[city])) - set(a)\n time_unit = file[8:12]\n for station_id in sorted(diff_set):\n writer.writerow(average_map[time_unit][str(station_id)])\n\n with open(exp_data_path + os.sep + 'station' + os.sep + city + os.sep + file, \"r\") as f:\n print('after', len(f.readlines()))\n print('fill', num, 'files')\n\n\n # change csv file into npy file\n time_index_map = np.load(exp_data_path + os.sep + 'station_list' + os.sep + 'time_index.npy', allow_pickle=True)\n time_index_map = dict(time_index_map.tolist())\n\n numpy_all_data = np.empty((station_count[city], len(time_index_map['index']), 6))\n numpy_data = np.empty((station_count[city], len(time_index_map['index']), 3))\n for file in os.listdir(exp_data_path + os.sep + 'station' + os.sep + city):\n time_index = int(time_index_map['rev_index'][file[0:12]])\n with 
open(exp_data_path + os.sep + 'station' + os.sep + city + os.sep + file) as f:\n reader = csv.reader(f)\n for line in reader:\n station_id = int(line[0])\n try:\n # ไบคๆต(ๆ…ขๅ……)๏ผŒ็›ดๆต๏ผˆๆ…ขๅ……๏ผ‰๏ผŒๆ€ปๆ•ฐ\n count1, count2, count3 = int(line[2]), int(line[5]), int(line[8])\n # ๆ…ขๅ……้œ€ๆฑ‚๏ผŒๅฟซๅ……้œ€ๆฑ‚๏ผŒๆ€ป้œ€ๆฑ‚\n rate1, rate2, rate3 = float(line[3]), float(line[6]), float(line[9])\n except:\n print(file, line)\n break\n numpy_data[station_id, time_index] = [rate1, rate2, rate3]\n numpy_all_data[station_id, time_index] = [count1, rate1, count2, rate2, count3, rate3]\n\n data_file_name = exp_data_path + os.sep + 'station' + os.sep + 'demand_{}'.format(city)\n if os.path.exists(data_file_name):\n os.remove(data_file_name)\n np.save(data_file_name, numpy_data)\n print(numpy_data.shape)\n print(numpy_all_data.shape)\n\n all_data_file_name = exp_data_path + os.sep + 'station' + os.sep + 'all_demand_{}'.format(city)\n if os.path.exists(all_data_file_name):\n os.remove(all_data_file_name)\n np.save(all_data_file_name, numpy_all_data)\n pass", "async def test_temp_unit_fix(\n hass: HomeAssistant,\n client,\n climate_radio_thermostat_ct101_multiple_temp_units,\n climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints,\n integration,\n) -> None:\n state = hass.states.get(\"climate.thermostat\")\n assert state\n assert state.attributes[\"current_temperature\"] == 18.3\n\n state = hass.states.get(\"climate.z_wave_thermostat\")\n assert state\n assert state.attributes[\"current_temperature\"] == 21.1", "async def get_temperatures(self, **kwargs: Any) -> Dict[str, float]:\n\n return {\"M1\": 10.0, \"M2\": 12.0}", "def temporal_distribution(model):\n years = range(model.years[0], model.years[-1] + 1)\n hdd = load_file(model.temporal_files['hdd'], model.resolution, years, regrid_method='intensive')['hdd']\n cdd = load_file(model.temporal_files['cdd'], model.resolution, years, regrid_method='intensive')['cdd']\n\n weights = elec_sector_weights(os.path.join(model.root, model.gcam_db))\n weights = weights[(weights.region.isin(model.inputs.region[model.inputs.sector == 'Electricity'])) &\n (weights.region.isin(model.region_masks.region.data)) &\n (weights.year.isin(model.years))].set_index(['region', 'sector', 'year']\n )['value'].to_xarray().fillna(0).astype(np.float32)\n weights = interp_helper(weights)\n\n region_masks = model.region_masks.sel(region=weights.region)\n\n # this formula is annoying to implement because of the hdd/cdd thresholds and reallocation of weights\n hdd_sums = hdd.sum(dim='month')\n cdd_sums = cdd.sum(dim='month')\n\n # when hdd under threshold but cdd above threshold, cooling percent is added to heating signal\n hdd = xr.where((hdd_sums < 650) & (cdd_sums >= 450), cdd, hdd)\n # when cdd under threshold but hdd above threshold, heating percent is added to cooling signal\n cdd = xr.where((cdd_sums < 450) & (hdd_sums >= 650), hdd, cdd)\n # when neither are above threshold, both are reallocated to other category, and demand does not depend hdd or cdd\n hdd = xr.where((hdd_sums < 650) & (cdd_sums < 450), 1 / 12, hdd)\n cdd = xr.where((hdd_sums < 650) & (cdd_sums < 450), 1 / 12, cdd)\n\n # redo sums based on reallocation\n hdd_sums = hdd.sum(dim='month')\n cdd_sums = cdd.sum(dim='month')\n # prevent 0/0\n hdd_sums = xr.where(hdd_sums != 0, hdd_sums, 1)\n cdd_sums = xr.where(cdd_sums != 0, cdd_sums, 1)\n hdd /= hdd_sums\n cdd /= cdd_sums\n\n distribution = xr.concat([hdd, cdd, xr.full_like(hdd, 1/12)],\n dim=pd.Series(['Heating', 'Cooling', 'Other'], 
name='sector'))\n\n distribution = distribution.where(region_masks, 0)\n distribution = distribution.dot(weights, dims=('sector', 'region'))\n\n return distribution", "def temperatures():\n\n return station_9281", "def NuGrid_net(self,model_type='delay'):\n\n # Create list of masses and metallicites:\n self.masses = [12.0,15.0,20.0,25.0]\n self.metallicities = [0.02,0.01,0.006,0.001,0.0001]\t\t\n \n # First define names of yield tables and the remnant masses for each metallicity (in solar masses)\n if model_type == 'delay':\n filename=localpath+'input/yields/NuGrid/H NuGrid yields delay_total.txt'\n remnants = {}\n remnants[0.02] = [1.61,1.61,2.73,5.71] # This gives remnant masses for each mass\n remnants[0.01] = [1.61,1.61,2.77,6.05]\n remnants[0.006] = [1.62,1.62,2.79,6.18]\n remnants[0.001] = [1.62,1.62,2.81,6.35]\n remnants[0.0001] = [1.62,1.62,2.82,6.38]\n elif model_type == 'rapid':\n filename = localpath+'input/yields/NuGrid/H NuGrid yields rapid total.txt'\n remnants = {}\n remnants[0.02] = [1.44,1.44,2.70,12.81] # Define remnants from metallicities\n remnants[0.01] = [1.44,1.44,1.83,9.84]\n remnants[0.006] = [1.44, 1.44, 1.77, 7.84]\n remnants[0.001] = [1.44,1.44,1.76,5.88]\n remnants[0.0001] = [1.44,1.44,1.76,5.61]\n else:\n raise ValueError('Wrong type: must be delay or rapid')\n \n # Define which lines in the .txt files to use. \n # This defines cuts starting at each relevant table\n cuts={}\n for z in self.metallicities:\n cuts[z] = [] \n for mass in self.masses:\n txtfile=open(filename,\"r\")\n for line_no,line in enumerate(txtfile):\n if str(mass) in line and str(z) in line:\n cuts[z].append(line_no)\n \n line_end = line_no # Final line\n \n # Create list of elements taken from data-file (from first relevant table)\n data = np.genfromtxt(filename,skip_header=int(cuts[0.02][0])+4,\n skip_footer=line_end-int(cuts[0.02][0])-83,\n dtype=['<U8','<U15','<U15','<U15'])\n \n self.elements = [str(line[0][1:]) for line in data]\n \n self.table={} # Initialize final output\n \n for z in self.metallicities: # Produce subtable for each metallicity\n yield_subtable={}\n yield_subtable['Mass'] = self.masses\n yield_subtable['mass_in_remnants'] = np.divide(np.asarray(remnants[z]),self.masses) # Initialize lists\n for el in self.elements:\n yield_subtable[el] = []\n \n for m_index,mass in enumerate(self.masses): # Create data array for each mass\n unprocessed_mass = mass-remnants[z][m_index] # Mass not in remnants in Msun\n data = np.genfromtxt(filename,skip_header=int(cuts[z][m_index])+4,\n skip_footer=line_end-int(cuts[z][m_index])-83,dtype=['<U8','<U15','<U15','<U15']) # Read from data file\n \n # Now iterate over data-file and read in element names\n # NB: [1:]s are necessary as each element in txt file starts with & \t\t\n for line in data:\n el_name = str(line[0][1:]) # Name of element\n el_yield = float(line[1][1:]) # Yield in Msun\n el_init = float(line[2][1:]) # Initial mass fraction \n el_net = el_yield-el_init*unprocessed_mass\n yield_subtable[el_name].append(el_net/mass) # Net mass fraction\n \n # Calculate summed net yield - should be approximately 0\t\n summed_yields = np.zeros(len(self.masses))\n for el in self.elements:\n yield_subtable[el] = np.asarray(yield_subtable[el])\n summed_yields+=yield_subtable[el]\n \n # Compute mass not in remnants with summed net yield small correction\t\t\n yield_subtable['unprocessed_mass_in_winds'] = 1.0-yield_subtable['mass_in_remnants']-summed_yields\n \n # Restructure dictionary into record array for output\n all_keys = 
['Mass','mass_in_remnants','unprocessed_mass_in_winds']+self.elements\n list_of_arrays = [yield_subtable[key] for key in all_keys]\n restructure_subtable = np.core.records.fromarrays(list_of_arrays,names=all_keys)\n \n self.table[z] = restructure_subtable # This is output table for specific z\n \n # Yield table output is self.table", "def forecast_weather(self):\n pass", "def gensettings(T, Z=1, E=2, n=5e19, yMax=20):\n betaTh = DREAM.Formulas.getNormalizedThermalSpeed(T)\n pMax = yMax * betaTh\n Ec = DREAM.Formulas.getEc(T, n)\n\n ds = DREAMSettings()\n\n ds.collisions.lnlambda = Collisions.LNLAMBDA_THERMAL\n\n ds.eqsys.E_field.setPrescribedData(E)\n ds.eqsys.n_i.addIon(name='Ion', Z=Z, n=n/Z, iontype=IonSpecies.IONS_PRESCRIBED_FULLY_IONIZED) # Imaginary ion with charge Z\n ds.eqsys.n_cold.setPrescribedData(n)\n ds.eqsys.T_cold.setPrescribedData(T)\n ds.eqsys.f_hot.setInitialProfiles(rn0=0, n0=n, rT0=0, T0=T)\n ds.eqsys.n_re.setAvalanche(avalanche=Runaways.AVALANCHE_MODE_NEGLECT)\n ds.eqsys.f_hot.setAdvectionInterpolationMethod(ad_int=FHot.AD_INTERP_QUICK)\n \n ds.hottailgrid.setNxi(20)\n ds.hottailgrid.setNp(100)\n ds.hottailgrid.setPmax(pMax)\n\n ds.runawaygrid.setEnabled(False)\n\n ds.radialgrid.setB0(1)\n ds.radialgrid.setMinorRadius(0.1)\n ds.radialgrid.setWallRadius(0.1)\n ds.radialgrid.setNr(1)\n\n tMax0 = pMax*Ec / E\n ds.timestep.setTmax(.9*tMax0)\n ds.timestep.setNt(nTimeSteps)\n\n ds.other.include('fluid/runawayRate', 'fluid/gammaDreicer')\n\n \"\"\" \n If using MUMPS, computation time can be reduced by 30%:\n ds.solver.setLinearSolver(Solver.LINEAR_SOLVER_MUMPS)\n \"\"\"\n \n return ds", "def get_weather_with_time(time):\n global DARK\n\n if TIME in range(6, 9):\n DARK = False\n return 1\n elif TIME in range(9, 13):\n return 2\n elif TIME in range(13, 16):\n return 3\n elif TIME in range(16, 19):\n if HAS_RAINCOAT:\n return 4\n else:\n if not NICE_WEATHER:\n add_strength(False, 10)\n return 5\n\n elif TIME in range(19, 22):\n if HAS_RAINCOAT:\n return 7\n else:\n if not NICE_WEATHER:\n add_strength(False, 10)\n return 6\n\n else: # 9 - 6am\n DARK = True\n if HAS_FLASHLIGHT:\n return 9\n else:\n return 8", "def temperature_and_fan_control():\n temp_room_1 = room1_temp()\n temp_room_2 = room2_temp()\n outside_temp = intake_temp()\n if temp_room_1 < 26 or temp_room_2 < 26:\n fan_1(False) # Temp under 26 in either room turn fan OFF\n aux_fan(False)\n fan_speed_toogle(False)\n remove_timestamp('emergency_temp_timestamp.txt')\n elif temp_room_1 >= 26 and temp_room_1 < 28 or temp_room_2 >= 26 and \\\n temp_room_2 < 28:\n fan_1(True) # Temp between 26 and 28 turn fan on speed 1\n aux_fan(False)\n fan_speed_toogle(False)\n remove_timestamp('emergency_temp_timestamp.txt')\n elif temp_room_1 >= 28 and temp_room_1 < 30 or temp_room_2 >= 28 and\\\n temp_room_2 < 30:\n fan_speed_toogle(True) # Temp over 28 increase fan speed\n remove_timestamp('emergency_temp_timestamp.txt')\n elif temp_room_1 >= 30 or temp_room_2 >= 30:\n if len(initial_time('emergency_temp_timestamp.txt')) == 0:\n time_write_to_file('emergency_temp_timestamp.txt', 'w')\n else:\n if mins_since_event('emergency_temp_timestamp.txt') > 5:\n aux_fan(True) # Temp been over 30 for 5mins aux_fan ON\n fan_speed_toogle(True)", "def checkEnvironment(ontology_environment):\n ontology_time_of_day = ontology_environment.has_time_of_day[0] #get the TimeOfDay individual in the ontology \n #Check TimeOfDay property assertions in the ontology and create the PYOSCX TimeOfDay accordingly.\n if len(ontology_time_of_day.has_animation) != 0:\n 
animation = ontology_time_of_day.has_animation[0]\n if len(ontology_time_of_day.has_year) != 0:\n year = ontology_time_of_day.has_year[0]\n if len(ontology_time_of_day.has_month) != 0:\n month = ontology_time_of_day.has_month[0] \n if len(ontology_time_of_day.has_day) != 0:\n day = ontology_time_of_day.has_day[0] \n if len(ontology_time_of_day.has_hour) != 0:\n hour = ontology_time_of_day.has_hour[0]\n if len(ontology_time_of_day.has_minute) != 0:\n minute = ontology_time_of_day.has_minute[0]\n if len(ontology_time_of_day.has_second) != 0:\n second = ontology_time_of_day.has_second[0]\n xosc_time_of_day = xosc.TimeOfDay(animation,year,month,day,hour,minute,second)\n #Check Weather property assertions in the ontology and create the PYOSCX Weather accordingly.\n ontology_weather = ontology_environment.has_weather[0] #get the Weather individual in the ontology\n if len(ontology_weather.has_cloud_state) != 0:\n xosc_cloud_state = checkCloudState(ontology_weather.has_cloud_state[0])\n if len(ontology_weather.has_fog) !=0:\n xosc_fog = checkFog(ontology_weather.has_fog[0])\n if len(ontology_weather.has_sun) !=0:\n xosc_sun = checkSun(ontology_weather.has_sun[0])\n if len(ontology_weather.has_precipitation) !=0:\n xosc_precipitation = checkPrecipitation(ontology_weather.has_precipitation[0])\n xosc_weather = xosc.Weather(xosc_cloud_state,sun = xosc_sun, fog = xosc_fog, precipitation = xosc_precipitation)\n #Check RoadCondtion property assertions in the ontology and create the PYOSCX RoadCondition accordingly.\n ontology_road_condition = ontology_environment.has_road_condition[0] #get the RoadCondition individual in the ontology\n if len(ontology_road_condition.has_friction_scale_factor) !=0:\n friction_scale_factor = ontology_road_condition.has_friction_scale_factor[0]\n xosc_road_condition = xosc.RoadCondition(friction_scale_factor)\n environment_name = getNameFromIRI(ontology_environment.iri)\n return xosc.Environment(environment_name,xosc_time_of_day,xosc_weather,xosc_road_condition)", "def _set_weather(self, month):\n mode = 0.0\n if month in Weather.winter_months:\n mode = -1.0\n elif month in Weather.summer_months:\n mode = 1.0\n self.temp += min(max(-20.0, random.triangular(-10.0, 10.0, mode)), 100.0)\n self.status = random.choice(list(Weather.status))", "def thermal_i(mu,Ti):\n return 9.79*1.e5/np.sqrt(mu/Ti)/1.e2", "def make_environment(self):\n\t\tbase_layer = 0\n\t\tself.Gravity = 9.81\n\n\t\t#Private data for to define model\n\t\t__model_max_altitude = 87000\n\t\t__atmosphere_layers = {0:0, 11000:1, 20000:2, 32000:3, 47000:4, 51000:5, 71000:6}\n\t\t__layer_base_data = {\n\t\t\t0:{'temp':288.15, 'lapse':-0.0065, 'press':101325},\n\t\t\t1:{'temp':216.65, 'lapse':0, 'press':22632.1},\n\t\t\t2:{'temp':216.65, 'lapse':0.001, 'press':5474.89},\n\t\t\t3:{'temp':228.65, 'lapse':0.0028, 'press':868.019},\n\t\t\t4:{'temp':270.65, 'lapse':0, 'press':110.906},\n\t\t\t5:{'temp':270.65, 'lapse':-0.0028, 'press':66.9389},\n\t\t\t6:{'temp':214.65, 'lapse':-0.002, 'press':3.95642},\n\t\t\t}\n\t\t__gas_constant = 8.31432#e3\n\t\t__air_molar_mass = 0.0289644\n\t\t__specific_heat_ratio = 1.4\n\t\t__visc_lambda = 1.51204129e-6\n\t\t__visc_sutherland_const = 120.0\n\n\t\tif self.Altitude > __model_max_altitude:\n\t\t\traise helpers.extra_exceptions.ModelExtrapolationException(\n\t\t\t'Exceeded model maximum altitude')\n\n\t\tlayerKeys = __atmosphere_layers.keys()\n\t\tlayerKeys = list(layerKeys)\n\t\tlayerKeys.sort()\n\t\tfor layer in layerKeys:\n\t\t\tif self.Altitude >= layer:\n\t\t\t\tbase_layer = 
__atmosphere_layers[layer]\n\t\t\t\tbase_alt = layer\n\t\tbase_temp = __layer_base_data[base_layer]['temp']\n\t\tbase_lapse = __layer_base_data[base_layer]['lapse']\n\t\tbase_press = __layer_base_data[base_layer]['press']\n\n\t\tself.Temperature = base_temp + base_lapse * (self.Altitude - base_alt)\n\t\t+ self.Temperature_offset\n\n\t\tif base_lapse == 0:\n\t\t\tself.Pressure = base_press * \\\n\t\t\t\tnp.exp( (-self.Gravity*__air_molar_mass*(self.Altitude-base_alt)) \\\n\t\t\t\t/(__gas_constant*base_temp))\n\t\telse:\n\t\t\tself.Pressure = base_press * \\\n\t\t\t\t(base_temp/self.Temperature) ** \\\n\t\t\t\t(self.Gravity*__air_molar_mass/__gas_constant/base_lapse)\n\n\t\tself.Density = __air_molar_mass*self.Pressure / \\\n\t\t\t__gas_constant/self.Temperature\n\t\tself.Speed_of_sound = np.sqrt(__specific_heat_ratio*__gas_constant* \\\n\t\t\tself.Temperature/__air_molar_mass)\n\t\tself.Dynamic_viscosity = __visc_lambda*self.Temperature**(3.0/2.0)/ \\\n\t\t\t(self.Temperature+__visc_sutherland_const)", "def weather(obsblock, subarray=DEFAULT) :\n _closeTrial(\"WEATHER\",\n \"Array stopped due to weather conditions. \", subarray=subarray)\n commandlog(\"weather()\", subarray=subarray)\n newProject(\"ct017\", obsblock, \"\", False, subarray=subarray)\n intent(\"noise\", \"O\", subarray=subarray)\n # An integration is required to track the time in the PDB\n _pdbIntegration(subarray=subarray)\n alarmIntegdisable(subarray=subarray)\n print \"Integration alarm disabled...\"", "def weightThick(var,lats,types):\n \n if types == 'lens':\n sityr = np.empty((var.shape[0],var.shape[1],var.shape[2]))\n for ens in xrange(var.shape[0]):\n for i in xrange(var.shape[1]):\n for j in xrange(var.shape[2]):\n varq = var[ens,i,j,:,:]\n mask = np.isfinite(varq) & np.isfinite(lats)\n varmask = varq[mask]\n areamask = np.cos(np.deg2rad(lats[mask]))\n sityr[ens,i,j] = np.nansum(varmask*areamask)/np.sum(areamask)\n \n print 'Completed: Weighting per ensemble #%s!' % ense[ens]\n \n elif types == 'piomas':\n sityr = np.empty((var.shape[0],var.shape[1]))\n for i in xrange(var.shape[0]):\n for j in xrange(var.shape[1]):\n varq = var[i,j,:,:]\n mask = np.isfinite(varq) & np.isfinite(lats)\n varmask = varq[mask]\n areamask = np.cos(np.deg2rad(lats[mask]))\n sityr[i,j] = np.nansum(varmask*areamask)/np.sum(areamask)\n \n print '\\nCompleted: Yearly weighted SIT average!' 
\n return sityr", "def Static_T(Temp_m,M):\n return Temp_m/(1+(gamma-1)/2*M**2)", "def set_conditions(temp=None, humid=None):\n if temp == None:\n print('=== pretty stupid error, not setting temperature on'\n ' set_conditions()')\n thermo.sim_set_conditions(temp, humid)\n my_dict = set_status(None)\n # This will render a normal status frame in HTML\n return my_dict", "def test_take_temperature_readings0001(self, platform):\n\n temps = platform.take_temperature_readings()\n assert type(temps) is dict\n assert all([x.startswith(\"hw.sensors.\") for x in temps.iterkeys()])\n # check temperature readings are within reasonable parameters\n assert all([type(v) == float for v in temps.itervalues()])\n assert all([10 <= v <= 120 for v in temps.itervalues()])", "def test_temperatures(get_touchmat):\n touchmat = get_touchmat\n\n temperatures = touchmat.temperatures()\n info = touchmat.info()\n check_system_types.check_TemperatureInfoList(temperatures, [info])", "def create_data_model():\r\n data = {}\r\n data['period'] = int(sheet1.cell_value(1, getColumnIndex(sheet1,'่ฐƒๅบฆๅ‘จๆœŸ')))\r\n counttype_technician=3\r\n data['technician']=[]\r\n for i in range(1,1+counttype_technician):\r\n data['technician'].append(int(sheet1.cell_value(i, getColumnIndex(sheet1,'ๆŠ€ๅทฅๆ—ฅๅทฅ่ต„'))))\r\n data['base'] = {}\r\n count_base=1 # ็ ๅคดไธชๆ•ฐ\r\n data['base']['coordinate']=[]\r\n for i in range(1,1+count_base):\r\n base_x=sheet1.cell_value(i, getColumnIndex(sheet1,'็ ๅคดๅๆ ‡X'))\r\n base_y=sheet1.cell_value(i, getColumnIndex(sheet1,'็ ๅคดๅๆ ‡Y'))\r\n data['base']['coordinate'].append((base_x,base_y))\r\n\r\n data['base']['technician']=[]\r\n for b in range(0,count_base):\r\n data['base']['technician'].append([])\r\n for j in range(counttype_technician):\r\n data['base']['technician'][b].append([])\r\n for i in range(data['period']):\r\n data['base']['technician'][b][j].append(int(sheet1.cell_value(i+1, getColumnIndex(sheet1,'%d็ฑปๆŠ€ๅทฅๆ€ปไบบๆ•ฐ'% (j+1)))))\r\n\r\n data['wind_farm'] = {}\r\n count_wind_farm=2 #้œ€่ฆ็ปดไฟฎ็š„้ฃŽ็”ตๅœบไธชๆ•ฐ\r\n count_wind_turbine=[8,8] #ๆฏไธช้ฃŽ็”ตๅœบ้œ€่ฆ็ปดไฟฎ็š„้ฃŽๆœบไธชๆ•ฐ\r\n count_wind_turbine_sum=[36,36]# ๆฏไธช้ฃŽ็”ตๅœบๆ‰€ๆœ‰็š„้ฃŽๆœบไธชๆ•ฐ\r\n data['wind_farm']['maintenance_time']=[]\r\n count_wturbine=[] #็”จไบŽ่ฎกๆ•ฐ,่ฎฐๅฝ•ไธๅŒ้ฃŽ็”ตๅœบ้ฃŽๆœบไฟกๆฏๅœจExcelไฝ็ฝฎ\r\n count_wturbine_l=0\r\n for i in range(count_wind_farm):\r\n count_wturbine.append(count_wturbine_l)\r\n count_wturbine_l=count_wturbine_l+count_wind_turbine[i]\r\n count_turbine=[]\r\n count_turbine_l=0\r\n for i in range(count_wind_farm):\r\n count_turbine.append(count_turbine_l)\r\n count_turbine_l=count_turbine_l+count_wind_turbine_sum[i]\r\n\r\n ###่ฎพๅฎšไธŽ้ฃŽ็”ตๅœบ็›ธๅ…ณ็š„ๅ‚ๆ•ฐ\r\n for i in range(count_wind_farm):\r\n data['wind_farm']['maintenance_time'].append([])\r\n for j in range(count_wind_turbine[i]):\r\n data['wind_farm']['maintenance_time'][i].append(int(sheet1.cell_value(j+1+count_wturbine[i], getColumnIndex(sheet1,'้ฃŽๆœบ็ปดๆŠคๆ—ถ้—ด'))))\r\n\r\n data['wind_farm']['technician']=[]\r\n for i in range(count_wind_farm):\r\n data['wind_farm']['technician'].append([])\r\n for j in range(count_wind_turbine[i]):\r\n data['wind_farm']['technician'][i].append([])\r\n for k in range(counttype_technician):\r\n data['wind_farm']['technician'][i][j].append(int(sheet1.cell_value(j+1+count_wturbine[i], getColumnIndex(sheet1,'%d็ฑปๆŠ€ๅทฅ้œ€ๆฑ‚้‡'% (k+1)))))\r\n\r\n\r\n data['wind_farm']['parts_weight']=[]\r\n for i in range(count_wind_farm):\r\n 
data['wind_farm']['parts_weight'].append([])\r\n for j in range(count_wind_turbine[i]):\r\n data['wind_farm']['parts_weight'][i].append(int(sheet1.cell_value(j+1+count_wturbine[i], getColumnIndex(sheet1,'้ฃŽๆœบๆ‰€้œ€ๅค‡ไปถ้‡้‡'))))\r\n\r\n data['wind_farm']['present']=[]\r\n for i in range(count_wind_farm):\r\n data['wind_farm']['present'].append([])\r\n for j in range(count_wind_turbine[i]):\r\n data['wind_farm']['present'][i].append(int(sheet1.cell_value(j+1+count_wturbine[i], getColumnIndex(sheet1,'้ฃŽๆœบๅœจ็ปดไฟฎๆ—ถๆ˜ฏๅฆ้œ€่ฆ่ˆนๅœๆณŠ'))))\r\n\r\n data['wind_farm']['deadline']=[]\r\n for i in range(count_wind_farm):\r\n data['wind_farm']['deadline'].append([])\r\n for j in range(count_wind_turbine[i]):\r\n data['wind_farm']['deadline'][i].append(int(sheet1.cell_value(j+1+count_wturbine[i], getColumnIndex(sheet1,'ๆœ€ๆ™šๅปบ่ฎฎ็ปดไฟฎๆ—ถ้—ด'))))\r\n\r\n data['wind_farm']['penalty_cost']=[]\r\n for i in range(count_wind_farm):\r\n data['wind_farm']['penalty_cost'].append([])\r\n for j in range(count_wind_turbine[i]):\r\n data['wind_farm']['penalty_cost'][i].append(int(sheet1.cell_value(j+1+count_wturbine[i], getColumnIndex(sheet1,'้€พๆ—ถๆƒฉ็ฝšๆˆๆœฌ'))))\r\n\r\n data['vessel'] = {}\r\n counttype_vessel=3\r\n data['vessel']['capacity']=[]\r\n for i in range(counttype_vessel):\r\n data['vessel']['capacity'].append(int(sheet1.cell_value(i+1, getColumnIndex(sheet1,'่ˆน็š„ๅค‡ไปถๅฎน้‡'))))\r\n\r\n data['vessel']['technician']=[]\r\n for i in range(counttype_vessel):\r\n data['vessel']['technician'].append(int(sheet1.cell_value(i+1, getColumnIndex(sheet1,'่ˆน็š„ไบบๅ‘˜ๅฏ่ฝฝ้‡'))))\r\n\r\n data['vessel']['cost']=[]\r\n for i in range(counttype_vessel):\r\n data['vessel']['cost'].append(int(sheet1.cell_value(i+1, getColumnIndex(sheet1,'่ˆน็š„ๆฒน่ดน'))))\r\n\r\n data['vessel']['speed']=[]\r\n for i in range(counttype_vessel):\r\n data['vessel']['speed'].append(int(sheet1.cell_value(i+1, getColumnIndex(sheet1,'่ˆน็š„่ˆช้€Ÿ'))))\r\n\r\n data['vessel']['trans_time']=[] # ่ฟ™้‡Œ้ป˜่ฎค่ฝฌ็งปๆ—ถ้—ด่ทŸ่ˆน็š„็ฑปๅž‹ๆฒกๆœ‰ๅ…ณ็ณป๏ผŒไธŽๆ—ถๆœŸๆœ‰ๅ…ณ\r\n for i in range(data['period']):\r\n data['vessel']['trans_time'].append(sheet1.cell_value(i+1, getColumnIndex(sheet1,'ๆŠ€ๅทฅ่ฝฌ็งปๆ—ถ้—ด')))\r\n\r\n data['vessel']['time_window']=[]\r\n for i in range(counttype_vessel):\r\n data['vessel']['time_window'].append([])\r\n for j in range(data['period']):\r\n data['vessel']['time_window'][i].append([])\r\n for k in range(count_wind_farm):\r\n data['vessel']['time_window'][i][j].append(int(sheet1.cell_value(j+1, getColumnIndex(sheet1,'้ฃŽ็”ตๅœบ%d่ˆน%dๅฏไฝœไธšๆ—ถ้—ด'%(k+1,i+1)))))\r\n\r\n # # ้ฃŽๆœบๅๆ ‡\r\n # data['wind_farm']['coordinate']=[]\r\n # for i in range(count_wind_farm):\r\n # data['wind_farm']['coordinate'].append([])\r\n # for j in range(72):\r\n # turbine_x = sheet1.cell_value(j+1, getColumnIndex(sheet1, '้ฃŽๆœบๅๆ ‡X'))\r\n # turbine_y = sheet1.cell_value(j+1, getColumnIndex(sheet1, '้ฃŽๆœบๅๆ ‡Y'))\r\n # data['wind_farm']['coordinate'][i].append((turbine_x, turbine_y))\r\n\r\n # ้ฃŽๆœบๅๆ ‡\r\n data['wind_farm']['coordinate']=[]\r\n for i in range(count_wind_farm):\r\n data['wind_farm']['coordinate'].append([])\r\n for j in range(count_wind_turbine_sum[i]):\r\n turbine_x = sheet1.cell_value(j+1+count_turbine[i], getColumnIndex(sheet1, '้ฃŽๆœบๅๆ ‡X'))\r\n turbine_y = sheet1.cell_value(j+1+count_turbine[i], getColumnIndex(sheet1, '้ฃŽๆœบๅๆ ‡Y'))\r\n data['wind_farm']['coordinate'][i].append((turbine_x, turbine_y))\r\n\r\n\r\n data['wind_farm']['task']=[]\r\n for i in 
range(count_wind_farm):\r\n data['wind_farm']['task'].append([])\r\n for j in range(count_wind_turbine[i]):\r\n data['wind_farm']['task'][i].append(int(sheet1.cell_value(j+1+count_wturbine[i], getColumnIndex(sheet1,'้œ€่ฆ็ปดไฟฎ้ฃŽๆœบ็ผ–ๅท'))))\r\n\r\n return data", "def power_output_existing_thermal_rule(_m, g, y, s, t):\r\n\r\n if y != m.Y.last() and t != m.T.last():\r\n return (- m.sigma_1[g, y, s, t] + m.sigma_2[g, y, s, t]\r\n + m.sigma_20[g, y, s, t] - m.sigma_20[g, y, s, t + 1]\r\n - m.sigma_23[g, y, s, t] + m.sigma_23[g, y, s, t + 1]\r\n - m.lamb[self.k(m, g), y, s, t]\r\n + ((m.DELTA[y] * m.RHO[y, s]) * (\r\n m.C_MC[g, y] + ((m.EMISSIONS_RATE[g] - m.baseline[y]) * m.permit_price[y])))\r\n == 0)\r\n\r\n elif y != m.Y.last() and t == m.T.last():\r\n return (- m.sigma_1[g, y, s, t] + m.sigma_2[g, y, s, t]\r\n + m.sigma_20[g, y, s, t]\r\n - m.sigma_23[g, y, s, t]\r\n - m.lamb[self.k(m, g), y, s, t]\r\n + ((m.DELTA[y] * m.RHO[y, s]) * (\r\n m.C_MC[g, y] + ((m.EMISSIONS_RATE[g] - m.baseline[y]) * m.permit_price[y])))\r\n == 0)\r\n\r\n elif y == m.Y.last() and t != m.T.last():\r\n return (- m.sigma_1[g, y, s, t] + m.sigma_2[g, y, s, t]\r\n + m.sigma_20[g, y, s, t] - m.sigma_20[g, y, s, t + 1]\r\n - m.sigma_23[g, y, s, t] + m.sigma_23[g, y, s, t + 1]\r\n - m.lamb[self.k(m, g), y, s, t]\r\n + ((m.DELTA[y] * m.RHO[y, s]) * (1 + (1 / m.INTEREST_RATE)) * (\r\n m.C_MC[g, y] + ((m.EMISSIONS_RATE[g] - m.baseline[y]) * m.permit_price[y])))\r\n == 0)\r\n\r\n elif y == m.Y.last() and t == m.T.last():\r\n return (- m.sigma_1[g, y, s, t] + m.sigma_2[g, y, s, t]\r\n + m.sigma_20[g, y, s, t]\r\n - m.sigma_23[g, y, s, t]\r\n - m.lamb[self.k(m, g), y, s, t]\r\n + ((m.DELTA[y] * m.RHO[y, s]) * (1 + (1 / m.INTEREST_RATE)) * (\r\n m.C_MC[g, y] + ((m.EMISSIONS_RATE[g] - m.baseline[y]) * m.permit_price[y])))\r\n == 0)\r\n\r\n else:\r\n raise Exception(f'Unhandled case: {g, y, s, t}')", "def calculateTemperature(self):\n \n # CIE XYZ space\n self.X = (1/0.17697)*((0.49)*self.R + (0.31)*self.G + (0.2)*self.B)\n self.Y = (1/0.17697)*((0.17697)*self.R + (0.81240)*self.G + (0.01063)*self.B)\n self.Z = (1/0.17697)*((0)*self.R + (0.010)*self.G + (0.99)*self.B)\n\n # CIE Chromaticities xy\n self.x = self.X/(self.X + self.Y + self.Z)\n self.y = self.Y/(self.X + self.Y + self.Z)\n \n # CIE Chromaticities uv\n #self.u = (0.4661*self.x + 0.1593*self.y)/(self.y - 0.15735*self.x + 0.2424)\n #self.v = (0.6581*self.y)/(self.y - 0.15735*self.x + 0.2424)\n \n # constant for McCamy's/Hernandez-Andrรฉs formula\n n = (self.x - self.x_e)/(self.y - self.y_e)\n \n # Correlated color temperature according to Hernรกndez-Andrรฉs (1999)\n self.color_temp = ( self.A_0 + \n self.A_1*np.exp(-n/self.t_1) + \n self.A_2*np.exp(-n/self.t_2) + \n self.A_3*np.exp(-n/self.t_3) )\n \n # Delete too high values\n self.color_temp[self.color_temp > 30000] = 0\n \n # Affichage de la CCT\n self.mean_temp = int(round(self.color_temp.mean()))\n self.mean_temp_label.setText(\"Temperature moyenne = \"+str(self.mean_temp))\n self.mean_temp_label.adjustSize()\n \t\n # Affichage de l'illuminance (Y)\n self.mean_illu = int(round((self.Y.mean())))\n self.illuminance_label.setText(\"Illuminance moyenne = \"+str(self.mean_illu))\n self.illuminance_label.adjustSize()", "def test_template_assignment():\n humidity_template = ConditionTemplate(\"Humidity\", bounds=RealBounds(0.5, 0.75, \"\"))\n template = ProcessTemplate(\"Dry\", conditions=[[humidity_template, RealBounds(0.5, 0.65, \"\")]])\n ProcessSpec(\"Dry a polymer\", template=template, conditions=[\n 
Condition(\"Humidity\", value=NominalReal(0.6, \"\"), template=humidity_template)])", "def temperature_on_pressure_levels() -> Cube:\n temperatures = np.array([300, 286, 280, 274, 267, 262, 257, 245], dtype=np.float32)\n data = np.broadcast_to(\n temperatures.reshape((1, len(temperatures), 1, 1)), (2, len(temperatures), 3, 2)\n )\n t_cube = set_up_variable_cube(\n data,\n pressure=True,\n height_levels=np.arange(100000, 29999, -10000),\n name=\"temperature_on_pressure_levels\",\n units=\"K\",\n attributes=LOCAL_MANDATORY_ATTRIBUTES,\n )\n return t_cube", "def make_data(args):\n mass_MJ = 1.142\n radius_RJ = 1.138\n gravity_SI = 23.970 \n Rs_Rsun = 0.805\n inc = 85.71\n t0 = 2454037.612\n sma = 8.839304998 # semi major axis in stellar radiu\n orb_per = 2.21857545 #in days\n ecc = 0.0041\n w_peri = -24.1 # longiutude of periastron\n limbdark = \"linear\"\n \n u_limbdark = [0.35]\n \n num_transit = 1\n \n dates = [2458383.77055943, 2458383.77384704, 2458383.77707875,\n 2458383.78030307, 2458383.78358918, 2458383.78681399,\n 2458383.79004101, 2458383.79326712, 2458383.79655574,\n 2458383.79984545, 2458383.80307906, 2458383.80629228,\n 2458383.80958299, 2458383.8128124 , 2458383.81603942,\n 2458383.81925973, 2458383.82248474, 2458383.82577195,\n 2458383.82900097, 2458383.83223048, 2458383.8354501 ,\n 2458383.83874811, 2458383.84196822, 2458383.84520053,\n 2458383.84847654, 2458383.85170346, 2458383.85493727,\n 2458383.85821578, 2458383.86144419, 2458383.86466921,\n 2458383.86790322, 2458383.87118233, 2458383.87441074,\n 2458383.87763435, 2458383.88092406, 2458383.88414957],\n #don't forget the coma at the end if there is only one transit !!!!!\n \n\n\n # Wmean = [2400.695909757236,2328.5343131275904,1972.9809993156186,\n # 1927.2107049022654,]\n # Wmean = [1634.5200937047302,1600.8109822367207],[1670.071564637037,1634.5459486709924,1600.8124596368639],\n Wmean = [2328.5343131275904], \n orderstot = [33]\n orders = [33],\n # orderstot = [46,47,48]\n # orders = [47,48],[46,47,48],\n \n # Vfiles = [\"Vcorr47_DRS2.txt\",\n # \"Vcorr48_DRS2.txt\",\n # ],[\"Vcorr46_Jun19-1_DRS2.txt\",\n # \"Vcorr47_Jun19-1_DRS2.txt\",\n # \"Vcorr48_Jun19-1_DRS2.txt\"\n # ],\n Vfiles = [\"V33_CO.txt\"], \n \n Ifiles = [\"I33_CO.txt\"],\n \n # if Stdfiles are not needed, for example with the Brogi likelihood, \n # uncomment the next line\n #Stdfiles = []\n Stdfiles = [\"Std33_CO.txt\"],\n \n lambdas = np.array([[ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [2291.84518119, 2362.55271775],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1939.42197854, 1998.81548771],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1758.50261646, 1812.39702422],\n [1718.50054581, 1771.64067835],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1512.43747007, 1558.89713666],\n [1484.77586677, 1528.30354258],\n [1457.06015806, 1498.88570675],\n [1429.75333156, 1470.19096444],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1306.967007 , 1343.21643463],\n [1285.02046052, 1320.56072659],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. 
],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1167.78440327, 1198.13940642],\n [1150.59417256, 1178.48372217],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ]])\n\n return dict(\n mass_MJ=mass_MJ,\n radius_RJ=radius_RJ,\n\t\tgravity_SI = gravity_SI,\n\t\tRs_Rsun = Rs_Rsun,\n\t\tinc = inc,\n\t\tt0 = t0,\n\t\tsma = sma,\n\t\torb_per = orb_per,\n\t\tecc = ecc,\n\t\tw_peri = w_peri,\n Wmean = Wmean,\n\t\tlimbdark = limbdark,\n\t\tu_limbdark = u_limbdark,\n\t\tdates = dates,\n\t\tlambdas = lambdas,\n orders = orders,\n orderstot=orderstot,\n num_transit=num_transit,\n\t\tVfiles = Vfiles,\n\t\tIfiles = Ifiles,\n\t\tStdfiles = Stdfiles\n\t\t )", "def generate_polynesian_weather_data():\n weather_path = os.path.dirname(os.path.realpath(__file__))\n low_fp = weather_path + \"/polynesia_weather/low/1976/\"\n med_fp = weather_path + \"/polynesia_weather/med/1985/\"\n high_fp = weather_path + \"/polynesia_weather/high/1982/\"\n low_name = \"polynesia_1976\"\n med_name = \"polynesia_1985\"\n high_name = \"polynesia_1982\"\n generate_year_weather_data(low_fp, low_name)\n generate_year_weather_data(med_fp, med_name)\n generate_year_weather_data(high_fp, high_name)", "def temps(lieu):\r\n\r\n key = '5a72ceae1feda40543d5844b2e04a205'\r\n localisation = \"http://api.openweathermap.org/data/2.5/weather?q={0},fr&appid={1}\"\r\n localisation = localisation.format(lieu, key)\r\n request_html = requests.get(localisation)\r\n data = request_html.json()\r\n\r\n weather = data['weather'][0]['main']\r\n\r\n if weather == \"Clear\":\r\n weather = \"Beau\"\r\n\r\n elif weather == \"Clouds\":\r\n weather = \"Nuageux\"\r\n return weather", "def trajectories(t_upper=3600*24*687, h=100, m1=5.972e+24, m2=6.417e+23,\n m3=1.989e+30, a1=1.0*1.496e+11, a2=1.52*1.496e+11):\n\n # We check if parameters are all positive\n\n list_parameters = [t_upper, h, m1, m2, m3,\n a1, a2]\n\n for parameters in list_parameters:\n\n if parameters < 0:\n print(f'You have entered a negative parameter')\n\n # initial values for planet 1 in x, y and z direction\n x_i1 = a1\n y_i1 = 0\n v_x1i = 0\n v_y1i = 29779.301841746023\n z_i1 = 0\n v_z1i = 0\n\n # initial values for planet 2 in x, y and z direction\n x_i2 = a2\n y_i2 = 0\n v_x2i = 0\n v_y2i = 24154.203325249873\n z_i2 = 0\n v_z2i = 0\n\n # initial values for Sun in x, y and z direction\n x_i3 = 0\n y_i3 = 0\n v_x3i = 0\n v_y3i = 0\n z_i3 = 0\n v_z3i = 0\n\n# Initial positions and velocities\n r = np.array([x_i1, y_i1, v_x1i, v_y1i, x_i2,\n y_i2, v_x2i, v_y2i, x_i3, y_i3, v_x3i, v_y3i,\n z_i1, z_i2, z_i3, v_z1i, v_z2i, v_z3i])\n\n # We create vectors which will contains the trajectories\n # and velocities of each bodies\n x_pnts1 = [x_i1]\n y_pnts1 = [y_i1]\n v_x_pnts1 = [v_x1i]\n v_y_pnts1 = [v_y1i]\n\n x_pnts2 = [x_i2]\n y_pnts2 = [y_i2]\n v_x_pnts2 = [v_x2i]\n v_y_pnts2 = [v_y2i]\n\n x_pnts3 = [x_i3]\n y_pnts3 = [y_i3]\n v_x_pnts3 = [v_x3i]\n v_y_pnts3 = [v_y3i]\n\n x_pnts3 = [x_i3]\n y_pnts3 = [y_i3]\n v_x_pnts3 = [v_x3i]\n v_y_pnts3 = [v_y3i]\n\n z_pnts1 = [z_i1]\n z_pnts2 = [z_i2]\n z_pnts3 = [z_i3]\n\n v_z_pnts1 = [v_z1i]\n v_z_pnts2 = [v_z2i]\n v_z_pnts3 = [v_z3i]\n\n m1 = m1\n m2 = m2\n m3 = m3\n a1 = a1\n a2 = a2\n\n # We create a vector which will contain the time\n # Initial value\n t_i = 0.0\n t_values = [t_i]\n\n for t in range(0, t_upper, h):\n\n # We used the RK4 formula here\n k1 = h*derivative(r=r, t=0, m1=5.972e+24, m2=m2, m3=1.989e+30,\n 
a1=a1, a2=1.52*1.496e+11)\n k2 = h*derivative(r=r + 0.5*k1, t=t + (h/2), m1=5.972e+24,\n m2=6.417e+23, m3=1.989e+30, a1=1.0*1.496e+11,\n a2=1.52*1.496e+11)\n k3 = h*derivative(r=r + 0.5*k2, t=t + (h/2), m1=5.972e+24,\n m2=6.417e+23, m3=1.989e+30, a1=1.0*1.496e+11,\n a2=1.52*1.496e+11)\n k4 = h*derivative(r=r + h*k3, t=t+h, m1=5.972e+24, m2=6.417e+23,\n m3=1.989e+30, a1=1.0*1.496e+11, a2=1.52*1.496e+11)\n\n # We calculate the new vector r\n r += (k1 + 2*k2 + 2*k3 + k4)*(1.0/6.0)\n\n # We add the new points calculated\n x_pnts1.append(r[0])\n y_pnts1.append(r[1])\n\n v_x_pnts1.append(r[2])\n v_y_pnts1.append(r[3])\n\n x_pnts2.append(r[4])\n y_pnts2.append(r[5])\n v_x_pnts2.append(r[6])\n v_y_pnts2.append(r[7])\n\n x_pnts3.append(r[8])\n y_pnts3.append(r[9])\n v_x_pnts3.append(r[10])\n v_y_pnts3.append(r[11])\n\n z_pnts1.append(r[12])\n z_pnts2.append(r[13])\n z_pnts3.append(r[14])\n\n v_z_pnts1.append(r[15])\n v_z_pnts2.append(r[16])\n v_z_pnts3.append(r[17])\n\n t_values.append(t)\n\n # We return all the trajectories\n return x_pnts1, y_pnts1, x_pnts2, y_pnts2, x_pnts3, y_pnts3, z_pnts1, z_pnts2, z_pnts3", "def interpolating_model_DA(temp,grav,m_type='da2014'):\n # PARAMETERS # \n dir_models = basedir + '/WDModels_Koester.'+m_type+'_npy/'\n if m_type==\"pier\":\n teff=np.array([1500.,1750.,2000.,2250.,2500.,2750.,3000.,3250.,3500.,\n 3750.,4000.,4250.,4500.,4750.,5000.,5250.,5500.,6000.,\n 6500.,7000.,7500.,8000.,8500.,9000.,9500.,10000.,10500.,\n 11000.,11500.,12000.,12500.,13000.,13500.,14000.,14500.,\n 15000.,15500.,16000.,16500.,17000.,20000.,25000.,30000.,\n 35000.,40000.,45000.,50000.,55000.,60000.,65000.,70000.,\n 75000.,80000.,85000.,90000.])\n logg=np.array([6.50,7.00,7.50,7.75,8.00,8.25,8.50,9.00,9.50])\n elif m_type==\"da2014\":\n teff=np.array([6000.,6250.,6500.,6750.,7000.,7250.,7500.,7750.,8000.,\n 8250.,8500.,8750.,9000.,9250.,9500.,9750.,10000.,10100.,\n 10200.,10250.,10300.,10400.,10500.,10600.,10700.,10750.,\n 10800.,10900.,11000.,11100.,11200.,11250.,11300.,11400.,\n 11500.,11600.,11700.,11750.,11800.,11900.,12000.,12100.,\n 12200.,12250.,12300.,12400.,12500.,12600.,12700.,12750.,\n 12800.,12900.,13000.,13500.,14000.,14250.,14500.,14750.,\n 15000.,15250.,15500.,15750.,16000.,16250.,16500.,16750.,\n 17000.,17250.,17500.,17750.,18000.,18250.,18500.,18750.,\n 19000.,19250.,19500.,19750.,20000.,21000.,22000.,23000.,\n 24000.,25000.,26000.,27000.,28000.,29000.,30000.,35000.,\n 40000.,45000.,50000.,55000.,60000.,65000.,70000.,75000.,\n 80000.,90000.,100000.])\n logg=np.array([4.00,4.25,4.50,4.75,5.00,5.25,5.50,5.75,6.00,6.25,6.50,\n 6.75,7.00,7.25,7.50,7.75,8.00,8.25,8.50,8.75,9.00,9.25,\n 9.50])\n if (m_type=='pier') & (temp<1500. or temp>90000. or grav<6.5 or grav>9.5): \n return [],[]\n elif (m_type=='da2014') & (temp<6000. or temp>100000. 
or grav<4.0 or grav>9.5): \n return [],[]\n # INTERPOLATION #\n g1,g2 = np.max(logg[logg<=grav]),np.min(logg[logg>=grav])\n if g1!=g2: g = (grav-g1)/(g2-g1)\n else: g=0\n t1,t2 = np.max(teff[teff<=temp]),np.min(teff[teff>=temp])\n if t1!=t2: t = (temp-t1)/(t2-t1) \n else: t=0\t\n if m_type =='da2014': models = ['da%06d_%d_2.7.npy'%(i, j*100) for i in [t1,t2] \n for j in [g1,g2]]\n else: models = ['WD_%.2f_%d.0.npy'%(j, i) for i in [t1,t2] for j in [g1,g2]]\n try:\n m11, m12 = np.load(dir_models+models[0]), np.load(dir_models+models[1])\t\n m21, m22 = np.load(dir_models+models[2]), np.load(dir_models+models[3])\t\n flux_i = (1-t)*(1-g)*m11[:,1]+t*(1-g)*m21[:,1]+t*g*m22[:,1]+(1-t)*g*m12[:,1]\n return np.dstack((m11[:,0], flux_i))[0]\n except: return [],[]", "def simulate_wc(pars):\n \n # Set parameters\n tau_E, a_E, theta_E = pars['tau_E'], pars['a_E'], pars['theta_E']\n tau_I, a_I, theta_I = pars['tau_I'], pars['a_I'], pars['theta_I']\n wEE, wEI = pars['wEE'], pars['wEI'] \n wIE, wII = pars['wIE'], pars['wII']\n I_ext_E, I_ext_I = pars['I_ext_E'], pars['I_ext_I'] \n E_init, I_init = pars['E_init'], pars['I_init'] \n dt, range_t = pars['dt'], pars['range_t'] \n Lt = range_t.size \n \n # Initialize activity\n E = np.zeros(Lt)\n I = np.zeros(Lt)\n E[0] = E_init\n I[0] = I_init\n I_ext_E = I_ext_E * np.ones(Lt)\n I_ext_I = I_ext_I * np.ones(Lt)\n\n # simulate the Wilson-Cowan equations \n for k in range(Lt-1):\n dE = dt/tau_E * (-E[k] + F(wEE*E[k]-wEI*I[k]+I_ext_E[k],a_E,theta_E))\n dI = dt/tau_I * (-I[k] + F(wIE*E[k]-wII*I[k]+I_ext_I[k],a_I,theta_I))\n E[k+1] = E[k] + dE\n I[k+1] = I[k] + dI\n \n return E,I", "def _setupWeather(self, w, config):\n wnames = ('cloud', 'seeing')\n if w not in wnames:\n raise Exception('w should be one of %s' %(wnames))\n filename = config['%s_datafile' %(w)]\n file = open(filename, 'r')\n # Also assume flat file contains only date / value in a space or tab separated file. \n self.dates[w] = []\n self.weather[w] = []\n # Read the data file.\n print '# Reading weather data file %s' %(filename)\n for line in file:\n if line.startswith('#') | line.startswith('!'):\n continue\n self.dates[w].append(line.split()[0])\n self.weather[w].append(line.split()[1])\n file.close()\n self.dates[w] = numpy.array(self.dates[w], float)\n self.weather[w] = numpy.array(self.weather[w], float)\n # Check the total amount of data (mostly for user awareness):\n print '# Read %d weather values from %s file. ' %(len(self.weather[w]), filename)\n # Check that weather data is monotonically increasing in time. 
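The `simulate_wc` snippet above integrates the Wilson-Cowan equations with forward Euler but leaves the transfer function `F` undefined. A sketch of how it could be driven, assuming the usual sigmoid form for `F`; the parameter values are illustrative defaults, not values taken from the source:

import numpy as np

def F(x, a, theta):
    # Common Wilson-Cowan population activation function (an assumption;
    # the snippet itself does not define F).
    return 1.0 / (1.0 + np.exp(-a * (x - theta))) - 1.0 / (1.0 + np.exp(a * theta))

pars = {
    'tau_E': 1.0, 'a_E': 1.2, 'theta_E': 2.8,   # excitatory population
    'tau_I': 2.0, 'a_I': 1.0, 'theta_I': 4.0,   # inhibitory population
    'wEE': 9.0, 'wEI': 4.0, 'wIE': 13.0, 'wII': 11.0,
    'I_ext_E': 0.0, 'I_ext_I': 0.0,
    'E_init': 0.32, 'I_init': 0.15,
    'dt': 0.1, 'range_t': np.arange(0.0, 100.0, 0.1),
}

E, I = simulate_wc(pars)  # requires the simulate_wc definition above in scope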
\n if not(numpy.all(numpy.diff(self.dates[w]))):\n order = self.dates[w].argsort()\n self.weather[w] = self.weather[w][order]\n self.dates[w] = self.dates[w][order]\n # Get the total length of time included in this (seeing/cloud) file,\n # so that we can determine a wrap-around date if we need that.\n self.maxtime[w] = self.dates[w].max()\n return", "def thermal_conductivity(temperature):\n a0 = -4.1236\n a1 = 13.788\n a2 = -26.068\n a3 = 26.272\n a4 = -14.663\n a5 = 4.4954\n a6 = -0.6905\n a7 = 0.0397\n log_t = math.log10(temperature)\n f_exp = a0 + a1*log_t + a2*log_t**2.0 + a3*log_t**3.0 + a4*log_t**4.0 + \\\n a5*log_t**5.0 + a6*log_t**6.0 + a7*log_t**7\n g10_thermal_conductivity = 10.0**f_exp\n return g10_thermal_conductivity", "def SC_generation(hourly_radiation, prop_observers, number_groups, weather_data, g, Sz, Az, ha, Tin_C, height,\n panel_properties, latitude):\n\n\n n0 = panel_properties['n0']\n c1 = panel_properties['c1']\n c2 = panel_properties['c2']\n mB0_r = panel_properties['mB0_r']\n mB_max_r = panel_properties['mB_max_r']\n mB_min_r = panel_properties['mB_min_r']\n C_eff = panel_properties['C_eff']\n t_max = panel_properties['t_max']\n IAM_d = panel_properties['IAM_d']\n Aratio = panel_properties['aperture_area_ratio']\n Apanel = panel_properties['module_area']\n dP1 = panel_properties['dP1']\n dP2 = panel_properties['dP2']\n dP3 = panel_properties['dP3']\n dP4 = panel_properties['dP4']\n Cp_fluid_JperkgK = panel_properties['Cp_fluid'] # J/kgK\n\n # create lists to store results\n list_results = [None] * number_groups\n list_areas_groups = [None] * number_groups\n Sum_mcp_kWperC = np.zeros(8760)\n Sum_qout_kWh = np.zeros(8760)\n Sum_Eaux_kWh = np.zeros(8760)\n Sum_qloss = np.zeros(8760)\n Sum_radiation_kWh = np.zeros(8760)\n\n Tin_array_C = np.zeros(8760) + Tin_C\n aperature_area_per_module = Aratio * Apanel\n total_area_module = prop_observers['total_area_module'].sum() # total area for panel installation\n\n # calculate equivalent length of pipes\n lv = panel_properties['module_length'] # module length\n number_modules = round(total_area_module/Apanel) # this is an estimation\n l_ext_mperm2 = (2 * lv * number_modules/ (total_area_module * Aratio)) # pipe length within the collectors\n l_int_mperm2 = 2 * height / (total_area_module * Aratio) # pipe length from building substation to roof top collectors\n Leq_mperm2 = l_int_mperm2 + l_ext_mperm2 # in m/m2 aperture\n\n if panel_properties['type'] == 'ET': # for evacuated tubes\n Nseg = 100 # default number of subsdivisions for the calculation\n else:\n Nseg = 10 # default number of subsdivisions for the calculation\n\n for group in range(number_groups):\n # load panel angles from group\n teta_z = prop_observers.loc[group, 'surface_azimuth'] # azimuth of panels of group\n area_per_group = prop_observers.loc[group, 'total_area_module']\n tilt_angle_deg = prop_observers.loc[group, 'tilt'] # tilt angle of panels\n\n # create dataframe with irradiation from group\n\n radiation_Wh = pd.DataFrame({'I_sol': hourly_radiation[group]})\n radiation_Wh['I_diffuse'] = weather_data.ratio_diffhout * radiation_Wh.I_sol # calculate diffuse radiation\n radiation_Wh['I_direct'] = radiation_Wh['I_sol'] - radiation_Wh['I_diffuse'] # calculate direct radiation\n radiation_Wh.fillna(0, inplace=True) # set nan to zero\n\n # calculate incidence angle modifier for beam radiation\n IAM_b = calc_IAM_beam_SC(Az, g, ha, teta_z, tilt_angle_deg, panel_properties['type'], Sz, latitude)\n\n # calculate heat production from a solar collector of each group\n 
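`thermal_conductivity` above evaluates a degree-7 polynomial fit in log10(T) and returns 10 raised to that power. A quick sanity check at a few common temperatures might look like the sketch below; the expected order of magnitude (roughly 0.07-0.6 W/(m*K) for G10 between 4 K and room temperature) is an assumption based on typical property tables, and the function itself needs `import math` in scope:

import math  # required by thermal_conductivity above

for T in (4.2, 77.0, 295.0):
    # Values should land around 0.07-0.6 W/(m*K) for G10 (assumed magnitudes).
    print(T, thermal_conductivity(T))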
list_results[group] = calc_SC_module(tilt_angle_deg, IAM_b, IAM_d, radiation_Wh.I_direct,\n radiation_Wh.I_diffuse, weather_data.drybulb_C, n0,\n c1, c2, mB0_r, mB_max_r, mB_min_r, C_eff, t_max,\n aperature_area_per_module, dP1, dP2, dP3, dP4,\n Cp_fluid_JperkgK, Tin_C, Leq_mperm2, l_ext_mperm2,\n l_int_mperm2, Nseg)\n\n\n # multiplying the results with the number of panels in each group and write to list\n number_modules_per_group = area_per_group / Apanel\n list_areas_groups[group] = area_per_group\n radiation_array = hourly_radiation[group] * list_areas_groups[group] / 1000 # kWh\n Sum_qout_kWh = Sum_qout_kWh + list_results[group][1] * number_modules_per_group\n Sum_Eaux_kWh = Sum_Eaux_kWh + list_results[group][2] * number_modules_per_group\n Sum_qloss = Sum_qloss + list_results[group][0] * number_modules_per_group\n Sum_mcp_kWperC = Sum_mcp_kWperC + list_results[group][5] * number_modules_per_group\n Sum_radiation_kWh = Sum_radiation_kWh + radiation_Wh['I_sol']*area_per_group/1000\n\n Tout_group_C = (Sum_qout_kWh / Sum_mcp_kWperC) + Tin_C # in C assuming all collectors are connected in parallel\n\n Final = pd.DataFrame(\n {'Q_SC_gen_kWh': Sum_qout_kWh, 'T_SC_sup_C': Tin_array_C, 'T_SC_re_C': Tout_group_C, 'mcp_SC_kWperC': Sum_mcp_kWperC, 'Eaux_SC_kWh': Sum_Eaux_kWh,\n 'Q_SC_l_kWh': Sum_qloss, 'Area_SC_m2': sum(list_areas_groups), 'radiation_kWh': Sum_radiation_kWh}, index=range(8760))\n\n return list_results, Final", "def calculate_surface_heatflux(self, weather, spaces_dict, surface, temp_record, Coeff, space, h_surface, ShadowsFlag, ns, shadow_record, shade_surf_list, surfaces_dict, Aflag, terrain, areaDict, areaWinDict, shadowRatios, shadowRatioIndex):\r\n #print \"Reaching Surface function...\"\r\n\r\n # First get the As\r\n A_total = self.get_A(surface, areaDict, areaWinDict)\r\n if Aflag == 0:\r\n # If it is the first surface of the space, label the space ID in the log file:\r\n la = str(surface.obj_id)\r\n lb = str(surface.obj_type)\r\n #TM_user.info(\"%s,surface area,%s,%s\" % (la, A_total, lb))\r\n A_noWin = self.get_A_noWin(surface, areaDict, areaWinDict)\r\n A_noOp = self.get_A_noOp(surface, areaDict, areaWinDict)\r\n T_space = spaces_dict[space.obj_id][1]\r\n T1 = weather[\"t_outside\"]\r\n hc_external = float(self.get_hc_external(weather, surface, h_surface, terrain))\r\n transmitted_win = 0\r\n Q_flux = 0\r\n\r\n # need the surface related information, T_space, U, R3\r\n U = self.get_U_surface_e(A_total, A_noOp, surface, areaWinDict) # U = Infor_surface{11,i_surface}; Defined Below\r\n #print U\r\n R3 = 1/U\r\n # Using calculations from: self.surface.constr.layer.C # Infor_surface{10, i_surface} ; from gbXML\r\n C = self.get_C_surface(A_total, A_noOp, surface, Coeff, areaWinDict) # need to pass surface and opening ids\r\n #print C\r\n\r\n temperature = Temperature()\r\n\r\n #Sub-routines for each wall type based on the returned hc_external\r\n # This hc is different for each surface type so moved under this sub-routine area\r\n #hc = 3.076 sent this to the Temperature Object\r\n if surface.obj_type == \"ExteriorWall\":\r\n transmitted_win, Q_flux = temperature.exterior_wall(surface, hc_external, T1, A_total, A_noWin, weather, R3, C, T_space, temp_record, ShadowsFlag, ns, shadow_record, shade_surf_list, surfaces_dict, areaWinDict, shadowRatios, areaDict, shadowRatioIndex)\r\n #print Q_flux\r\n if surface.obj_type == \"Roof\":\r\n transmitted_win, Q_flux = temperature.roof(surface, hc_external, T1, A_total, A_noWin, weather, R3, C, A_noOp, T_space, temp_record, ShadowsFlag, 
ns, shadow_record, shade_surf_list, surfaces_dict, areaWinDict, shadowRatios, areaDict, shadowRatioIndex)\r\n            #print Q_flux # Matches for Four Room\r\n        if surface.obj_type == \"InteriorWall\":\r\n            transmitted_win, Q_flux = temperature.interior_wall(surface, A_total, R3, C, spaces_dict, T_space, temp_record)\r\n            #print Q_flux # Matches for Four Room\r\n        if surface.obj_type == \"UndergroundWall\":\r\n            transmitted_win, Q_flux = temperature.underground_wall(surface, A_total, R3, C, T_space, temp_record) # No instance of it yet to test\r\n        if surface.obj_type == \"RaisedFloor\":\r\n            # This will eventually need some values when we start using raised floors\r\n            transmitted_win, Q_flux = temperature.raised_floor(surface, hc_external, T1, A_total, A_noWin, weather, R3, C, A_noOp, T_space, temp_record) # No instance of it yet to test\r\n\r\n        return transmitted_win, Q_flux

def test_create_tbg_neural_efficacies(self):\n        np.random.seed(25432)\n        m_act = 5.\n        v_act = .05\n        v_inact = .05\n        cdef = [Condition(m_act=m_act, v_act=v_act, v_inact=v_inact)]\n        npos = 5000\n        labels = np.zeros((1, npos), dtype=int)\n        labels[0, :npos // 2] = 1\n        phy_params = phy.PHY_PARAMS_FRISTON00\n        ne = phy.create_tbg_neural_efficacies(phy_params, cdef, labels)\n\n        # check shape consistency:\n        self.assertEqual(ne.shape, labels.shape)\n\n        # check that moments are close to theoretical ones\n        ne_act = ne[0, np.where(labels[0])]\n        ne_inact = ne[0, np.where(labels[0] == 0)]\n        m_act_theo = truncnorm.mean(0, phy_params['eps_max'], loc=m_act,\n                                    scale=v_act ** .5)\n        v_act_theo = truncnorm.var(0, phy_params['eps_max'], loc=m_act,\n                                   scale=v_act ** .5)\n        npt.assert_approx_equal(ne_act.mean(), m_act_theo, significant=2)\n        npt.assert_approx_equal(ne_act.var(), v_act_theo, significant=2)\n\n        m_inact_theo = truncnorm.mean(0, phy_params['eps_max'], loc=0.,\n                                      scale=v_inact ** .5)\n        v_inact_theo = truncnorm.var(0, phy_params['eps_max'], loc=0.,\n                                     scale=v_inact ** .5)\n        npt.assert_approx_equal(ne_inact.mean(), m_inact_theo, significant=2)\n        npt.assert_approx_equal(ne_inact.var(), v_inact_theo, significant=2)\n        npt.assert_array_less(ne, phy_params['eps_max'])\n        npt.assert_array_less(0., ne)

def TM_fluid(layer, kx, om):\n\n    h = layer.d\n    rho = layer.medium.rho\n    K = layer.medium.K\n    k = om*np.sqrt(rho/K)\n    ky = np.sqrt(k**2-kx**2)\n    T = np.zeros((2, 2), dtype=complex)\n    T[0, 0] = np.cos(ky*h)\n    T[1, 0] = (om**2*rho/ky)*np.sin(ky*h)\n    T[0, 1] = -(ky/(om**2*rho))*np.sin(ky*h)\n    T[1, 1] = np.cos(ky*h)\n    return T

def calculateObservables(N, temp, thermalizationSteps=50, simulationSteps=100, skipSweeps=5, bootstrapM=500):\n\n    # Where we will store the magnetizations and energies\n    magnetizationArr = np.zeros(simulationSteps)\n    energyArr = np.zeros(simulationSteps)\n\n    # This isn't really a variable since the Wolff method requires h = 0,\n    # but I'll define it to make the formulas be more verbose\n    # That being said ****DO NOT CHANGE THIS****\n    h = 0\n\n    # Generate our initial conditions\n    # Use cold start, but it shouldn't matter since we thermalize\n    isingMap = Ising.initializeCold(N)\n    \n    # Thermalize\n    for i in range(thermalizationSteps):\n        Ising.singleClusterFlip(isingMap, 1/temp) \n\n    # Now we'll actually record data\n    for i in range(simulationSteps):\n        # Note that this magnetization is not the magnetization per site, so\n        # we have to divide by a factor of N later on\n        magnetizationArr[i] = Ising.magnetization(isingMap)\n\n        # This is indeed the energy density, and therefore we multiply by a\n        # factor of N later on as opposed to the magnetization above\n        energyArr[i] = 
Ising.energyDensity(isingMap, h)\n\n # Skip a few steps to reduce the correlation between successive measurements\n for j in range(skipSweeps):\n Ising.singleClusterFlip(isingMap, 1/temp)\n\n\n # The variance in the magnetization should be proportional to the susceptability\n magnetizationVariance = Ising.statisticalBootstrap(magnetizationArr, np.var, bootstrapM)\n # The variance in the energy should be proportional to the specific heat\n energyVariance = Ising.statisticalBootstrap(energyArr, np.var, bootstrapM)\n\n # These constants out front are questionable, but in general were taken from\n # https://www.maths.tcd.ie/~bouracha/reports/2-dimensional_ising_model.pdf\n # Because of this uncertainty, we opted to solve for alpha and nu, since they\n # don't depend on the magnitude of the peaks, only their positions\n chi = (1/N**2) * 1/temp * magnetizationVariance\n C = N**2 * (1/temp)**2 * energyVariance\n\n return [C, chi]", "def ionization_constant_water(temperature=298.15, density=None):\n import numpy as np\n\n # using Model II from Bandura etal\n # model parameters\n n = 6\n alpha_0 = -0.864671\n alpha_1 = 8659.19\n alpha_2 = -22786.2\n beta_0 = 0.642044\n beta_1 = -56.8534\n beta_2 = -0.375754\n\n # Water parameters\n Mw = 18.01528\n\n # temperature\n T = temperature\n\n # density\n if density:\n D = density\n else:\n D = density_water(T)\n\n pKWG = 0.61415 \\\n + 48251.33 / T \\\n - 67707.93 / T**2.0 \\\n + 10102100.0 / T**3.0\n\n Z = D * np.exp(alpha_0 \\\n + alpha_1/T \\\n + alpha_2/T**2 *np.power(D,2.0/3.0)\n )\n\n pKw = -2*n*(\n np.log10(1 + Z) - (Z/(Z + 1)) * D * (\n beta_0 + beta_1/T + beta_2*D\n )\n ) + pKWG + 2 * np.log10(Mw/1000.0)\n\n return np.power(10, -pKw)", "def __init__(self, type='WALL_HEAT_FLUX', name=None, pressure_type=None, result_type=None, age_of_fluid_diffusion=None, turbulent_schmidt_number=None, diffusion_coefficient=None, clothing_coefficient_factor=None, metabolic_rate_factor=None, relative_humidity_factor=None, compute_sensitivities_to=None, optimization_force_direction=None, topological_reference=None, compute_heat_transfer_coefficient=None, reference_temperature_result_type=None, compute_nusselt_number=None, reference_nusselt_number_length=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._type = None\n self._name = None\n self._pressure_type = None\n self._result_type = None\n self._age_of_fluid_diffusion = None\n self._turbulent_schmidt_number = None\n self._diffusion_coefficient = None\n self._clothing_coefficient_factor = None\n self._metabolic_rate_factor = None\n self._relative_humidity_factor = None\n self._compute_sensitivities_to = None\n self._optimization_force_direction = None\n self._topological_reference = None\n self._compute_heat_transfer_coefficient = None\n self._reference_temperature_result_type = None\n self._compute_nusselt_number = None\n self._reference_nusselt_number_length = None\n self.discriminator = 'type'\n\n self.type = type\n if name is not None:\n self.name = name\n if pressure_type is not None:\n self.pressure_type = pressure_type\n if result_type is not None:\n self.result_type = result_type\n if age_of_fluid_diffusion is not None:\n self.age_of_fluid_diffusion = age_of_fluid_diffusion\n if turbulent_schmidt_number is not None:\n self.turbulent_schmidt_number = turbulent_schmidt_number\n if diffusion_coefficient is not None:\n self.diffusion_coefficient = 
diffusion_coefficient\n if clothing_coefficient_factor is not None:\n self.clothing_coefficient_factor = clothing_coefficient_factor\n if metabolic_rate_factor is not None:\n self.metabolic_rate_factor = metabolic_rate_factor\n if relative_humidity_factor is not None:\n self.relative_humidity_factor = relative_humidity_factor\n if compute_sensitivities_to is not None:\n self.compute_sensitivities_to = compute_sensitivities_to\n if optimization_force_direction is not None:\n self.optimization_force_direction = optimization_force_direction\n if topological_reference is not None:\n self.topological_reference = topological_reference\n if compute_heat_transfer_coefficient is not None:\n self.compute_heat_transfer_coefficient = compute_heat_transfer_coefficient\n if reference_temperature_result_type is not None:\n self.reference_temperature_result_type = reference_temperature_result_type\n if compute_nusselt_number is not None:\n self.compute_nusselt_number = compute_nusselt_number\n if reference_nusselt_number_length is not None:\n self.reference_nusselt_number_length = reference_nusselt_number_length", "def generate_experimental_condition(dir_output, file_name_prefix,list_temperature, partial_pressure_H2O=0.02,SinteringTemperature=1600,SinteringTime=24):\r\n\r\n print(\"Enter the host element occupying the A-site\")\r\n set_A1 = input (\"Ex: Ba\\n\")\r\n print(\"Enter the valence of the A-site host element\")\r\n set_A1_valence = input(\"Ex: 2\\n\")\r\n frac_A1 = '1'\r\n print(\"Enter the host element occupying the B-site\")\r\n set_B1 = input (\"Ex: Zr\\n\")\r\n print(\"Enter the valence of the B-site host element\")\r\n set_B1_valence = input(\"Ex:4\\n\")\r\n print(\"Enter the fraction that describes the composition of the B-site host element\")\r\n frac_B1 = str(format(float( input (\"Ex:0.8\\n\")), '.2f'))\r\n print(\"Enter the dopant element occupying the B-site\")\r\n set_B2 = input (\"Ex: Sc\\n\")\r\n print(\"Enter the valence of the B-dopant\")\r\n set_B2_valence = input(\"Ex: 3\\n\")\r\n frac_B2 = str(format((1 - float(frac_B1)), '.2f'))\r\n\r\n # generate dataframe for base\r\n CA = set_A1 + set_B1 + frac_B1 + set_B2 + frac_B2 + \"O3\"\r\n dic = {'Composition':CA,\r\n 'A1':set_A1, 'Valence A1':set_A1_valence, 'fraction A1':frac_A1,\r\n 'B1':set_B1, 'Valence B1':set_B1_valence, 'fraction B1':frac_B1,\r\n 'B2':set_B2, 'Valence B2':set_B2_valence, 'fraction B2':frac_B2}\r\n df = pd.DataFrame(dic,index=['i',])\r\n\r\n # add columns name\r\n columns_all = ['Composition','Temperature / C','pH2O / atm','CH',\r\n 'A1','Valence A1','fraction A1','A2','Valence A2','fraction A2',\r\n 'B1','Valence B1','fraction B1','B2','Valence B2','fraction B2',\r\n 'B3','Valence B3','fraction B3','X1','Valence X1','fraction X1','fraction total']\r\n for c in columns_all:\r\n if not(c in df.columns):\r\n df[c] = float(np.NaN)\r\n df = df[columns_all]\r\n\r\n # add another experimental conditions\r\n df['pH2O / atm'] = partial_pressure_H2O\r\n df['Sintering temperature/C'] = SinteringTemperature\r\n df['Sintering time / h'] = SinteringTime\r\n df['fraction A2']='0'\r\n df['fraction B3']='0'\r\n df['X1']='O'\r\n df['Valence X1']='-2'\r\n df['fraction X1']='0.2'\r\n df['fraction total']='1'\r\n\r\n for cnt, tmp in enumerate(list_temperature):\r\n df['Temperature / C'] = tmp\r\n if cnt==0:\r\n df_all = df.copy()\r\n else:\r\n df_all = pd.concat([df_all,df], ignore_index=True)\r\n file_name = os.path.join(dir_output,'{:}_all.csv'.format(file_name_prefix, tmp))\r\n df_all.to_csv(file_name, index=False)", "def 
generate(self, terrain_type='random', site_type='random'):\n if terrain_type == 'random' and site_type == 'random':\n self.structure_type = choice(ref.structure_type_dct.keys())\n elif site_type == 'random':\n self.structure_type = choice(ref.structure_class_dct[terrain_type])\n elif terrain_type == 'random':\n self.structure_type = choice(ref.site_type_dct[site_type])\n else:\n self.structure_type = choice(\n [x for x in ref.site_type_dct[site_type] if ref.structure_type_dct[\n x]['class'] == terrain_type]\n )\n if self.get_attribute('site type') == 'resource':\n self.workers = 0\n else:\n self.workers = []\n self.monsters = []\n self.worker_capacity = self.get_attribute('worker capacity')\n self.time_until_harvest = self.get_attribute('time per harvest')\n return self", "def semi_all_static_feature(city):\n poi_frequency = np.load(exp_data_path + os.sep + 'poi_frequency' + os.sep + 'poi_frequency_{}.npy'.format(city),\n allow_pickle=True) # .tolist()\n poi_num = np.load(exp_data_path + os.sep + 'poi' + os.sep + 'poi_{}.npy'.format(city), allow_pickle=True)\n poi_entropy = np.load(exp_data_path + os.sep + 'poi_entropy' + os.sep + 'poi_entropy_{}.npy'.format(city),\n allow_pickle=True)\n road = np.load(exp_data_path + os.sep + 'roadnet' + os.sep + 'roadnet_{}.npy'.format(city), allow_pickle=True)\n transportation = np.load(exp_data_path + os.sep + 'transportation' + os.sep + 'transportation_{}.npy'.format(city),\n allow_pickle=True)\n commerce = np.load(exp_data_path + os.sep + 'commerce' + os.sep + 'commerce_{}.npy'.format(city), allow_pickle=True)\n\n file_name = exp_data_path + os.sep + 'station' + os.sep + 'all_demand_{}.npy'.format(city)\n demand_data = np.load(file_name, allow_pickle=True)\n total_num = demand_data[:, 0, -2, np.newaxis]\n slow_num = demand_data[:, 0, 0, np.newaxis]\n fast_num = demand_data[:, 0, 2, np.newaxis]\n\n raw_data = np.concatenate((slow_num, fast_num, total_num, poi_frequency, poi_num, poi_entropy, road, transportation, commerce), axis=1)\n csv_data = pd.DataFrame(raw_data, columns=SEMI_GENERAL_HEADER)\n print(csv_data.shape)\n # print(csv_data.iloc[:, 2])\n\n file_path = exp_data_path + os.sep + 'static' + os.sep + 'semi_static_feature_{}.csv'.format(city)\n if os.path.exists(file_path):\n os.remove(file_path)\n csv_data.to_csv(file_path)\n pass", "def test_electronic_type(fixture_code, generate_structure):\n code = fixture_code('quantumespresso.pw')\n structure = generate_structure()\n\n with pytest.raises(NotImplementedError):\n for electronic_type in [ElectronicType.AUTOMATIC]:\n PwBandsWorkChain.get_builder_from_protocol(code, structure, electronic_type=electronic_type)\n\n builder = PwBandsWorkChain.get_builder_from_protocol(code, structure, electronic_type=ElectronicType.INSULATOR)\n\n for namespace in [builder.relax['base'], builder.scf, builder.bands]:\n parameters = namespace['pw']['parameters'].get_dict()\n assert parameters['SYSTEM']['occupations'] == 'fixed'\n assert 'degauss' not in parameters['SYSTEM']\n assert 'smearing' not in parameters['SYSTEM']", "def measurements():\n \n number_of_days = 100\n\n\n # the temperature is usually 20 but can vary by 10 degrees at a time\n \n average_temperature, standard_deviation = 20, 10\n\n # ground truth temperature\n\n _c = np.random.normal(average_temperature, standard_deviation, number_of_days)\n\n # the therometer is usually 2 off but can be off by up to 4 degrees\n \n average_jankiness, standard_jankiness = 2, 2\n \n temperature_deviations = np.random.normal(average_jankiness, standard_jankiness, 
number_of_days)\n\n c = _c + temperature_deviations\n \n _f = convert(_c)\n\n average_jankiness, standard_jankiness = 2.4, 3.3\n\n therometer_deviations = np.random.normal(average_jankiness, standard_jankiness, number_of_days)\n \n f = _f + therometer_deviations\n \n return pd.DataFrame({\n 'c': c,\n 'f': f,\n })", "async def get_temperatures(self, **kwargs: Any) -> Dict[str, float]:\n ...", "def tristimulus_weighting_factors_ASTME2022(cmfs, illuminant, shape, k=None):\n\n if cmfs.shape.interval != 1:\n raise ValueError('\"{0}\" shape \"interval\" must be 1!'.format(cmfs))\n\n if illuminant.shape.interval != 1:\n raise ValueError(\n '\"{0}\" shape \"interval\" must be 1!'.format(illuminant))\n\n global _TRISTIMULUS_WEIGHTING_FACTORS_CACHE\n if _TRISTIMULUS_WEIGHTING_FACTORS_CACHE is None:\n _TRISTIMULUS_WEIGHTING_FACTORS_CACHE = CaseInsensitiveMapping()\n\n name_twf = ', '.join((cmfs.name, illuminant.name, str(shape), str(k)))\n if name_twf in _TRISTIMULUS_WEIGHTING_FACTORS_CACHE:\n return _TRISTIMULUS_WEIGHTING_FACTORS_CACHE[name_twf]\n\n Y = cmfs.values\n S = illuminant.values\n\n interval_i = DEFAULT_INT_DTYPE(shape.interval)\n W = S[::interval_i, np.newaxis] * Y[::interval_i, :]\n\n # First and last measurement intervals *Lagrange Coefficients*.\n c_c = lagrange_coefficients_ASTME2022(interval_i, 'boundary')\n # Intermediate measurement intervals *Lagrange Coefficients*.\n c_b = lagrange_coefficients_ASTME2022(interval_i, 'inner')\n\n # Total wavelengths count.\n w_c = len(Y)\n # Measurement interval interpolated values count.\n r_c = c_b.shape[0]\n # Last interval first interpolated wavelength.\n w_lif = w_c - (w_c - 1) % interval_i - 1 - r_c\n\n # Intervals count.\n i_c = W.shape[0]\n i_cm = i_c - 1\n\n # \"k\" is used as index in the nested loop.\n k_n = k\n\n for i in range(3):\n # First interval.\n for j in range(r_c):\n for k in range(3):\n W[k, i] = W[k, i] + c_c[j, k] * S[j + 1] * Y[j + 1, i]\n\n # Last interval.\n for j in range(r_c):\n for k in range(i_cm, i_cm - 3, -1):\n W[k, i] = (W[k, i] + c_c[r_c - j - 1, i_cm - k] * S[j + w_lif]\n * Y[j + w_lif, i])\n\n # Intermediate intervals.\n for j in range(i_c - 3):\n for k in range(r_c):\n w_i = (r_c + 1) * (j + 1) + 1 + k\n W[j, i] = W[j, i] + c_b[k, 0] * S[w_i] * Y[w_i, i]\n W[j + 1, i] = W[j + 1, i] + c_b[k, 1] * S[w_i] * Y[w_i, i]\n W[j + 2, i] = W[j + 2, i] + c_b[k, 2] * S[w_i] * Y[w_i, i]\n W[j + 3, i] = W[j + 3, i] + c_b[k, 3] * S[w_i] * Y[w_i, i]\n\n # Extrapolation of potential incomplete interval.\n for j in range(\n DEFAULT_INT_DTYPE(w_c - ((w_c - 1) % interval_i)), w_c, 1):\n W[i_cm, i] = W[i_cm, i] + S[j] * Y[j, i]\n\n W *= 100 / np.sum(W, axis=0)[1] if k_n is None else k_n\n\n _TRISTIMULUS_WEIGHTING_FACTORS_CACHE[name_twf] = W\n\n return W", "def weights_treatment_parameters(init_dict, GRID):\n GRID = np.linspace(0.01, 0.99, num=99, endpoint=True)\n\n coeffs_untreated = init_dict[\"UNTREATED\"][\"params\"]\n coeffs_treated = init_dict[\"TREATED\"][\"params\"]\n cov = construct_covariance_matrix(init_dict)\n x = simulate_covariates(init_dict)\n\n # We take the specified distribution for the cost shifters from the paper.\n cost_mean, cost_sd = -0.0026, np.sqrt(0.270)\n v_mean, v_sd = 0.00, np.sqrt(cov[2, 2])\n\n eval_points = norm.ppf(GRID, loc=v_mean, scale=v_sd)\n\n ate_weights = np.tile(1.0, 99)\n tut_weights = norm.cdf(eval_points, loc=cost_mean, scale=cost_sd)\n\n tt_weights = 1 - tut_weights\n\n def tut_integrand(point):\n eval_point = norm.ppf(point, loc=v_mean, scale=v_sd)\n return norm.cdf(eval_point, 
loc=cost_mean, scale=cost_sd)\n\n def tt_integrand(point):\n eval_point = norm.ppf(point, loc=v_mean, scale=v_sd)\n return norm.cdf(eval_point, loc=cost_mean, scale=cost_sd)\n\n # Scaling so that the weights integrate to one.\n tut_scaling = quad(tut_integrand, 0.01, 0.99)[0]\n tut_weights /= tut_scaling\n\n tt_scaling = quad(tt_integrand, 0.01, 0.99)[0]\n tt_weights /= tt_scaling\n\n mte = mte_information(coeffs_treated, coeffs_untreated, cov, GRID, x, init_dict)\n\n return ate_weights, tt_weights, tut_weights, mte", "def setup_model(msid, t0, t1, model_spec, init):\n\n model = xija.ThermalModel(msid, start=t0, stop=t1, model_spec=model_spec)\n for key, value in init.items():\n if isinstance(value, dict):\n model.comp[key].set_data(value['data'], value['times'])\n else:\n model.comp[key].set_data(value)\n\n return model", "def setup_model(msid, t0, t1, model_spec, init):\n\n model = xija.ThermalModel(msid, start=t0, stop=t1, model_spec=model_spec)\n for key, value in init.items():\n if isinstance(value, dict):\n model.comp[key].set_data(value['data'], value['times'])\n else:\n model.comp[key].set_data(value)\n\n return model", "def _conditions(self, beg=-90, intvl=20, con_type='ori', stim='bar', \n\t\t\t\t\tbiphasic=True, unit='deg', con_list=[], temp_freq = 2):\n\t\t\n\t\t\n\t\tcon_types = ['ori', 'spat_freq', 'temporal_freq', 'chromatic', 'dl_bar']\n\t\tstims = ['bar', 'grating']\n\t\t\n\t\t\n\t\t# Checking if condition and stimulus type recognised. \n\t\tif not con_type.lower() in con_types:\n\t\t\tprint('con_type not recognised. ' \n\t\t\t\t\t'Predefined options, if desired, are %s \\n'%con_types\n\t\t\t\t\t)\n\n\t\tif not stim.lower() in stims:\n\t\t\tprint('stimulus not recognised. ' \n\t\t\t\t\t'Predefined options, if desired, are %s \\n'%con_types\n\t\t\t\t\t)\n\n\n\t\t\n\t\tn_con = self.parameters['conditions']\n\t\t\n\t\tself.parameters['condition_type'] = con_type.lower()\n\t\tself.parameters['condition_unit'] = unit.capitalize()\n\t\tself.parameters['stimulus'] = stim.lower()\n\t\t\n\t\tif stim.lower() == stims[1]:\n\t\t\t# Gratings are GENERALLY not biphasic\n\t\t\tself.parameters['biphasic'] = 'N/A'\n\t\telse:\n\t\t\tself.parameters['biphasic'] = biphasic\n\t\t\n\t\t# Address issue of whether the sampling rate suits teh temporal frequency of \n\t\t# the grating for FFT analysis\n\t\tif stim.lower() == 'grating':\n\t\t\tself.parameters['temp_freq'] = float(temp_freq)\n\t\t\t\n\t\t\t# Sample rate must be a multiple of F1/temp_freq for it to be a frequency measured\n\t\t\t# in the FFT.\n\t\t\tsamp_rate = 1/float(self.bin_width)\n\t\t\t\n\t\t\t\n\t\t\tassert samp_rate % temp_freq == 0., ('Bin_width (%s) is incompatible wih obtaining' \n\t\t\t\t\t\t\t\t\t\t\t\t 'an FFT containing the specified temp_freq (%s). '\n\t\t\t\t\t\t\t\t\t\t\t\t 'The sampling frequency (1/bin_width) must be a'\n\t\t\t\t\t\t\t\t\t\t\t\t 'multiple of the temp_freq. 
\\n\\n Try as a' \n\t\t\t\t\t\t\t\t\t\t\t\t 'bin_width %s and rerun self._sort().'\n\t\t\t\t\t\t\t\t\t\t\t\t % (self.bin_width, temp_freq, \n\t\t\t\t\t\t\t\t\t\t\t\t\t1/(np.ceil(samp_rate/float(temp_freq))*temp_freq)))\n\t\t\n\t\tself.cond_label = []\n\n\t\t\n\t\tdef circ(ori, bound = 360):\n\t\t\t\"\"\"Func that Ensures all orientation values are between 0 and 360 degrees.\n\t\t\t\"\"\"\n\t\t\t# ori[ori<-360] += 720\n\t\t\t# ori[ori<0] += 360\n\t\t\t# ori[ori>360] -= 360\n\t\t\t# ori[ori>720] -= 720\n\n\n\t\t\treturn ori % bound\n\n\t\t# if list of conditions provided directly\n\t\tif len(con_list) > 0:\n\t\t\t\n\t\t\t# Must match number of conditions\n\t\t\tassert len(con_list) == n_con, ('the number of labels provided '\n\t\t\t\t\t\t\t\t\t\t'manually (%s) does not match the '\n\t\t\t\t\t\t\t\t\t\t'number of conditions (%s).' % \n\t\t\t\t\t\t\t\t\t\t(len(con_list), n_con))\n\t\t\t \n\t\t\t# Must all be strings \n\t\t\tassert all(isinstance(l, str) for l in con_list), ('not all the '\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t 'labels provided '\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t 'are strings')\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t# List of conditions as strings\n\t\t\tself.cond_label = con_list\n\t\t\t\n\t\t\t# Convert to floats\n\t\t\t# Relying on numpy conversion error should list be unable to convert to float.\n\t\t\tself.conditions = np.array(con_list).astype('float')\n\t\t\t\n\t\t\t\n\t\t\tif biphasic:\n\t\t\t\t\n\n\t\t\t\t# self.conditions has been defined as an np.ndarray\n\t\t\t\tself.conditions2 = self.conditions \n\n\t\t\t# # Generate list of strings or labels\n\t\t\t# for c in range(n_con):\n\t\t\t# label = '%s / %s %s' %(self.conditions[c], self.conditions2[c],\n\t\t\t# self.parameters['condition_unit'])\n\t\t\t# self.cond_label.append(label)\n\n\t\t\t# else:\n\t\t\t# for c in range(n_con):\n\t\t\t\t\t\n\t\t\t# label = '%s %s' %(self.conditions[c],\n\t\t\t# self.parameters['condition_unit'])\n\t\t\t# self.cond_label.append(label)\n\n\t\t\t\t\n\t\t\n\t\t# if condition tpye is orientation\n\t\telif con_type.lower() == con_types[0]:\n\t\t\t\n\t\t\t# Generate full range of conditions\n\t\t\tself.conditions = circ(np.arange(beg, beg+(n_con*intvl), intvl))\n\t\t\t\n\t\t\tassert len(self.conditions) == n_con, ('The amount of condition labels (%s) '\n\t\t\t\t\t\t\t\t\t\t\t'and conditions (%s) do not match; '\n\t\t\t\t\t\t\t\t\t\t\t'check your condition parameters' % \n\t\t\t\t\t\t\t\t\t\t\t(self.cond_label.size, n_con))\n\t\t\t\n\t\t\tif biphasic:\n\t\t\t\t\n\n\t\t\t\t# self.conditions has been defined as an np.ndarray\n\t\t\t\tself.conditions2 = circ(self.conditions + 180) \n\n\t\t\t\t# Generate list of strings or labels\n\t\t\t\tfor c in range(n_con):\n\t\t\t\t\tlabel = '%s / %s %s' %(self.conditions[c], self.conditions2[c],\n\t\t\t\t\t\t\t\t\t\t self.parameters['condition_unit'])\n\t\t\t\t\tself.cond_label.append(label)\n\t\t\t# Generate list of strings for non-biphasic. 
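The grating branch of `_conditions` asserts that the sampling rate (1/bin_width) is an integer multiple of the stimulus temporal frequency, so that the F1 component lands exactly on an FFT bin, and its error message suggests the corrected bin width. A standalone sketch of that check, with illustrative numbers not taken from the source:

import numpy as np

temp_freq = 2.0    # Hz, stimulus F1 (illustrative)
bin_width = 0.024  # s; 1/0.024 = 41.67 Hz is not a multiple of 2 Hz

samp_rate = 1.0 / bin_width
if samp_rate % temp_freq != 0:
    # The same correction the assert message computes: round the sampling
    # rate up to the next multiple of temp_freq, then invert.
    bin_width = 1.0 / (np.ceil(samp_rate / temp_freq) * temp_freq)

print(bin_width)  # 1/42 s, i.e. a 42 Hz sampling rate, a multiple of 2 Hz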
\n\t\t\telse:\n\t\t\t\t\n\t\t\t\tfor c in range(n_con):\n\t\t\t\t\tlabel = '%s %s' %(self.conditions[c],\n\t\t\t\t\t\t\t\t\t self.parameters['condition_unit'])\n\t\t\t\t\tself.cond_label.append(label)\n\t\t\t\t\t\n\t\t# IF condition type is Spat Freq \n\t\telif con_type.lower() == con_types[1]:\n\t\t\tself.conditions = np.arange(beg, beg + (n_con*intvl), intvl)\n\t\t\t\n\t\t\tassert len(self.conditions) == n_con, ('The amount of condition labels (%s) '\n\t\t\t\t\t\t\t\t\t\t\t'and conditions (%s) do not match; '\n\t\t\t\t\t\t\t\t\t\t\t'check your condition parameters' % \n\t\t\t\t\t\t\t\t\t\t\t(self.cond_label.size, n_con))\n\n\t\t\tfor c in range(n_con):\n\t\t\t\tlabel = '%s %s' %(self.conditions[c], self.parameters['condition_unit'])\n\t\t\t\tself.cond_label.append(label)\n\n\t\t# IF condition type is dl_bar\t\t\t\t\t\n\t\telif con_type.lower() == con_types[4]:\n\n\t\t\tself.conditions = np.array([0, 1])\n\t\t\tself.cond_label = ['dark','light']\n\n\t\t\tif len(con_list) > 0:\n\t\t\t\tself.conditions = np.array(con_list).astype('float')\n\n\t\t\t\tif con_list[0] > con_list[1]:\n\t\t\t\t\tself.cond_label = self.cond_label[::-1]\n\n\t\t\tif biphasic:\n\n\t\t\t\tself.conditions2 = self.conditions\n\n\t\t\t\tself.cond_label.extend(\n\t\t\t\t\t[\n\t\t\t\t\t\tcl + ' second'\n\t\t\t\t\t\tfor cl in self.cond_label\n\t\t\t\t\t]\t\n\t\t\t\t\t)\n\n\n\n\t\t# if condition type is not predefined in this method, presume linear range \n\t\telif not con_type.lower() in con_types:\n\t\t\t\n\t\t\tself.conditions = np.arange(beg, beg+(n_con*intvl), intvl)\n\n\n\t\t\tif biphasic:\n\t\t\t\t\n\n\t\t\t\t# self.conditions has been defined as an np.ndarray\n\t\t\t\tself.conditions2 = self.conditions \n\n\t\t\t\t# Generate list of strings or labels\n\t\t\t\tfor c in range(n_con):\n\t\t\t\t\tlabel = '%s / %s %s' %(self.conditions[c], self.conditions2[c],\n\t\t\t\t\t\t\t\t\t\t self.parameters['condition_unit'])\n\t\t\t\t\tself.cond_label.append(label)\n\n\t\t\telse:\n\t\t\t\tfor c in range(n_con):\n\t\t\t\t\t\n\t\t\t\t\tlabel = '%s %s' %(self.conditions[c],\n\t\t\t\t\t\t\t\t\t self.parameters['condition_unit'])\n\t\t\t\t\tself.cond_label.append(label)", "def generate_weather_data(self):\n months = pd.to_datetime(self.output['Local Time']).dt.month\n self.output['Month'] = months # set month values for later joins\n\n # merge output data frame with historical data to get ranges\n keys = ['Location', 'Month']\n m = pd.merge(self.output, self.histdata, how='left',\n left_on=keys, right_on=keys)\n\n # uniformly select random pressure, temperature\n # and humidity values between the historical max and min ranges\n r = np.random.rand(m.shape[0])\n m['Temperature'] = ((m['Tmean_high'] - m['Tmean_low']\n ) * r + m['Tmean_low']).round(1)\n m['Pressure'] = ((m['Pmax'] - m['Pmin']) * r + m['Pmin']).round(1)\n m['Humidity'] = ((m['Hmax'] - m['Hmin']) * r + m['Hmin']).astype(int)\n\n # drop redundant columns and assign to output\n dcols = ['Month', 'Timezone', 'Pmax', 'Pmin',\n 'Hmax', 'Hmin', 'Tmean_high', 'Tmean_low']\n m.drop(columns=dcols, inplace=True)\n self.output = m", "def get_temperatures(self):\n\t\ttry:\n\t\t\ttemps = psutil.sensors_temperatures()\n\t\texcept AttributeError:\n\t\t\treturn {'available': False}\n\t\t\n\t\tcollection = {'available': True, 'measurements': []}\n\t\tfor name, measurements in temps.items():\n\t\t\tif name == 'k10temp':\n\t\t\t\tcollection['measurements'].append({\n\t\t\t\t\t'name': 'AMD Processor',\n\t\t\t\t\t'current': measurements[0].current,\n\t\t\t\t\t'limit': 
measurements[0].critical\n\t\t\t\t})\n\n\t\treturn collection", "def get_temp(html) -> None:\n\tif page_type_dict['general']:\n\t\tt_text = html.find('div', {'class': '_1HBR'}).text\n\t\tt_digit = ''.join([i for i in t_text if i.isdigit()])\n\t\tweather_dict['temperature'] = t_digit\n\telse:\n\t\tre_temp_class = re.compile('.*_2ezK.*') # regex template: str w/ '_2ezK'\n\t\ttemp_class = html.find('div', {'class': re_temp_class}) \n\t\t# we've got smth like: 'ะะพั‡ัŒัŽ14ยฐะฃั‚ั€ะพะผ19ยฐะ”ะฝั‘ะผ24ยฐะ’ะตั‡ะตั€ะพะผ22ยฐ\n\t\tweather_lst = temp_class.text.split('ยฐ') # ['ะะพั‡ัŒัŽ14','ะฃั‚ั€ะพะผ19',...]\n\t\tint_weather_lst = [int(number.group()) for number in ( # for all the elems \n\t\t\tre.search(r'\\d+', word) for word in weather_lst) if number] # keep integers\n\t\t# result: [14, 19, 24, 22]\n\t\tweather_dict['temperature'] = int_weather_lst", "def distribute(self, date_time, air_temp, vapor_pressure=None,\n dew_point=None, cloud_factor=None):\n\n self._logger.debug('%s Distributing thermal' % date_time)\n\n # calculate clear sky thermal\n if self.clear_sky_method == 'marks1979':\n cth = np.zeros_like(air_temp, dtype=np.float64)\n envphys_c.ctopotherm(\n air_temp, dew_point,\n self.dem,\n self.sky_view_factor,\n cth,\n self.config['marks1979_nthreads'])\n\n elif self.clear_sky_method == 'dilley1998':\n cth = clear_sky.Dilly1998(air_temp, vapor_pressure/1000)\n\n elif self.clear_sky_method == 'prata1996':\n cth = clear_sky.Prata1996(air_temp, vapor_pressure/1000)\n\n elif self.clear_sky_method == 'angstrom1918':\n cth = clear_sky.Angstrom1918(air_temp, vapor_pressure/1000)\n\n # terrain factor correction\n if (self.sky_view_factor is not None) and \\\n (self.clear_sky_method != 'marks1979'):\n # apply (emiss * skvfac) + (1.0 - skvfac) to the longwave\n cth = cth * self.sky_view_factor + (1.0 - self.sky_view_factor) * \\\n STEF_BOLTZ * air_temp**4\n\n # make output variable\n self.thermal_clear = cth.copy()\n\n # correct for the cloud factor\n # ratio of measured/modeled solar indicates the thermal correction\n if self.correct_cloud:\n if self.cloud_method == 'garen2005':\n cth = cloud.Garen2005(cth,\n cloud_factor)\n\n elif self.cloud_method == 'unsworth1975':\n cth = cloud.Unsworth1975(cth,\n air_temp,\n cloud_factor)\n\n elif self.cloud_method == 'kimball1982':\n cth = cloud.Kimball1982(cth,\n air_temp,\n vapor_pressure/1000,\n cloud_factor)\n\n elif self.cloud_method == 'crawford1999':\n cth = cloud.Crawford1999(cth,\n air_temp,\n cloud_factor)\n\n # make output variable\n self.thermal_cloud = cth.copy()\n\n # correct for vegetation\n if self.correct_veg:\n cth = vegetation.thermal_correct_canopy(cth,\n air_temp,\n self.veg_tau,\n self.veg_height)\n\n # make output variable\n self.thermal_veg = cth.copy()\n\n self.thermal = utils.set_min_max(cth, self.min, self.max)", "def __init__(self, temperatures, daytypes, consumptions, nb_days, nb_particles, sigma2, kappa, u_heat):\n self.temperatures = temperatures\n self.daytypes = daytypes\n self.consumptions = consumptions\n self.nb_days = nb_days\n self.nb_particles = nb_particles\n self.sigma2 = sigma2\n self.kappa = kappa\n self.u_heat = u_heat\n #Var init\n self.s = np.zeros((nb_days, nb_particles)) \n self.g_heat = np.zeros((nb_days, nb_particles))\n #sigma_s and sigma_g are fixed\n self.sigma_s_star_2 = np.zeros((1, nb_particles)) \n self.sigma_g_star_2 = np.zeros((1, nb_particles))\n self.x_season = np.zeros((1, nb_particles))\n self.x_heat = np.zeros((1, nb_particles))\n self.x = np.zeros((1, nb_particles))\n self.w = np.zeros((1, 
def generateHourlyWeatherInCSV(self):
    for town in self.helper.getTowns():
        self.storeHourlyWeatherInCSV(unicode(town), "f")
        self.storeHourlyWeatherInCSV(unicode(town), "c")

def initialise_templates(self, tel_type):
    for t in tel_type:
        if tel_type[t] in self.prediction.keys():
            continue

        self.prediction[tel_type[t]] = \
            TableInterpolator(self.root_dir + "/" +
                              self.file_names[tel_type[t]])

    return True

def env_temperature(v3: "float", v4: "float") -> "float":
    ...

def get_hc_external(self, weather, surface, h_surface, terrain):
    roughness = surface.construction[0].roughness_unit  # Change back to this line...left as below to match Na's
    if roughness == "VeryRough":
        D = 11.58
        E = 5.894
        F = 0
    elif roughness == "Rough":
        D = 12.49
        E = 4.065
        F = 0.028
    elif roughness == "MediumRough":
        D = 10.79
        E = 4.192
        F = 0.0
    elif roughness == "MediumSmooth":
        D = 8.23
        E = 4.0
        F = -0.057
    elif roughness == "Smooth":
        D = 10.22
        E = 3.1
        F = 0.0
    elif roughness == "VerySmooth":
        D = 8.23
        E = 3.33
        F = -0.036
    else:
        D = 8.23
        E = 4.0
        F = -0.057
        print "No Roughness Value Found so Set Default Values of 8.23,4.0,-0.057"

    wind_speed_temp = weather["wind_speed"]
    # Terrain Lookup Table
    if terrain == 'Flat or Open Countryside':
        sigma = 270
        a = 0.14
    elif terrain == 'Rough or Wooded Country':
        sigma = 370
        a = 0.22
    elif terrain == 'Towns and City Scapes':
        sigma = 460
        a = 0.33
    elif terrain == 'Ocean Front Areas':
        sigma = 210
        a = 0.10
    elif terrain == 'Urban, Industrial, or Forest':
        sigma = 370
        a = 0.22
    else:
        sigma = 370
        a = 0.22
        print "No Terrain Type Found so Set Default Values of 370,0.22"
    terrain_sigma = sigma
    terrain_cof = a

    # Adjust the wind speed...stable air above human-inhabited areas:
    # wind_speed = wind_speed_temp * ((h_surface / 10) ** 0.5)  # line used before terrain was added
    # Wind speed corrected for terrain differences:
    wind_speed = wind_speed_temp * ((270 / 10) ** 0.14) * (h_surface / terrain_sigma) ** terrain_cof
    # print wind_speed
    # Calculate the external convection coefficient:
    # hc_external = D + E*wind_speed + F*wind_speed^2
    hc_external = D + (E * wind_speed) + (F * wind_speed ** 2)

    # depending on the direction of the wind adjust hc_external...as of versions 3 and 4 this part seems omitted
    # x = abs(wind_speed_dir - azimuth)
    # if x > 100:
    #     if x < 260:
    #         hc_external *= 0.5
    # print "hc_external : ", hc_external, D, E, F

    return round(hc_external, 5)

def get_typical_days(weather_data, cfg):
    settings = cfg['settings']
    interpolation_freq = pd.Timedelta(settings['intervall'])
    # Flag to determine if any holidays have been found:
    flag_holidays_found = False

    # --- Season --------------------------------------------------------------
    # The 'season' (transition, summer or winter) is defined by the daily
    # average of the ambient temperature.

    # Resample ambient temperatures in DataFrame to days and take mean
    tamb_avg_list = weather_data['TAMB'].resample('D', label='right',
                                                  closed='right').mean()

    # Write the daily mean values to all original time steps
    tamb_avg_list = tamb_avg_list.reindex(weather_data.index)
    tamb_avg_list.fillna(method='backfill', inplace=True)

    season_list = []

    # The VDI 4655 default heat limit is 15 °C (definition of summer days).
    # For low- and zero-energy houses, the average daily temperatures have
    # to be adapted to the actual conditions. (see VDI 4655, page 15)
    Tamb_heat_limit = settings.get('Tamb_heat_limit', 15)  # °C

    # Read through the list of temperatures line by line and apply the definition
    for tamb_avg in tamb_avg_list:
        if tamb_avg < 5:
            season_list.append('W')  # Winter
        elif tamb_avg > Tamb_heat_limit:
            season_list.append('S')  # Summer
        else:
            season_list.append('U')  # Übergang (transition)

    # Alternative season determination method:
    # From 'BDEW Standardlastprofile':
    season_list_BDEW = get_season_list_BDEW(weather_data)

    # Save the results in the weather_data DataFrame
    weather_data['TAMB_d'] = tamb_avg_list
    if settings.get('use_BDEW_seasons', False) is False:
        weather_data['season'] = season_list
    elif settings.get('use_BDEW_seasons', False) is True:
        weather_data['season'] = season_list_BDEW
        weather_data['season'].replace(to_replace={'Winter': 'W',
                                                   'Sommer': 'S',
                                                   'Übergangszeit': 'U'},
                                       inplace=True)

    # Store the BDEW seasons separately
    weather_data['season_BDEW'] = season_list_BDEW

    steps_per_day = 24 / (interpolation_freq.seconds / 3600.0)
    settings['steps_per_day'] = steps_per_day
    logger.debug('Number of days in winter: ' +
                 str(season_list.count('W') / steps_per_day))
    logger.debug('Number of days in summer: ' +
                 str(season_list.count('S') / steps_per_day))
    logger.debug('Number of days in transition: ' +
                 str(season_list.count('U') / steps_per_day))

    # Use https://pypi.org/project/holidays/ for holiday detection
    used_holidays = []
    if settings.get('holidays'):
        country = settings['holidays'].get('country', 'DE')
        province = settings['holidays'].get('province', None)
        used_holidays = holidays.country_holidays(country, subdiv=province)

    # Read through the list of days line by line and see what kind of day they are.
    # Problem: In the weather data, the bins are labeled on the 'right'
    # (each time stamp describes the interval before it). Therefore the time stamp
    # midnight (00:00:00) describes the last interval of the day before.
    # However, asking for the weekday of a midnight time stamp gives the name
    # of the next day. Thus the resulting list of weekdays is shifted by one
    # time step.
    weekdays_list = []
    weekdays_list_BDEW = []
    for date_obj in weather_data.index:
        if date_obj.dayofweek == 6:  # 6 equals Sunday
            weekdays_list.append('S')
            weekdays_list_BDEW.append('Sonntag')
        elif date_obj in used_holidays:
            weekdays_list.append('S')
            weekdays_list_BDEW.append('Sonntag')
            flag_holidays_found = True
        elif date_obj.dayofweek == 5:  # 5 equals Saturday
            weekdays_list.append('W')
            weekdays_list_BDEW.append('Samstag')
        else:
            weekdays_list.append('W')
            weekdays_list_BDEW.append('Werktag')

    # Solution to the problem: take the first list entry, then add the rest of
    # the list minus the very last entry.
    weather_data['weekday'] = [weekdays_list[0]] + weekdays_list[:-1]
    weather_data['weekday_BDEW'] = [weekdays_list_BDEW[0]] + \
        weekdays_list_BDEW[:-1]

    # Print a warning, if necessary
    if flag_holidays_found is False:
        logger.warning('Warning! No holidays were found for the chosen time!')

    # --- Cloud cover amount --------------------------------------------------
    ccover_avg_list = weather_data['CCOVER'].resample('D', label='right',
                                                      closed='right').mean()
    ccover_avg_list = ccover_avg_list.reindex(weather_data.index)
    ccover_avg_list.fillna(method='backfill', inplace=True)
    # The interpolation to 15 min may cause a slight difference of daily means
    # compared to 60 min, in rare cases shifting from >5.0 to <5.0.
    # Rounding to the first decimal place may prevent this issue.
    ccover_avg_list = ccover_avg_list.round(decimals=1)

    # Read through the list of cloud cover values line by line and apply the definition
    cloudy_list = []
    for ccover_avg in ccover_avg_list:
        if (ccover_avg < 5.0):
            cloudy_list.append('H')
        else:
            cloudy_list.append('B')

    weather_data['cloudy'] = cloudy_list

    # Combine the gathered information from season, weekday and cloudiness
    # into one 'typtag' key
    weather_data['typtag'] = weather_data['season'] + \
        weather_data['weekday'] + weather_data['cloudy']

    # For summer days, the VDI 4655 makes no distinction in terms of cloud
    # amount. So we need to replace 'heiter' and 'bewölkt' with 'X'
    typtage_replace = {'typtag':
                       {'SWH': 'SWX', 'SWB': 'SWX', 'SSH': 'SSX', 'SSB': 'SSX'}
                       }
    weather_data.replace(to_replace=typtage_replace, inplace=True)

async def test_thermostat_heatit_z_trm3_no_value(
    hass: HomeAssistant, client, climate_heatit_z_trm3_no_value, integration
) -> None:
    # When the config parameter that specifies what sensor to use has no value, we fall
    # back to the first temperature sensor found on the device
    state = hass.states.get(CLIMATE_FLOOR_THERMOSTAT_ENTITY)
    assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 22.5

def tempWater(sample):
    sample *= .0009
    sample *= 1000
    celsius = (sample - 20.5128) * 0.0512
    return round(celsius, 2)
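A quick sanity check for tempWater above; the raw sample value 512 is made up, but the arithmetic follows directly from the function body:

print(tempWater(512))  # 0.0009 * 512 * 1000 = 460.8 -> (460.8 - 20.5128) * 0.0512 = 22.54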
def get_model_with_properties():

    m = ConcreteModel()

    # ------------------------------------------------------------------
    # Data
    # ------------------------------------------------------------------

    m.np = 25  # Number of possible trays
    m.c = 4    # Number of components
    m.lc = 1   # Light component
    m.hc = 4   # Heavy component

    #### Constant parameters
    m.Rgas = 8.314   # Ideal gas constant in J/mol K
    m.Tref = 298.15  # Reference temperature in K

    #### Product specifications
    m.xspec_lc = 0.99      # Final liquid composition for methanol (1)
    m.xspec_hc = 0.99      # Final liquid composition for butanol (4)
    m.xspec_inter2 = 0.99  # Final liquid composition for ethanol (2)
    m.xspec_inter3 = 0.99  # Final liquid composition for propanol (3)
    m.Ddes = 50  # Final flowrate in distillate in mol/s
    m.Bdes = 50  # Final flowrate in bottoms in mol/s
    m.Sdes = 50  # Final flowrate in side product streams in mol/s

    #### Known initial values
    m.Fi = m.Ddes + m.Bdes + 2 * m.Sdes  # Side feed flowrate in mol/s
    m.Vi = 400  # Initial value for vapor flowrate in mol/s
    m.Li = 400  # Initial value for liquid flowrate in mol/s

    m.Tf = 358  # Side feed temperature in K

    m.Preb = 1.2   # Reboiler pressure in bar
    m.Pbot = 1.12  # Bottom-most tray pressure in bar
    m.Ptop = 1.08  # Top-most tray pressure in bar
    m.Pcon = 1.05  # Condenser pressure in bar
    m.Pf = 1.02

    m.rr0 = 0.893  # Internal reflux ratio initial value
    m.bu0 = 0.871  # Internal boilup ratio initial value

    #### Scaling factors
    m.Hscale = 1e3
    m.Qscale = 1e-3

    #### Constants for the calculation of liquid heat capacity
    m.cpc = {}   # Constant 1 for liquid heat capacity
    m.cpc2 = {}  # Constant 2 for liquid heat capacity
    m.cpc[1] = m.Rgas
    m.cpc[2] = 1
    m.cpc2['A', 1] = 1 / 100
    m.cpc2['B', 1] = 1 / 1e4
    m.cpc2['A', 2] = 1
    m.cpc2['B', 2] = 1

    # ------------------------------------------------------------------
    # Physical Properties
    #
    # Notation:
    # MW ........................ molecular weight in g/gmol
    # TB ........................ boiling point temperature in K
    # TC ........................ critical temperature in K
    # PC ........................ critical pressure in bar
    # w ......................... acentric factor
    # lden ...................... liquid density in g/m3
    # dHvap ..................... heat of vaporization in J/mol
    # vpA, vpB, vpC, and vpD .... vapor pressure constants
    # cpA, cpB, cpC, and cpD .... heat capacity constants J/mol:
    #                             1 for liquid and 2 for vapor phase
    #
    # Reference A: R.C. Reid, J.M. Prausnitz and B.E. Poling,
    # "The Properties of Gases and Liquids", 1987 and 2004 Eds.
    #
    # ------------------------------------------------------------------

    m.prop = {}  # Properties of components
    cpL = {}     # Ruzicka-D method for liquid heat capacity calculation
                 # (Reference A, page 6.20)
    sumA = {}
    sumB = {}
    sumC = {}
    cpL['a', 'C(H3)(C)'] = 4.19845
    cpL['b', 'C(H3)(C)'] = -0.312709
    cpL['c', 'C(H3)(C)'] = 0.178609
    cpL['a', 'C(H2)(C2)'] = 2.7345
    cpL['b', 'C(H2)(C2)'] = 0.122732
    cpL['c', 'C(H2)(C2)'] = -0.123482
    cpL['a', 'C(H2)(C)(O)'] = 0.517007
    cpL['b', 'C(H2)(C)(O)'] = 1.26631
    cpL['c', 'C(H2)(C)(O)'] = -0.0939713
    cpL['a', 'O(H)(C)'] = 16.1555
    cpL['b', 'O(H)(C)'] = -11.938
    cpL['c', 'O(H)(C)'] = 2.85117
    cpL['a', 'C(H3)(O)'] = 3.70344
    cpL['b', 'C(H3)(O)'] = -1.12884
    cpL['c', 'C(H3)(O)'] = 0.51239
    sumA[1] = (cpL['a', 'C(H3)(O)'] + cpL['a', 'O(H)(C)'])
    sumB[1] = (cpL['b', 'C(H3)(O)'] + cpL['b', 'O(H)(C)'])
    sumC[1] = (cpL['c', 'C(H3)(O)'] + cpL['c', 'O(H)(C)'])
    sumA[2] = (cpL['a', 'C(H3)(C)'] + cpL['a', 'C(H2)(C)(O)'] + cpL['a', 'O(H)(C)'])
    sumB[2] = (cpL['b', 'C(H3)(C)'] + cpL['b', 'C(H2)(C)(O)'] + cpL['b', 'O(H)(C)'])
    sumC[2] = (cpL['c', 'C(H3)(C)'] + cpL['c', 'C(H2)(C)(O)'] + cpL['c', 'O(H)(C)'])
    sumA[3] = (cpL['a', 'C(H3)(C)'] + cpL['a', 'C(H2)(C2)'] + cpL['a', 'C(H2)(C)(O)'] + cpL['a', 'O(H)(C)'])
    sumB[3] = (cpL['b', 'C(H3)(C)'] + cpL['b', 'C(H2)(C2)'] + cpL['b', 'C(H2)(C)(O)'] + cpL['b', 'O(H)(C)'])
    sumC[3] = (cpL['c', 'C(H3)(C)'] + cpL['c', 'C(H2)(C2)'] + cpL['c', 'C(H2)(C)(O)'] + cpL['c', 'O(H)(C)'])
    sumA[4] = (cpL['a', 'C(H3)(C)'] + 2 * cpL['a', 'C(H2)(C2)'] + cpL['a', 'C(H2)(C)(O)'] + cpL['a', 'O(H)(C)'])
    sumB[4] = (cpL['b', 'C(H3)(C)'] + 2 * cpL['b', 'C(H2)(C2)'] + cpL['b', 'C(H2)(C)(O)'] + cpL['b', 'O(H)(C)'])
    sumC[4] = (cpL['c', 'C(H3)(C)'] + 2 * cpL['c', 'C(H2)(C2)'] + cpL['c', 'C(H2)(C)(O)'] + cpL['c', 'O(H)(C)'])

    ## Methanol: component 1
    m.prop[1, 'MW'] = 32.042
    m.prop[1, 'TB'] = 337.7
    m.prop[1, 'TC'] = 512.6
    m.prop[1, 'PC'] = 80.9
    m.prop[1, 'w'] = 0.556
    m.prop[1, 'lden'] = 792e3
    m.prop[1, 'dHvap'] = 38.376e3
    m.prop[1, 'vpA'] = -8.54796
    m.prop[1, 'vpB'] = 0.76982
    m.prop[1, 'vpC'] = -3.10850
    m.prop[1, 'vpD'] = 1.54481
    m.prop[1, 'cpA', 1] = sumA[1]
    m.prop[1, 'cpB', 1] = sumB[1]
    m.prop[1, 'cpC', 1] = sumC[1]
    m.prop[1, 'cpD', 1] = 0
    m.prop[1, 'cpA', 2] = 2.115e1
    m.prop[1, 'cpB', 2] = 7.092e-2
    m.prop[1, 'cpC', 2] = 2.587e-5
    m.prop[1, 'cpD', 2] = -2.852e-8

    ## Ethanol: component 2
    m.prop[2, 'MW'] = 46.069
    m.prop[2, 'TB'] = 351.4
    m.prop[2, 'TC'] = 513.9
    m.prop[2, 'PC'] = 61.4
    m.prop[2, 'w'] = 0.644
    m.prop[2, 'lden'] = 789.3e3
    m.prop[2, 'dHvap'] = 42.698e3
    m.prop[2, 'vpA'] = -8.51838
    m.prop[2, 'vpB'] = 0.34163
    m.prop[2, 'vpC'] = -5.73683
    m.prop[2, 'vpD'] = 8.32581
    m.prop[2, 'cpA', 1] = sumA[2]
    m.prop[2, 'cpB', 1] = sumB[2]
    m.prop[2, 'cpC', 1] = sumC[2]
    m.prop[2, 'cpD', 1] = 0
    m.prop[2, 'cpA', 2] = 9.014
    m.prop[2, 'cpB', 2] = 2.141e-1
    m.prop[2, 'cpC', 2] = -8.390e-5
    m.prop[2, 'cpD', 2] = 1.373e-9

    ## Propanol: component 3
    m.prop[3, 'MW'] = 60.096
    m.prop[3, 'TB'] = 370.3
    m.prop[3, 'TC'] = 536.8
    m.prop[3, 'PC'] = 51.7
    m.prop[3, 'w'] = 0.623
    m.prop[3, 'lden'] = 804e3
    m.prop[3, 'dHvap'] = 47.763e3
    m.prop[3, 'vpA'] = -8.05594
    m.prop[3, 'vpB'] = 4.25183e-2
    m.prop[3, 'vpC'] = -7.51296
    m.prop[3, 'vpD'] = 6.89004
    m.prop[3, 'cpA', 1] = sumA[3]
    m.prop[3, 'cpB', 1] = sumB[3]
    m.prop[3, 'cpC', 1] = sumC[3]
    m.prop[3, 'cpD', 1] = 0
    m.prop[3, 'cpA', 2] = 2.47
    m.prop[3, 'cpB', 2] = 3.325e-1
    m.prop[3, 'cpC', 2] = -1.855e-4
    m.prop[3, 'cpD', 2] = 4.296e-8

    ## Butanol: component 4
    m.prop[4, 'MW'] = 74.123
    m.prop[4, 'TB'] = 390.9
    m.prop[4, 'TC'] = 563.1
    m.prop[4, 'PC'] = 44.2
    m.prop[4, 'w'] = 0.593
    m.prop[4, 'lden'] = 810e3
    m.prop[4, 'dHvap'] = 52.607e3
    m.prop[4, 'vpA'] = -8.00756
    m.prop[4, 'vpB'] = 0.53783
    m.prop[4, 'vpC'] = -9.34240
    m.prop[4, 'vpD'] = 6.68692
    m.prop[4, 'cpA', 1] = sumA[4]
    m.prop[4, 'cpB', 1] = sumB[4]
    m.prop[4, 'cpC', 1] = sumC[4]
    m.prop[4, 'cpD', 1] = 0
    m.prop[4, 'cpA', 2] = 3.266
    m.prop[4, 'cpB', 2] = 4.18e-1
    m.prop[4, 'cpC', 2] = -2.242e-4
    m.prop[4, 'cpD', 2] = 4.685e-8

    return m

def getHourlyWeather(self, keyword, temp, last_hour):

    # Variables
    conditions = []
    weather = {}

    fio = self.helper.getFio(keyword, temp)  # Getting fio object

    if fio.has_hourly() is True:
        hourly = FIOHourly.FIOHourly(fio)

        # Getting weather forecast for the next hours
        for hour in xrange(1, last_hour):
            for item in hourly.get_hour(hour).keys():
                # Parsing data from the hourly fio object and adding it to the weather dictionary
                if item == "icon":
                    weather[item] = unicode(hourly.get_hour(hour)[item])
                if item == "summary":
                    weather[item] = unicode(hourly.get_hour(hour)[item])
                if item == "temperature":
                    if temp == "f":
                        weather[item] = str(hourly.get_hour(hour)[item]).split(".")[0] + "° F"
                    else:
                        weather[item] = str(hourly.get_hour(hour)[item]).split(".")[0] + "° C"
                if item == "humidity":
                    weather[item] = str(hourly.get_hour(hour)[item] * 100).split(".")[0] + "%"
                if item == "time":
                    weather[item] = self.helper.getDateForWeather(hourly.get_hour(hour)[item])
                if item == "precipProbability":
                    weather[item] = str(hourly.get_hour(hour)[item] * 100).split(".")[0] + "%"
                if item == "windSpeed":
                    windSpeed = unicode(hourly.get_hour(hour)[item])
                if item == "windBearing":
                    windBearing = unicode(hourly.get_hour(hour)[item])
                    windBearing = self.helper.convertWindBearing(windBearing)
                    weather["wind"] = windBearing + " " + windSpeed + " mph"
                if item == "cloudCover":
                    weather[item] = str(hourly.get_hour(hour)[item] * 100).split(".")[0] + "%"

            # Populating conditions array with the weather dictionary
            conditions.append(weather)
            weather = {}
    else:
        return 'No hourly data'
    return conditions
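A self-contained sketch of the percent/degree string formatting used in the loop above, with hard-coded values standing in for the python-forecast.io objects:

hour_data = {'temperature': 18.6, 'humidity': 0.5}
entry = {
    'temperature': str(hour_data['temperature']).split('.')[0] + '° C',
    'humidity': str(hour_data['humidity'] * 100).split('.')[0] + '%',
}
print(entry)  # {'temperature': '18° C', 'humidity': '50%'}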
+ \"%\"\n\n\t\t\t\t# Populating conditions array with weather dicitonary\n\t\t\t\tconditions.append(weather)\n\t\t\t\tweather = {}\n\t\telse:\n\t\t\treturn 'No hourly data'\n\t\treturn conditions", "def mon_maker():\r\n random_mon = {'eyeratio':0.2, 'eyeL':30,\r\n 'mouthratio':0.8, 'mouthL':30,\r\n 'headL':40, 'headA':15,\r\n 'cheekL':25, 'cheekA':45,\r\n 'chinL': 30, 'chinA':90\r\n }\r\n return random_mon", "def Kweather():\n while True:\n hr = int(datetime.datetime.now().strftime(\"%H\"))\n if hr == 23:\n from weather import Weather, Unit\n weather = Weather(unit=Unit.CELSIUS)\n lookup = weather.lookup_by_location('Taipei')\n condition = lookup.print_obj\n code = condition[\"item\"][\"forecast\"][1][\"text\"]\n hightemp = condition[\"item\"][\"forecast\"][1][\"high\"]\n lowtemp = condition[\"item\"][\"forecast\"][1][\"low\"]\n \n print(hightemp,lowtemp,code)\n #Warning\n msg = \"\"\n if int(hightemp) > 32:\n msg = msg + \"ๆ˜Žๅคฉๆบซๅบฆ: \" + hightemp + \" ๆ—ฉไธŠๅฏ่ƒฝๆœƒๅพˆ็†ฑๅ“ฆ, ๆ•ฒ้ผปๅฏไปฅ็ฉฟๅฐ‘ไธ€้ปž \"\n if int(lowtemp) < 15:\n msg = msg + \"ๆ˜Žๅคฉๆบซๅบฆ: \" + lowtemp + \" ๆœƒๅพˆๅ†ทๅ“ฆ, ๆ•ฒ้ผป่ฆ่จ˜ๅพ—ๅคš็ฉฟไธ€้ปž\"\n if \"Rain\" in code or \"Thunder\" in code or \"Showers\" in code:\n msg = msg + \"ๆ˜Žๅคฉๆœƒไธ‹้›จ, ๆ•ฒ้ผป่จ˜ๅพ—ๅธถๅ‚˜\"\n if msg != \"\":\n print(msg)\n SendMsg(msg)\n time.sleep(60*60)", "def temperature() -> float:", "def generate_trajectories():\n\n setup_timestamp_logging()\n\n logger = logging.getLogger()\n\n substance = Substance.from_components('C(C(C(C(C(F)(F)Br)(F)F)(F)F)(F)F)(C(C(C(F)(F)F)(F)F)(F)F)(F)F')\n\n logger.info('Building system.')\n\n build_system = BuildSmirnoffSystem('build_system')\n build_system.coordinate_file_path = 'coords.pdb'\n build_system.substance = substance\n build_system.force_field_path = 'smirnoff99Frosst-1.1.0.offxml'\n build_system.execute('', None)\n\n logger.info('System built.')\n\n production_simulation = RunOpenMMSimulation(f'production_simulation')\n production_simulation.steps_per_iteration = 500\n production_simulation.output_frequency = 1\n production_simulation.timestep = 2.0 * unit.femtosecond\n production_simulation.thermodynamic_state = ThermodynamicState(temperature=298.15*unit.kelvin,\n pressure=1.0*unit.atmosphere)\n production_simulation.input_coordinate_file = 'coords.pdb'\n production_simulation.system_path = 'system.xml'\n\n compute_resources = ComputeResources(number_of_threads=4)\n\n logger.info(f'Simulation started.')\n production_simulation_schema = production_simulation.schema\n production_simulation.execute('', compute_resources)\n production_simulation.schema = production_simulation_schema\n logger.info(f'Simulation finished.')", "def make_melons(melon_types):\n\n # Fill in the rest\n melon_dict = make_melon_type_lookup(make_melon_types())\n \n all_melon_types = []\n melon_1 = Melon(melon_dict['yw'],8,7,2,'Sheila')\n melon_2 = Melon(melon_dict['yw'],3,4,2,'Sheila')\n melon_3 = Melon(melon_dict['yw'],9,8,3,'Sheila')\n melon_4 = Melon(melon_dict['cas'],10,6,35,'Sheila')\n melon_5 = Melon(melon_dict['cren'],8,9,35,'Michael')\n melon_6 = Melon(melon_dict['cren'],8,2,35,'Michael')\n melon_7 = Melon(melon_dict['cren'],2,3,4,'Michael')\n melon_8 = Melon(melon_dict['musk'],6,7,4,'Michael')\n melon_9 = Melon(melon_dict['yw'],7,10,3,'Sheila')\n\n all_melon_types.append(melon_1)\n all_melon_types.append(melon_2)\n all_melon_types.append(melon_3)\n all_melon_types.append(melon_4)\n all_melon_types.append(melon_5)\n all_melon_types.append(melon_6)\n all_melon_types.append(melon_7)\n all_melon_types.append(melon_8)\n 
all_melon_types.append(melon_9)\n\n return all_melon_types", "def rainfall_series(self):\n\n # assign local temporal variables\n datatype = 'strds'\n increment = str(self.rain_interval)+\" minutes\"\n raster = 'raster'\n rain_excess = 'rain_excess'\n net_difference = 'net_difference'\n #iterations = sum(1 for row in precip)\n\n # create a raster space time dataset\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.elevation_timeseries,\n title=self.elevation_title,\n description=self.elevation_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.depth_timeseries,\n title=self.depth_title,\n description=self.depth_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.erdep_timeseries,\n title=self.erdep_title,\n description=self.erdep_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.flux_timeseries,\n title=self.flux_title,\n description=self.flux_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.difference_timeseries,\n title=self.difference_title,\n description=self.difference_description,\n overwrite=True)\n\n # register the initial digital elevation model\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.elevation_timeseries,\n maps=self.elevation,\n start=self.start,\n increment=increment,\n flags='i',\n overwrite=True)\n\n # create evolution object\n evol = Evolution(\n elevation=self.elevation,\n precipitation=self.precipitation,\n start=self.start,\n rain_intensity=self.rain_intensity,\n rain_interval=self.rain_interval,\n walkers=self.walkers,\n runoff=self.runoff,\n mannings=self.mannings,\n detachment=self.detachment,\n transport=self.transport,\n shearstress=self.shearstress,\n density=self.density,\n mass=self.mass,\n grav_diffusion=self.grav_diffusion,\n erdepmin=self.erdepmin,\n erdepmax=self.erdepmax,\n k_factor=self.k_factor,\n c_factor=self.c_factor,\n m=self.m,\n n=self.n,\n threads=self.threads,\n fill_depressions=self.fill_depressions)\n\n # open txt file with precipitation data\n with open(evol.precipitation) as csvfile:\n\n # check for header\n has_header = csv.Sniffer().has_header(csvfile.read(1024))\n\n # rewind\n csvfile.seek(0)\n\n # skip header\n if has_header:\n next(csvfile)\n\n # parse time and precipitation\n precip = csv.reader(csvfile, delimiter=',', skipinitialspace=True)\n\n # initial run\n initial = next(precip)\n evol.start = initial[0]\n evol.rain_intensity = 'rain_intensity'\n # compute rainfall intensity (mm/hr)\n # from rainfall observation (mm)\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_intensity}\"\n \"={rain_observation}\"\n \"/{rain_interval}\"\n \"*60.\".format(\n rain_intensity=evol.rain_intensity,\n rain_observation=float(initial[1]),\n rain_interval=self.rain_interval),\n overwrite=True)\n\n # determine mode and run model\n if self.mode == \"simwe_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.erosion_deposition()\n # remove relative timestamps\n # from r.sim.water and r.sim.sediment\n gscript.run_command(\n 'r.timestamp',\n map=depth,\n date='none')\n gscript.run_command(\n 'r.timestamp',\n map=erosion_deposition,\n date='none')\n\n elif self.mode == \"usped_mode\":\n (evolved_elevation, time, depth, 
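Usage sketch for make_melons above, assuming the MelonType/Melon classes and the make_melon_type_lookup helper from the same exercise are in scope (they are not shown here):

melons = make_melons(make_melon_types())
print(len(melons))  # 9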
def rainfall_series(self):

    # assign local temporal variables
    datatype = 'strds'
    increment = str(self.rain_interval) + " minutes"
    raster = 'raster'
    rain_excess = 'rain_excess'
    net_difference = 'net_difference'
    # iterations = sum(1 for row in precip)

    # create the raster space-time datasets
    gscript.run_command(
        't.create',
        type=datatype,
        temporaltype=self.temporaltype,
        output=self.elevation_timeseries,
        title=self.elevation_title,
        description=self.elevation_description,
        overwrite=True)
    gscript.run_command(
        't.create',
        type=datatype,
        temporaltype=self.temporaltype,
        output=self.depth_timeseries,
        title=self.depth_title,
        description=self.depth_description,
        overwrite=True)
    gscript.run_command(
        't.create',
        type=datatype,
        temporaltype=self.temporaltype,
        output=self.erdep_timeseries,
        title=self.erdep_title,
        description=self.erdep_description,
        overwrite=True)
    gscript.run_command(
        't.create',
        type=datatype,
        temporaltype=self.temporaltype,
        output=self.flux_timeseries,
        title=self.flux_title,
        description=self.flux_description,
        overwrite=True)
    gscript.run_command(
        't.create',
        type=datatype,
        temporaltype=self.temporaltype,
        output=self.difference_timeseries,
        title=self.difference_title,
        description=self.difference_description,
        overwrite=True)

    # register the initial digital elevation model
    gscript.run_command(
        't.register',
        type=raster,
        input=self.elevation_timeseries,
        maps=self.elevation,
        start=self.start,
        increment=increment,
        flags='i',
        overwrite=True)

    # create evolution object
    evol = Evolution(
        elevation=self.elevation,
        precipitation=self.precipitation,
        start=self.start,
        rain_intensity=self.rain_intensity,
        rain_interval=self.rain_interval,
        walkers=self.walkers,
        runoff=self.runoff,
        mannings=self.mannings,
        detachment=self.detachment,
        transport=self.transport,
        shearstress=self.shearstress,
        density=self.density,
        mass=self.mass,
        grav_diffusion=self.grav_diffusion,
        erdepmin=self.erdepmin,
        erdepmax=self.erdepmax,
        k_factor=self.k_factor,
        c_factor=self.c_factor,
        m=self.m,
        n=self.n,
        threads=self.threads,
        fill_depressions=self.fill_depressions)

    # open txt file with precipitation data
    with open(evol.precipitation) as csvfile:

        # check for header
        has_header = csv.Sniffer().has_header(csvfile.read(1024))

        # rewind
        csvfile.seek(0)

        # skip header
        if has_header:
            next(csvfile)

        # parse time and precipitation
        precip = csv.reader(csvfile, delimiter=',', skipinitialspace=True)

        # initial run
        initial = next(precip)
        evol.start = initial[0]
        evol.rain_intensity = 'rain_intensity'
        # compute rainfall intensity (mm/hr)
        # from rainfall observation (mm)
        gscript.run_command(
            'r.mapcalc',
            expression="{rain_intensity}"
                       "={rain_observation}"
                       "/{rain_interval}"
                       "*60.".format(
                           rain_intensity=evol.rain_intensity,
                           rain_observation=float(initial[1]),
                           rain_interval=self.rain_interval),
            overwrite=True)

        # determine mode and run model
        if self.mode == "simwe_mode":
            (evolved_elevation, time, depth, erosion_deposition,
             difference) = evol.erosion_deposition()
            # remove relative timestamps
            # from r.sim.water and r.sim.sediment
            gscript.run_command(
                'r.timestamp',
                map=depth,
                date='none')
            gscript.run_command(
                'r.timestamp',
                map=erosion_deposition,
                date='none')

        elif self.mode == "usped_mode":
            (evolved_elevation, time, depth, erosion_deposition,
             difference) = evol.usped()

        elif self.mode == "rusle_mode":
            (evolved_elevation, time, depth, sediment_flux,
             difference) = evol.rusle()

        else:
            raise RuntimeError(
                '{mode} mode does not exist'.format(mode=self.mode))

        # register the evolved maps
        gscript.run_command(
            't.register',
            type=raster,
            input=self.elevation_timeseries,
            maps=evolved_elevation,
            start=evol.start,
            increment=increment,
            flags='i',
            overwrite=True)
        gscript.run_command(
            't.register',
            type=raster,
            input=self.depth_timeseries,
            maps=depth,
            start=evol.start,
            increment=increment,
            flags='i',
            overwrite=True)
        try:
            gscript.run_command(
                't.register',
                type=raster,
                input=self.erdep_timeseries,
                maps=erosion_deposition,
                start=evol.start,
                increment=increment,
                flags='i',
                overwrite=True)
        except (NameError, CalledModuleError):
            pass
        try:
            gscript.run_command(
                't.register',
                type=raster,
                input=self.flux_timeseries,
                maps=sediment_flux,
                start=evol.start,
                increment=increment,
                flags='i', overwrite=True)
        except (NameError, CalledModuleError):
            pass
        gscript.run_command(
            't.register',
            type=raster,
            input=self.difference_timeseries,
            maps=difference,
            start=evol.start,
            increment=increment,
            flags='i',
            overwrite=True)

        # run the landscape evolution model for each rainfall record
        for row in precip:

            # update the elevation
            evol.elevation = evolved_elevation

            # update time
            evol.start = row[0]

            # compute rainfall intensity (mm/hr)
            # from rainfall observation (mm)
            rain_intensity = 'rain_intensity'
            gscript.run_command(
                'r.mapcalc',
                expression="{rain_intensity}"
                           "={rain_observation}"
                           "/{rain_interval}"
                           "*60.".format(
                               rain_intensity=rain_intensity,
                               rain_observation=float(row[1]),
                               rain_interval=self.rain_interval),
                overwrite=True)

            # derive excess water (mm/hr) from rainfall rate (mm/hr)
            # plus the depth (m) per rainfall interval (min)
            gscript.run_command(
                'r.mapcalc',
                expression="{rain_excess}"
                           "={rain_intensity}"
                           "+{depth}"
                           "/1000."
                           "/{rain_interval}"
                           "*60.".format(
                               rain_excess=rain_excess,
                               rain_intensity=rain_intensity,
                               depth=depth,
                               rain_interval=self.rain_interval),
                overwrite=True)

            # update excess rainfall
            gscript.run_command(
                'r.mapcalc',
                expression="{rain_intensity} = {rain_excess}".format(
                    rain_intensity='rain_intensity',
                    rain_excess=rain_excess),
                overwrite=True)
            evol.rain_intensity = rain_intensity

            # determine mode and run model
            if self.mode == "simwe_mode":
                (evolved_elevation, time, depth, erosion_deposition,
                 difference) = evol.erosion_deposition()
                # remove relative timestamps
                # from r.sim.water and r.sim.sediment
                gscript.run_command(
                    'r.timestamp',
                    map=depth,
                    date='none')
                gscript.run_command(
                    'r.timestamp',
                    map=erosion_deposition,
                    date='none')

            elif self.mode == "usped_mode":
                (evolved_elevation, time, depth, erosion_deposition,
                 difference) = evol.usped()

            elif self.mode == "rusle_mode":
                (evolved_elevation, time, depth, sediment_flux,
                 difference) = evol.rusle()

            else:
                raise RuntimeError(
                    '{mode} mode does not exist'.format(mode=self.mode))

            # register the evolved maps
            gscript.run_command(
                't.register',
                type=raster,
                input=self.elevation_timeseries,
                maps=evolved_elevation,
                start=evol.start,
                increment=increment,
                flags='i',
                overwrite=True)
            gscript.run_command(
                't.register',
                type=raster,
                input=self.depth_timeseries,
                maps=depth,
                start=evol.start,
                increment=increment,
                flags='i',
                overwrite=True)
            try:
                gscript.run_command(
                    't.register',
                    type=raster,
                    input=self.erdep_timeseries,
                    maps=erosion_deposition,
                    start=evol.start,
                    increment=increment,
                    flags='i',
                    overwrite=True)
            except (NameError, CalledModuleError):
                pass
            try:
                gscript.run_command(
                    't.register',
                    type=raster,
                    input=self.flux_timeseries,
                    maps=sediment_flux,
                    start=evol.start,
                    increment=increment,
                    flags='i', overwrite=True)
            except (NameError, CalledModuleError):
                pass
            gscript.run_command(
                't.register',
                type=raster,
                input=self.difference_timeseries,
                maps=difference,
                start=evol.start,
                increment=increment,
                flags='i',
                overwrite=True)

    # remove temporary maps
    gscript.run_command(
        'g.remove',
        type='raster',
        name=['rain_excess'],
        flags='f')

    # compute net elevation change
    gscript.run_command(
        'r.mapcalc',
        expression="{net_difference}"
                   "= {evolved_elevation}-{elevation}".format(
                       net_difference=net_difference,
                       elevation=self.elevation,
                       evolved_elevation=evol.elevation),
        overwrite=True)
    gscript.write_command(
        'r.colors',
        map=net_difference,
        rules='-',
        stdin=difference_colors)

def create_dataset(n_power_steps, n_initial_conditions, n_time_steps, power_system):
    data_ops = input_data_initialised(n_ops=n_power_steps * n_initial_conditions,
                                      power_system=power_system)

    power_values = np.linspace(0.0, 0.2, n_power_steps)
    delta_initial = np.linspace(-np.pi / 2, np.pi / 2, n_initial_conditions)

    power_ops_grid, delta_ops_grid = np.meshgrid(power_values, delta_initial)

    power_ops = power_ops_grid.reshape((-1, 1))
    delta_ops = delta_ops_grid.reshape((-1, 1))

    data_ops.update(time=np.ones(power_ops.shape) * power_system['t_max'],
                    power=power_ops,
                    states_initial=np.concatenate([delta_ops, power_system['omega_0'] +
                                                   np.ones(delta_ops.shape) * 0.1], axis=1))

    data_ops = evaluate_op_trajectory(data_ops, n_time_steps=n_time_steps, power_system=power_system)

    data_ops = calculate_data_ode_right_hand_side(data_ops, power_system)

    return data_ops
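A self-contained sketch of the operating-point grid built above: meshgrid pairs every power value with every initial angle, and reshape((-1, 1)) flattens each grid into one column. The small sizes here are illustrative only:

import numpy as np

power_values = np.linspace(0.0, 0.2, 3)
delta_initial = np.linspace(-np.pi / 2, np.pi / 2, 2)
power_grid, delta_grid = np.meshgrid(power_values, delta_initial)
print(power_grid.reshape((-1, 1)).shape)  # (6, 1): one row per (power, delta) pair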
def make_temperature_map(time: u.s, field, instr, **kwargs):
    plot_settings = {'cmap': cm.get_cmap('inferno')}
    plot_settings.update(kwargs.get('plot_settings', {}))
    bins, bin_range = instr.make_detector_array(field)
    visible = is_visible(instr.total_coordinates, instr.observer_coordinate)
    hist_coordinates, _, _ = np.histogram2d(instr.total_coordinates.Tx.value,
                                            instr.total_coordinates.Ty.value,
                                            bins=(bins.x.value, bins.y.value),
                                            range=(bin_range.x.value, bin_range.y.value),
                                            weights=visible)
    with h5py.File(instr.counts_file, 'r') as hf:
        try:
            i_time = np.where(u.Quantity(hf['time'],
                                         get_keys(hf['time'].attrs, ('unit', 'units'))) == time)[0][0]
        except IndexError:
            raise IndexError(f'{time} is not a valid time in observing time for {instr.name}')
        weights = np.array(hf['electron_temperature'][i_time, :])
        units = u.Unit(get_keys(hf['electron_temperature'].attrs, ('unit', 'units')))
    hist, _, _ = np.histogram2d(instr.total_coordinates.Tx.value,
                                instr.total_coordinates.Ty.value,
                                bins=(bins.x.value, bins.y.value),
                                range=(bin_range.x.value, bin_range.y.value),
                                weights=weights * visible)
    hist /= np.where(hist_coordinates == 0, 1, hist_coordinates)
    meta = instr.make_fits_header(field, instr.channels[0])
    del meta['wavelnth']
    del meta['waveunit']
    meta['bunit'] = units.to_string()
    meta['detector'] = 'Electron Temperature'
    meta['comment'] = 'Column-averaged electron temperature calculated by synthesizAR'

    return GenericMap(hist.T, meta, plot_settings=plot_settings)

def __init__(self, alt=0, temp_offset=0):
    WorkingAtmosphere.__init__(self, alt)
    # self.temperature_offset = tOffset
    self.Temperature_offset = temp_offset
    self.make_environment()

def thermodynamic_temperature(frequency, T_cmb=None):
    nu = frequency.to(si.GHz, spectral())

    if T_cmb is None:
        from astropy.cosmology import default_cosmology

        T_cmb = default_cosmology.get().Tcmb0

    def f(nu, T_cmb=T_cmb):
        x = _si.h * nu / _si.k_B / T_cmb
        return x**2 * np.exp(x) / np.expm1(x) ** 2

    def convert_Jy_to_K(x_jybm):
        factor = (f(nu) * 2 * _si.k_B * si.K * nu**2 / _si.c**2).to_value(astrophys.Jy)
        return x_jybm / factor

    def convert_K_to_Jy(x_K):
        factor = (astrophys.Jy / (f(nu) * 2 * _si.k_B * nu**2 / _si.c**2)).to_value(si.K)
        return x_K / factor

    return Equivalency(
        [(astrophys.Jy / si.sr, si.K, convert_Jy_to_K, convert_K_to_Jy)],
        "thermodynamic_temperature",
        {"frequency": frequency, "T_cmb": T_cmb},
    )

def get_phoenix_atmosphere(metallicity=0, temperature=5000, gravity=4):
    sp = pysynphot.Icat('phoenix', temperature, metallicity, gravity)

    # Do some error checking
    idx = np.where(sp.flux != 0)[0]
    if len(idx) == 0:
        print('Could not find PHOENIX BT-Settl (Allard+ 2011) atmosphere model for')
        print('  temperature = %d' % temperature)
        print('  metallicity = %.1f' % metallicity)
        print('  log gravity = %.1f' % gravity)

    return sp

def T_naught(z, h, OM, OB):
    T0 = 28.5 * ((1.0 + z) / 10.0)**(0.5) * OB / 0.042 * h / 0.73 * (0.24 / OM)**(0.5)
    return T0

def process_weather(forecast_file):
    # Load json data file
    with open(forecast_file) as json_file:
        json_data = json.load(json_file)

    # Set variables, dictionaries and lists
    days_list = []
    temp_dict = {}
    daily_dict = {}

    num_items = 0
    total_sum_min = 0
    total_sum_max = 0
    days = len(json_data['DailyForecasts'])
    days_list = days_in_data(days)

    t_temp_min = 100
    t_temp_max = 0

    # Pull through the data
    for day in days_list:
        num_items += 1
        date = convert_date(json_data['DailyForecasts'][day]['Date'])
        min_temp = convert_f_to_c(json_data['DailyForecasts'][day]['Temperature']['Minimum']['Value'])
        total_sum_min += min_temp
        max_temp = convert_f_to_c(json_data['DailyForecasts'][day]['Temperature']['Maximum']['Value'])
        total_sum_max += max_temp
        day_desc = json_data['DailyForecasts'][day]['Day']['LongPhrase']
        chance_rain_day = json_data['DailyForecasts'][day]['Day']['RainProbability']
        night_desc = json_data['DailyForecasts'][day]['Night']['LongPhrase']
        chance_rain_night = json_data['DailyForecasts'][day]['Night']['RainProbability']

        if min_temp < t_temp_min:
            t_temp_min = min_temp
            t_temp_mindate = date
        if max_temp > t_temp_max:
            t_temp_max = max_temp
            t_temp_maxdate = date

        # indices:       0     1         2         3             4                5           6
        daily_dict[day] = [date, min_temp, max_temp, day_desc, chance_rain_day, night_desc, chance_rain_night]

    # Calculate minimum, maximum and mean temperatures
    mean_min = format_temperature(calculate_mean(total_sum_min, num_items))
    mean_max = format_temperature(calculate_mean(total_sum_max, num_items))

    # Format minimum and maximum temperatures
    min_temp_format = format_temperature(t_temp_min)
    max_temp_format = format_temperature(t_temp_max)

    # Combine string messages to return to the user
    str_Output = ""
    Output_gen1 = (f"{num_items} Day Overview\n")
    Output_gen2 = (f"    The lowest temperature will be {min_temp_format}, and will occur on {t_temp_mindate}.\n")
    Output_gen3 = (f"    The highest temperature will be {max_temp_format}, and will occur on {t_temp_maxdate}.\n")
    Output_gen4 = (f"    The average low this week is {mean_min}.\n")
    Output_gen5 = (f"    The average high this week is {mean_max}.\n")
    str_Output = Output_gen1 + Output_gen2 + Output_gen3 + Output_gen4 + Output_gen5
    for key, value in daily_dict.items():
        Output_daily0 = ("\n")
        Output_daily1 = (f"-------- {value[0]} --------\n")
        Output_daily2 = (f"Minimum Temperature: {format_temperature(value[1])}\n")
        Output_daily3 = (f"Maximum Temperature: {format_temperature(value[2])}\n")
        Output_daily4 = (f"Daytime: {value[3]}\n")
        Output_daily5 = (f"    Chance of rain: {value[4]}%\n")
        Output_daily6 = (f"Nighttime: {value[5]}\n")
        Output_daily7 = (f"    Chance of rain: {value[6]}%\n")
        str_Output = str_Output + Output_daily0 + Output_daily1 + Output_daily2 + Output_daily3 + Output_daily4 + Output_daily5 + Output_daily6 + Output_daily7
    str_Output = str_Output + "\n"

    return str_Output
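Hypothetical driver for process_weather above; the file path is an assumption standing in for an AccuWeather-style daily forecast JSON:

if __name__ == "__main__":
    print(process_weather("data/forecast_8days.json"))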
def get_properties_technical_systems(locator, prop_hvac):

    prop_emission_heating = pd.read_excel(locator.get_database_air_conditioning_systems(), 'HEATING')
    prop_emission_cooling = pd.read_excel(locator.get_database_air_conditioning_systems(), 'COOLING')
    prop_emission_dhw = pd.read_excel(locator.get_database_air_conditioning_systems(), 'HOT_WATER')
    prop_emission_control_heating_and_cooling = pd.read_excel(locator.get_database_air_conditioning_systems(),
                                                              'CONTROLLER')
    prop_ventilation_system_and_control = pd.read_excel(locator.get_database_air_conditioning_systems(), 'VENTILATION')
    df_emission_heating = prop_hvac.merge(prop_emission_heating, left_on='type_hs', right_on='code')
    df_emission_cooling = prop_hvac.merge(prop_emission_cooling, left_on='type_cs', right_on='code')
    df_emission_control_heating_and_cooling = prop_hvac.merge(prop_emission_control_heating_and_cooling,
                                                              left_on='type_ctrl', right_on='code')
    df_emission_dhw = prop_hvac.merge(prop_emission_dhw, left_on='type_dhw', right_on='code')
    df_ventilation_system_and_control = prop_hvac.merge(prop_ventilation_system_and_control, left_on='type_vent',
                                                        right_on='code')
    fields_emission_heating = ['Name', 'type_hs', 'type_cs', 'type_dhw', 'type_ctrl', 'type_vent', 'heat_starts',
                               'heat_ends', 'cool_starts', 'cool_ends', 'class_hs', 'convection_hs',
                               'Qhsmax_Wm2', 'dThs_C', 'Tshs0_ahu_C', 'dThs0_ahu_C', 'Th_sup_air_ahu_C', 'Tshs0_aru_C',
                               'dThs0_aru_C', 'Th_sup_air_aru_C', 'Tshs0_shu_C', 'dThs0_shu_C']
    fields_emission_cooling = ['Name', 'Qcsmax_Wm2', 'dTcs_C', 'Tscs0_ahu_C', 'dTcs0_ahu_C', 'Tc_sup_air_ahu_C',
                               'Tscs0_aru_C', 'dTcs0_aru_C', 'Tc_sup_air_aru_C', 'Tscs0_scu_C', 'dTcs0_scu_C',
                               'class_cs', 'convection_cs']
    fields_emission_control_heating_and_cooling = ['Name', 'dT_Qhs', 'dT_Qcs']
    fields_emission_dhw = ['Name', 'Tsww0_C', 'Qwwmax_Wm2']
    fields_system_ctrl_vent = ['Name', 'MECH_VENT', 'WIN_VENT', 'HEAT_REC', 'NIGHT_FLSH', 'ECONOMIZER']

    result = df_emission_heating[fields_emission_heating].merge(df_emission_cooling[fields_emission_cooling],
                                                                on='Name').merge(
        df_emission_control_heating_and_cooling[fields_emission_control_heating_and_cooling],
        on='Name').merge(df_emission_dhw[fields_emission_dhw],
                         on='Name').merge(df_ventilation_system_and_control[fields_system_ctrl_vent], on='Name')
    # verify hvac and ventilation combination
    verify_hvac_system_combination(result, locator)
    # read region-specific control parameters (identical for all buildings), i.e. heating and cooling season
    result['has-heating-season'] = result.apply(lambda x: verify_has_season(x['Name'],
                                                                            x['heat_starts'],
                                                                            x['heat_ends']), axis=1)
    result['has-cooling-season'] = result.apply(lambda x: verify_has_season(x['Name'],
                                                                            x['cool_starts'],
                                                                            x['cool_ends']), axis=1)

    # verify seasons do not overlap
    result['overlap-season'] = result.apply(lambda x: verify_overlap_season(x['Name'],
                                                                            x['has-heating-season'],
                                                                            x['has-cooling-season'],
                                                                            x['heat_starts'],
                                                                            x['heat_ends'],
                                                                            x['cool_starts'],
                                                                            x['cool_ends']), axis=1)
    return result

def set_temperature(world_map, sea_level):
    print("- Processing temperature")
    temperature_map = world_map['temperature']
    elevation_map = world_map['elevation']
    size_y = world_map.shape[1]

    for (x, y), z in np.ndenumerate(temperature_map):
        latitude = y / size_y
        elevation = elevation_map[x, y]

        value = math.e ** (-5 * (latitude ** 2))
        temperature_map[x, y] = value * get_temperature(elevation, sea_level)

def simulate(init_dict, unobserved=False):

    # Antibugging
    assert (isinstance(init_dict, dict))
    assert (unobserved in [True, False])

    # Ensure recomputability
    np.random.seed(123)

    # Distribute information
    num_agents = init_dict['BASICS']['agents']
    source = init_dict['BASICS']['source']

    Y1_coeffs = init_dict['TREATED']['all']
    Y0_coeffs = init_dict['UNTREATED']['all']

    C_coeffs = np.array(init_dict['COST']['all'])

    U1_sd = init_dict['TREATED']['sd']
    U0_sd = init_dict['UNTREATED']['sd']

    V_sd = init_dict['COST']['sd']

    U1V_rho = init_dict['DIST']['rho1']
    U0V_rho = init_dict['DIST']['rho0']

    # Auxiliary objects
    U1V_cov = U1V_rho * U1_sd * V_sd
    U0V_cov = U0V_rho * U0_sd * V_sd

    num_covars_out = Y1_coeffs.shape[0]
    num_covars_cost = C_coeffs.shape[0]

    # Simulate observables
    means = np.tile(0.0, num_covars_out)
    covs = np.identity(num_covars_out)

    X = np.random.multivariate_normal(means, covs, num_agents)

    means = np.tile(0.0, num_covars_cost)
    covs = np.identity(num_covars_cost)

    Z = np.random.multivariate_normal(means, covs, num_agents)

    # Add intercepts. The first column of the X and Z matrix always contains
    # the intercept term. This is exploited throughout the code.
    Z[:, 0], X[:, 0] = 1.0, 1.0

    # Construct index of observable characteristics
    Y1_level = np.dot(Y1_coeffs, X.T)
    Y0_level = np.dot(Y0_coeffs, X.T)
    C_level = np.dot(C_coeffs, Z.T)

    # Simulate unobservables
    means = np.tile(0.0, 3)
    vars_ = [U1_sd**2, U0_sd**2, V_sd**2]
    covs = np.diag(vars_)

    covs[0, 2] = U1V_cov
    covs[2, 0] = covs[0, 2]

    covs[1, 2] = U0V_cov
    covs[2, 1] = covs[1, 2]

    U = np.random.multivariate_normal(means, covs, num_agents)

    # Simulate endogenous variables
    Y1 = np.tile(np.nan, num_agents)
    Y0 = np.tile(np.nan, num_agents)
    Y = np.tile(np.nan, num_agents)

    D = np.tile(np.nan, num_agents)

    for i in range(num_agents):

        # Select individual unobservables and observables
        u1, u0, v = U[i, 0], U[i, 1], U[i, 2]

        y1_idx, y0_idx, c_idx = Y1_level[i], Y0_level[i], C_level[i]

        # Decision rule
        expected_benefits = y1_idx - y0_idx
        cost = c_idx + v

        d = np.float((expected_benefits - cost > 0))

        # Potential outcomes
        y1, y0 = y1_idx + u1, y0_idx + u0

        # Observed outcomes
        y = d * y1 + (1.0 - d) * y0

        # Collect data matrices
        Y[i], Y0[i], Y1[i], D[i] = y, y1, y0, d

    # Check integrity of simulated data
    _check_integrity_simulate(Y1, Y0, Y, D)

    # Save to disk
    _write_out(Y, D, X, Z, source, unobserved, Y1, Y0)

    # Return selected features of data
    return Y1, Y0, D

def power_output_candidate_thermal_rule(_m, g, y, s, t):

    if y != m.Y.last() and t != m.T.last():
        return (- m.sigma_1[g, y, s, t] + m.sigma_3[g, y, s, t]
                + m.sigma_20[g, y, s, t] - m.sigma_20[g, y, s, t + 1]
                - m.sigma_23[g, y, s, t] + m.sigma_23[g, y, s, t + 1]
                - m.lamb[self.k(m, g), y, s, t]
                + ((m.DELTA[y] * m.RHO[y, s]) * (
                    m.C_MC[g, y] + ((m.EMISSIONS_RATE[g] - m.baseline[y]) * m.permit_price[y])))
                == 0)

    elif y != m.Y.last() and t == m.T.last():
        return (- m.sigma_1[g, y, s, t] + m.sigma_3[g, y, s, t]
                + m.sigma_20[g, y, s, t]
                - m.sigma_23[g, y, s, t]
                - m.lamb[self.k(m, g), y, s, t]
                + ((m.DELTA[y] * m.RHO[y, s]) * (
                    m.C_MC[g, y] + ((m.EMISSIONS_RATE[g] - m.baseline[y]) * m.permit_price[y])))
                == 0)

    elif y == m.Y.last() and t != m.T.last():
        return (- m.sigma_1[g, y, s, t] + m.sigma_3[g, y, s, t]
                + m.sigma_20[g, y, s, t] - m.sigma_20[g, y, s, t + 1]
                - m.sigma_23[g, y, s, t] + m.sigma_23[g, y, s, t + 1]
                - m.lamb[self.k(m, g), y, s, t]
                + ((m.DELTA[y] * m.RHO[y, s]) * (1 + (1 / m.INTEREST_RATE)) * (
                    m.C_MC[g, y] + ((m.EMISSIONS_RATE[g] - m.baseline[y]) * m.permit_price[y])))
                == 0)

    elif y == m.Y.last() and t == m.T.last():
        return (- m.sigma_1[g, y, s, t] + m.sigma_3[g, y, s, t]
                + m.sigma_20[g, y, s, t]
                - m.sigma_23[g, y, s, t]
                - m.lamb[self.k(m, g), y, s, t]
                + ((m.DELTA[y] * m.RHO[y, s]) * (1 + (1 / m.INTEREST_RATE)) * (
                    m.C_MC[g, y] + ((m.EMISSIONS_RATE[g] - m.baseline[y]) * m.permit_price[y])))
                == 0)

    else:
        raise Exception(f'Unhandled case: {g, y, s, t}')
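The (1 + 1/INTEREST_RATE) factor in the final-year branches above is, presumably, the usual end-of-horizon correction: the last modeled year is scaled by a perpetuity factor so it stands in for all later, identical years. A self-contained sketch of that factor:

def final_year_scaling(year, final_year, interest_rate):
    # Non-final years carry weight 1; the final year also carries
    # the tail 1/r of an infinite stream of identical years.
    return 1.0 + (1.0 / interest_rate if year == final_year else 0.0)

print(final_year_scaling(2030, 2030, 0.06))  # ~17.67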
def getTemperatureMeasurements(self):
    # self.board.readline()
    self.stop = False
    times = []
    temps = [[], [], []]

    # A synchronisation string containing the characters tx is sent before each set of
    # measurements; we ensure correct reading of the measurements by waiting for this string
    while str(self.board.readline()).strip('b\'\\rn') != 'tx':
        pass

    while not self.stop:
        # A synchronisation string containing the characters tx is sent before each set of measurements
        tx = self.board.readline()
        if str(tx).strip('b\'\\rn') == 'tx':
            rawData1 = self.board.readline()
            rawData2 = self.board.readline()
            rawData3 = self.board.readline()
            rawData4 = self.board.readline()

            timeStamp = str(rawData1).strip('b\'\\rn')
            temp1 = str(rawData2).strip('b\'\\rn')
            temp2 = str(rawData3).strip('b\'\\rn')
            temp3 = str(rawData4).strip('b\'\\rn')
            try:
                times.append(float(timeStamp) / 1000)
                temps[0].append(float(temp1) / 128)
                temps[1].append(float(temp2) / 128)
                temps[2].append(float(temp3) / 128)
                # print(f'\rtime: {float(timeStamp) / 1000:.2f} s, Temperature measured on sensor 1: {float(temp1) / 128:.2f} °C,'
                #       f'sensor 2: {float(temp2) / 128:.2f} °C, sensor 3: {float(temp3) / 128:.2f} °C', sep='', end='', flush=True)
            except:
                print(rawData1, rawData2, rawData3, rawData4)

        if self.stop:
            print('\nMeasurement finished...')

    self.data_stack[self.fetch_kinds[0]] = times
    self.data_stack[self.fetch_kinds[1]] = temps[0]
    self.data_stack[self.fetch_kinds[2]] = temps[1]
    self.data_stack[self.fetch_kinds[3]] = temps[2]

    if (len(self.data_stack['Sensor 1 Temp']) != len(times) or
            len(self.data_stack['Sensor 2 Temp']) != len(times) or
            len(self.data_stack['Sensor 3 Temp']) != len(times)):
        print("Warning: There may be some missing values!")

def test_electronic_type(get_pdos_generator_inputs):
    with pytest.raises(NotImplementedError):
        builder = PdosWorkChain.get_builder_from_protocol(
            **get_pdos_generator_inputs, electronic_type=ElectronicType.AUTOMATIC
        )
    builder = PdosWorkChain.get_builder_from_protocol(
        **get_pdos_generator_inputs, electronic_type=ElectronicType.INSULATOR
    )
    for namespace, occupations in zip((builder.scf, builder.nscf), ('fixed', 'tetrahedra')):
        parameters = namespace['pw']['parameters'].get_dict()
        assert parameters['SYSTEM']['occupations'] == occupations
        assert 'degauss' not in parameters['SYSTEM']
        assert 'smearing' not in parameters['SYSTEM']

def generate_tpx_features():

    labels = get_tpx_labels()
    labels_abs = get_tpx_labels_abs()
    labels_rel = get_tpx_labels_rel()
    labels_prop = get_tpx_labels_prop()
    labels_special = get_tpx_labels_special()

    labels.append("num_words")

    # read existing metadata
    md_table = pd.DataFrame.from_csv(wdir + md_csv, header=0)
    idnos = md_table.idno

    # create new data frame
    ht_fr = pd.DataFrame(columns=labels, index=idnos)

    # XPath expressions for TimeML requests
    namespaces = {'tei': 'http://www.tei-c.org/ns/1.0'}

    xpaths = get_tpx_xpaths()

    # loop through files to get HeidelTime results, first step: absolute values
    # (subsequent steps build on the absolute values)
    for file in glob.glob(ht_inpath):

        idno = os.path.basename(file)[0:6]
        xml = etree.parse(file)

        result = 0
        # calculate absolute feature values
        for label in labels_abs + labels_special:

            if label in xpaths:
                # apply xpaths if present
                xpath = xpaths[label]
                result = xml.xpath(xpath, namespaces=namespaces)

            else:
                # calculate features which cannot be determined directly with XPath
                xpath_dates = "//TIMEX3[@type='DATE']/@value"
                dates = xml.xpath(xpath_dates, namespaces=namespaces)

                # temporal distance between mentioned years and publication year of the novel
                if (label == "temp_dist"):
                    # get all date expressions with a year
                    years = []
                    for date in dates:
                        if re.match(r"^\d{4}-\d{2}-\d{2}", date):  # only year: bad results
                            years.append(date.split("-")[0])
                    # get the median of the years mentioned in the text
                    if years:
                        years = np.array(years).astype(np.float)

                        med = np.median(years)  # median
                        # get publication year
                        pub_year = md_table.loc[idno, "year"]
                        # calculate the difference
                        result = round(pub_year - med)
                    else:
                        result = float("NaN")

                # counts related to chapters
                elif (label == "tpx_date_any_chapter_first_abs" or
                      label == "tpx_date_any_chapter_other_mean_abs" or
                      label == "tpx_date_any_chapter_other_abs"):
                    dates_ch = []
                    xpaths_chapter = {"tpx_date_any_chapter_first_abs": "//TIMEX3[@type='DATE'][substring(ancestor::tei:div/@xml:id,(string-length(ancestor::tei:div/@xml:id) - 1),2) ='d1']/@value",
                                      "tpx_date_any_chapter_other_abs": "//TIMEX3[@type='DATE'][substring(ancestor::tei:div/@xml:id,(string-length(ancestor::tei:div/@xml:id) - 1),2) !='d1']/@value",
                                      "tpx_date_any_chapter_other_mean_abs": "//TIMEX3[@type='DATE'][substring(ancestor::tei:div/@xml:id,(string-length(ancestor::tei:div/@xml:id) - 1),2) !='d1']/@value",
                                      "chapters": "//wrapper"
                                      }
                    chapter_dates = []
                    chapter_dates = xml.xpath(xpaths_chapter[label], namespaces=namespaces)

                    # filter: just "any-dates"
                    for date in chapter_dates:
                        if re.match(r"^\d{2,4}", date) or re.match(r"^.{2,4}-\d{2}", date) or re.match(r"^.{2,4}-.{2}-\d{2}", date):
                            dates_ch.append(date)

                    if (label == "tpx_date_any_chapter_first_abs" or label == "tpx_date_any_chapter_other_abs"):
                        # return all the dates from the first / other chapters
                        result = len(dates_ch)
                    elif label == "tpx_date_any_chapter_other_mean_abs":
                        # calculate the mean of the other chapters
                        chapters = xml.xpath(xpaths_chapter["chapters"])

                        if len(chapters) <= 1:
                            raise ValueError("The novel " + idno + " has less than 2 chapters!")
                        result = len(dates_ch) / (len(chapters) - 1)

                # remaining temporal expression features
                else:
                    date_counts = []
                    for date in dates:
                        if (label == "tpx_date_none_abs"):
                            if re.match(r"^\D+$", date):
                                date_counts.append(date)
                        if (label == "tpx_date_year_abs"):
                            # if re.match(r"^\d{2,4}", date): changed for all years
                            if re.match(r"^\d{4}", date):
                                date_counts.append(date)
                        if (label == "tpx_date_year_month_abs"):
                            if re.match(r"^\d{4}-\d{2}", date):
                                date_counts.append(date)
                        if (label == "tpx_date_month_abs"):
                            if re.match(r"^.{4}-\d{2}", date):
                                date_counts.append(date)
                        if (label == "tpx_date_day_abs"):
                            if re.match(r"^.{4}-.{2}-\d{2}", date):
                                date_counts.append(date)
                        if (label == "tpx_date_month_day_abs"):
                            if re.match(r"^.{4}-\d{2}-\d{2}", date):
                                date_counts.append(date)
                        if (label == "tpx_date_any_abs"):
                            if re.match(r"^\d{4}", date) or re.match(r"^.{4}-\d{2}", date) or re.match(r"^.{4}-.{2}-\d{2}", date):
                                date_counts.append(date)
                        if (label == "tpx_date_full_abs"):
                            if re.match(r"^\d{4}-\d{2}-\d{2}", date):
                                date_counts.append(date)

                    result = len(date_counts)

            # check the results of XPath
            """
            if math.isnan(result):
                result = "is not a number"
            """

            # Write the result into the data frame
            ht_fr.loc[idno, label] = result

    # second step: relative values (relative to the total number of words in the text)
    for file in glob.glob(ht_inpath):

        idno = os.path.basename(file)[0:6]

        # calculate total number of words in the text
        num_words = 0
        xml = etree.parse(file)
        # get XML snippets chapterwise
        wrappers = xml.xpath("//wrapper//text()")
        for wrap in wrappers:
            # tokenize and count
            words = re.split(r"[\s\n]+", wrap)
            num_words += len(words)

        ht_fr.loc[idno, "num_words"] = num_words

        for label in labels_rel:
            # set corresponding absolute value label
            label_abs = label[:-3] + "abs"

            # fetch absolute value
            abs_val = ht_fr.loc[idno, label_abs]

            # check data type
            if math.isnan(abs_val):
                result = abs_val
            else:
                # calculate relative value
                result = abs_val / num_words

            # Write the result into the data frame
            ht_fr.loc[idno, label] = result

    # third step: calculate proportions
    for file in glob.glob(ht_inpath):

        idno = os.path.basename(file)[0:6]
        tpx_all = ht_fr.loc[idno, "tpx_all_abs"]
        tpx_all_one = tpx_all / 100

        for label in labels_prop:
            # set corresponding absolute value label
            label_abs = label[:-4] + "abs"

            # fetch absolute value
            abs_val = ht_fr.loc[idno, label_abs]

            # check data type
            if math.isnan(abs_val):
                result = abs_val
            else:
                # calculate proportion
                result = abs_val / tpx_all_one

            # Write the result into the data frame
            ht_fr.loc[idno, label] = result

    # for FJR: drop the absolute values
    for label in labels_abs:
        ht_fr = ht_fr.drop(label, axis=1)
    ht_fr = ht_fr.drop("temp_dist", axis=1)
    ht_fr = ht_fr.drop("num_words", axis=1)

    ht_fr.to_csv(wdir + "tpx-corpus-counts.csv", sep=",", header=True)

    print("Done: generate tpx features")
def zernikeHexapodTrend(mnts='M20'):
    Tfile = '/home/jghao/research/decamFocus/psf_withseeing/finerGrid_coeff_matrix/zernike_coeff_finerGrid_training.cp'
    b = p.load(open(Tfile))
    nobs = len(b)
    x = b[:, 0]
    y = b[:, 1]
    z = b[:, 2]
    theta = b[:, 3]
    phi = b[:, 4]
    fwhm = b[:, 5]
    e1 = b[:, 6]
    e2 = b[:, 7]
    thetax = theta * np.cos(np.deg2rad(phi))
    thetay = theta * np.sin(np.deg2rad(phi))
    if mnts == 'M20':
        idxBase = 9
    if mnts == 'M22real':
        idxBase = 29
    if mnts == 'M22imag':
        idxBase = 49
    idx = np.arange(14)
    zernikeName = ('Piston', 'Tip', 'Tilt', 'Astignism', 'Defocus', 'Astignism',
                   'Trefoil', 'Coma', 'Coma', 'Trefoil', 'Ashtray', 'Astigm.5th',
                   'Spherical', 'Astigm.5th', 'Ashtray', '16', '17', '18', '19', '20')
    for i in range(14):
        pl.figure(figsize=(21, 10))
        pl.subplot(2, 3, 1)
        bp.bin_scatter(x, b[:, idxBase + idx[i]], binsize=0.01, fmt='bo', scatter=True)
        pl.xlabel('x decenter')
        pl.ylabel(zernikeName[i + 1])
        pl.title(mnts)
        pl.subplot(2, 3, 2)
        bp.bin_scatter(y, b[:, idxBase + idx[i]], binsize=0.01, fmt='bo', scatter=True)
        pl.xlabel('y decenter')
        pl.ylabel(zernikeName[i + 1])
        pl.title(mnts)
        pl.subplot(2, 3, 3)
        bp.bin_scatter(z, b[:, idxBase + idx[i]], binsize=0.01, fmt='bo', scatter=True)
        pl.xlabel('z-defocus')
        pl.ylabel(zernikeName[i + 1])
        pl.title(mnts)
        pl.subplot(2, 3, 4)
        bp.bin_scatter(thetax, b[:, idxBase + idx[i]], binsize=5, fmt='bo', scatter=True)
        pl.xlabel('x-tilt')
        pl.ylabel(zernikeName[i + 1])
        pl.title(mnts)
        pl.subplot(2, 3, 5)
        bp.bin_scatter(thetay, b[:, idxBase + idx[i]], binsize=5, fmt='bo', scatter=True)
        pl.xlabel('y-tilt')
        pl.ylabel(zernikeName[i + 1])
        pl.title(mnts)
        pl.savefig(mnts + '_' + str(i + 1) + '_' + zernikeName[i + 1] + '.png')
        pl.close()

def initialize_thermal_prediction(self, config_file):
    conf_pred = config_file['prediction']['heat']
    conf_powr = config_file['prediction']['power']
    # config_json
    n_day = conf_pred['n_day']
    n_values = conf_pred['n_values_per_day']
    precision_in_h = conf_pred['precision_in_h']
    use_predef_loads = conf_pred['use_predef_loads']
    predef_loads_file_path = conf_pred['path_loads']
    # heating curve
    conf_hk = config_file['components']['heating_curve']
    hk_ta = conf_hk['design_ambient_temperature_oC']
    hk_ti = conf_hk['design_indoor_temperature_oC']
    hk_tv = conf_hk['design_supply_temperature_oC']
    hk_tr = conf_hk['design_return_temperature_oC']
    hk_n = conf_hk['radiator_coefficient_n']
    hk_m = conf_hk['radiator_coefficient_m']
    hk_qn = conf_hk['design_heat_load_in_kW']
    # chp unit
    patm = utils.get_pressure_in_MPa()
    calcopt = utils.get_calc_option()
    eps_el_chp = config_file['components']['chp_unit']['electrical_efficiency']
    eps_th_chp = config_file['components']['chp_unit']['thermal_efficiency']
    qel_n_chp = config_file['components']['chp_unit']['max_electric_power_in_kW']
    chp_tinp = config_file['components']['chp_unit']['design_input_temperature_oC']
    chp_tmax = config_file['components']['chp_unit']['design_output_temperature_oC']
    qth_n_chp = eps_th_chp * qel_n_chp / eps_el_chp  # in kW
    mstr_chp = qth_n_chp / (utils.cp_fluid_water(0.5 * (chp_tmax + chp_tinp), patm, calcopt) * (chp_tmax - chp_tinp))  # in kg/s = kW / (kJ/kg/K * K)
    # gas boiler
    qth_n_gb = config_file['components']['gas_boiler']['max_thermal_power_in_kW']
    gb_tinp = config_file['components']['gas_boiler']['design_input_temperature_oC']
    gb_tmax = config_file['components']['gas_boiler']['design_output_temperature_oC']
    mstr_gb = qth_n_gb / (utils.cp_fluid_water(0.5 * (gb_tinp + gb_tmax), patm, calcopt) * (gb_tmax - gb_tinp))  # in kg/s = kW / (kJ/kg/K * K)
    # storage tank
    effective_height = config_file['components']['storage_tank']['effective_heigth_in_m']
    inner_radius = config_file['components']['storage_tank']['inner_radius_tank_in_m']
    effective_pipe_volume = config_file['components']['storage_tank']['effective_coil_volume_in_m3']
    effective_volume = config_file['components']['storage_tank']['effective_volume_in_m3']
    if (effective_volume <= 0.0):
        effective_volume = math.pi * inner_radius * inner_radius * effective_height - effective_pipe_volume  # in m3
    nr_calc = 20
    slice_volume = effective_volume / nr_calc  # in m3
    qmax_rod_el = config_file['components']['storage_tank']['power_heating_rod_in_kW']
    open_weather_map_active = config_file['calculation']['platform_mode']['open_weather_map_active']
    # conf_powr
    # print('\n initialize_thermal_prediction')
    # print('use_predef_loads = {}; {}'.format(use_predef_loads, type(use_predef_loads)))
    # print('predef_loads_file_path = {}; {}'.format(predef_loads_file_path, type(predef_loads_file_path)))
    return predict_thermal.predict_Q(n_day, n_values, precision_in_h, predef_loads_file_path, use_predef_loads, self.output_horizon_in_h,
                                     self.output_resolution_in_s, conf_powr, hk_tv, hk_tr, hk_ti, hk_ta, hk_qn, hk_n, hk_m, chp_tmax, gb_tmax, slice_volume,
                                     mstr_chp, mstr_gb, qmax_rod_el, eps_th_chp, eps_el_chp, open_weather_map_active)

def draw_weather_analysis(date_obj, data, map_region, return_dict):

    # image dictionary
    images = collections.OrderedDict()
    return_dict[0] = None

    # draw 2PVU surface pressure
    image = pv.draw_pres_pv2(
        data['pres_pv2'].values, data['pres_pv2']['lon'].values, data['pres_pv2']['lat'].values,
        map_region=map_region, title_kwargs={'name': 'CFSR', 'time': date_obj})
    images['2PVU_Surface_Pressure'] = image

    # draw 200hPa wind field
    image = dynamics.draw_wind_upper(
        data['u200'].values, data['v200'].values,
        data['u200']['lon'].values, data['u200']['lat'].values,
        gh=data['gh200'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "200hPa Wind | GH", 'time': date_obj})
    images['200hPa_Wind'] = image

    # draw 500hPa height and temperature
    image = dynamics.draw_height_temp(
        data['gh500'].values, data['t500'].values,
        data['gh500']['lon'].values, data['gh500']['lat'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "500hPa GH | T", 'time': date_obj})
    images['500hPa_Height'] = image

    # draw 500hPa vorticity
    image = dynamics.draw_vort_high(
        data['u500'].values, data['v500'].values,
        data['u500']['lon'].values, data['u500']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "500hPa Wind | Vorticity | GH", 'time': date_obj})
    images['500hPa_Vorticity'] = image

    # draw 700hPa vertical velocity
    image = dynamics.draw_vvel_high(
        data['u700'].values, data['v700'].values, data['w700'].values,
        data['w700']['lon'].values, data['w700']['lat'].values,
        gh=data['gh700'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "700hPa Vertical Velocity | Wind | GH", 'time': date_obj})
    images['700hPa_Vertical_Velocity'] = image

    # draw 700hPa wind field
    image = dynamics.draw_wind_high(
        data['u700'].values, data['v700'].values,
        data['u700']['lon'].values, data['u700']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "700hPa Wind | 500hPa GH", 'time': date_obj})
    images['700hPa_Wind'] = image

    # draw 700hPa temperature field
    image = thermal.draw_temp_high(
        data['t700'].values, data['t700']['lon'].values, data['t700']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "700hPa T | 500hPa GH", 'time': date_obj})
    images['700hPa_Temperature'] = image

    # draw 700hPa relative humidity
    rh = calc.relative_humidity_from_specific_humidity(700 * units.hPa, data['t700'], data['q700']) * 100
    image = moisture.draw_rh_high(
        data['u700'].values, data['v700'].values, rh.values,
        data['u700']['lon'].values, data['u700']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "700hPa RH | Wind | 500hPa GH", 'time': date_obj})
    images['700hPa_Relative_Humidity'] = image

    # draw 850hPa wind field
    image = dynamics.draw_wind_high(
        data['u850'].values, data['v850'].values,
        data['u850']['lon'].values, data['u850']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "850hPa Wind | 500hPa GH", 'time': date_obj})
    images['850hPa_Wind'] = image

    # draw 850hPa temperature field
    image = thermal.draw_temp_high(
        data['t850'].values, data['t850']['lon'].values, data['t850']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "850hPa T | 500hPa GH", 'time': date_obj})
    images['850hPa_Temperature'] = image

    # draw 850hPa relative humidity
    rh = calc.relative_humidity_from_specific_humidity(850 * units.hPa, data['t850'], data['q850']) * 100
    image = moisture.draw_rh_high(
        data['u850'].values, data['v850'].values, rh.values,
        data['u850']['lon'].values, data['u850']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "850hPa RH | Wind | 500hPa GH", 'time': date_obj})
    images['850hPa_Relative_Humidity'] = image

    # draw 850hPa specific humidity field
    image = moisture.draw_sp_high(
        data['u850'].values, data['v850'].values, data['q850'].values * 1000.,
        data['q850']['lon'].values, data['q850']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "850hPa SP | Wind | 500hPa GH", 'time': date_obj})
    images['850hPa_Specific_Humidity'] = image

    # draw 925hPa temperature field
    image = thermal.draw_temp_high(
        data['t925'].values, data['t925']['lon'].values, data['t925']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "925hPa T | 500hPa GH", 'time': date_obj})
    images['925hPa_Temperature'] = image

    # draw 925hPa wind field
    image = dynamics.draw_wind_high(
        data['u925'].values, data['v925'].values,
        data['u925']['lon'].values, data['u925']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "925hPa Wind | 500hPa GH", 'time': date_obj})
    images['925hPa_Wind'] = image

    # draw 925hPa relative humidity
    rh = calc.relative_humidity_from_specific_humidity(925 * units.hPa, data['t925'], data['q925']) * 100
    image = moisture.draw_rh_high(
        data['u925'].values, data['v925'].values, rh.values,
        data['u925']['lon'].values, data['u925']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "925hPa RH | Wind | 500hPa GH", 'time': date_obj})
    images['925hPa_Relative_Humdity'] = image

    # draw 925hPa specific humidity field
    image = moisture.draw_sp_high(
        data['u925'].values, data['v925'].values, data['q925'].values * 1000.,
        data['q925']['lon'].values, data['q925']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "925hPa SP | Wind | 500hPa GH", 'time': date_obj})
    images['925hPa_Specific_Humidity'] = image

    # draw precipitable water field
    image = moisture.draw_pwat(
        data['pwat'].values, data['pwat']['lon'].values, data['pwat']['lat'].values,
        gh=data['gh500'].values, map_region=map_region,
        title_kwargs={'name': 'CFSR', 'head': "Precipitable Water | 500hPa GH", 'time': date_obj})
    images['Precipitable_Water'] = image

    # draw mean sea level pressure field
    image = dynamics.draw_mslp(
        data['mslp'].values, data['mslp']['lon'].values,
data['mslp']['lat'].values,\n gh=data['gh500'].values, map_region=map_region,\n title_kwargs={'name':'CFSR', 'head': \"MSLP | 500hPa GH\", 'time': date_obj})\n images['Mean_Sea_Level_Pressure'] = image\n\n return_dict[0] = images", "def _step(self, temperature, mc_args):\n self._log(\"Current temperature {}K. Current chemical_potential:\"\n \" {} eV/atom\".format(\n int(temperature), mc_args[\"chem_potential\"]))\n\n thermo = []\n for i, sgc in enumerate(self._sgc_obj):\n self._log(\"Running MC for system {}\".format(i))\n sgc.T = temperature\n sgc.runMC(**mc_args)\n thermo.append(sgc.get_thermodynamic())\n return thermo", "def _calculate_monomer(self, raw=False):\n ta = self.TimeAxis\n # transition frequency\n om = self.system.elenergies[1]-self.system.elenergies[0]\n # transition dipole moment\n dm = self.system.dmoments[0,1,:]\n # dipole^2\n dd = numpy.dot(dm,dm)\n # natural life-time from the dipole moment\n gama = [0.0] #[-1.0/self.system.get_electronic_natural_lifetime(1)]\n sbi = self.system.get_SystemBathInteraction(self.TimeAxis)\n reorg = sbi.CC.get_reorganization_energy(0,0)\n \n if self.system._has_system_bath_coupling:\n # correlation function\n ct = self.system.get_egcf((0,1)) \n gt = self._c2g(ta,ct.data)\n tr = {\"ta\":ta,\"dd\":dd,\"om\":om-self.rwa,\"ct\":ct,\"gt\":gt,\"gg\":gama,\"fwhm\":0.0}\n else:\n tr = {\"ta\":ta,\"dd\":dd,\"om\":om-self.rwa,\"gg\":gama,\"fwhm\":0.0}\n \n if self._gauss_broad:\n tr[\"fwhm\"] = self.gauss\n\n tr[\"re\"] = reorg\n\n if self._gauss_broad:\n tr[\"fwhm\"] = self.gauss\n\n # calculates the one transition of the monomer \n data = numpy.real(self.one_transition_spectrum_abs(tr))\n data_fl = numpy.real(self.one_transition_spectrum_fluor(tr))\n\n \n for ii in range(2,self.system.Nb[1]+1):\n \n # transition frequency\n om = self.system.elenergies[ii]-self.system.elenergies[0]\n # transition dipole moment\n dm = self.system.dmoments[0,ii,:]\n # dipole^2\n dd = numpy.dot(dm,dm)\n # natural life-time from the dipole moment\n gama = [0.0] #[-1.0/self.system.get_electronic_natural_lifetime(ii)]\n \n if self.system._has_system_bath_coupling:\n # correlation function\n ct = self.system.get_egcf((0,ii)) \n gt = self._c2g(ta,ct.data)\n tr = {\"ta\":ta,\"dd\":dd,\"om\":om-self.rwa,\"ct\":ct,\"gt\":gt,\"gg\":gama,\"fwhm\":0.0}\n else:\n tr = {\"ta\":ta,\"dd\":dd,\"om\":om-self.rwa,\"gg\":gama,\"fwhm\":0.0}\n\n if self._gauss_broad: \n tr[\"fwhm\"] = self.gauss\n \n if self._gauss_broad:\n tr[\"fwhm\"] = self.gauss\n \n data += numpy.real(self.one_transition_spectrum_abs(tr))\n\n # we only want to retain the upper half of the spectrum\n Nt = len(self.frequencyAxis.data)//2 \n do = self.frequencyAxis.data[1]-self.frequencyAxis.data[0]\n st = self.frequencyAxis.data[Nt//2]\n # we represent the Frequency axis anew\n axis = FrequencyAxis(st,Nt,do)\n\n # multiply the spectrum by frequency (compulsory prefactor)\n if not raw:\n data = axis.data*data\n data_fl = (axis.data**3)*data_fl\n\n \n spect_abs = LinSpectrum(axis=axis, data=data)\n fluor_spect = LinSpectrum(axis=axis, data=data_fl)\n \n return {\"abs\": spect_abs, \"fluor\": fluor_spect}", "def __init__(self,\n modeltype='TLusty'):\n if modeltype == 'TLusty':\n self.modtype = 'TLusty_v10'\n self.filebase = 'T*v10_z*.dat'\n self.path = '/home/kgordon/Dust/Ext/Model_Standards_Data/'\n self.read_tlusty_models(self.filebase, self.path)\n else:\n print('model type not supported')\n exit()", "def mv_to_typek(mv):\n tab1 = [\n 0.0000000E+00,\n 2.5173462E+01,\n -1.1662878E+00,\n -1.0833638E+00,\n 
-8.9773540E-01,\n -3.7342377E-01,\n -8.6632643E-02,\n -1.0450598E-02,\n -5.1920577E-04,\n 0.0000000E+00,\n ]\n\n tab2 = [\n 0.000000E+00,\n 2.508355E+01,\n 7.860106E-02,\n -2.503131E-01,\n 8.315270E-02,\n -1.228034E-02,\n 9.804036E-04,\n -4.413030E-05,\n 1.057734E-06,\n -1.052755E-08,\n ]\n\n tab3 = [\n -1.318058E+02,\n 4.830222E+01,\n -1.646031E+00,\n 5.464731E-02,\n -9.650715E-04,\n 8.802193E-06,\n -3.110810E-08,\n 0.000000E+00,\n 0.000000E+00,\n 0.000000E+00,\n ]\n\n if -5.891 <= mv <= 0.0:\n c = tab1\n elif 0.0 < mv <= 20.644:\n c = tab2\n elif 20.644 < mv <= 54.886:\n c = tab3\n else:\n raise ValueError(\"Voltage specified is out of range for Type K thermocouple\")\n\n t = 0.0\n for p in range(0, len(c)):\n t += c[p] * math.pow(mv, p)\n return t", "def test_water_regulation(self):\n\n for action in self.controller.actions.values():\n for water_level in range(90, 110, 2):\n\n # measure water level\n self.controller.sensor.measure = MagicMock(return_value=water_level)\n\n # get the state of the pump\n self.controller.pump.get_state = \\\n MagicMock(return_value=self.decider.decide(water_level, action, \\\n self.controller.actions)) \\\n\n self.controller.tick()" ]
[ "0.6137704", "0.5869371", "0.57176363", "0.5665171", "0.56162924", "0.5519943", "0.55192375", "0.54380035", "0.54261625", "0.5379157", "0.52772623", "0.52344805", "0.52321464", "0.5232041", "0.52294254", "0.52078223", "0.5178827", "0.51737756", "0.5172065", "0.51715505", "0.5143476", "0.51152176", "0.5102242", "0.5095008", "0.5085313", "0.50810874", "0.5073962", "0.5069687", "0.50694054", "0.5062531", "0.5056567", "0.5052204", "0.50427324", "0.50300413", "0.5029852", "0.50292397", "0.50265855", "0.5011306", "0.5001052", "0.49922204", "0.49909392", "0.4989151", "0.49807814", "0.49781245", "0.49668336", "0.49598092", "0.49545726", "0.49495015", "0.49472108", "0.4941354", "0.49387807", "0.49315497", "0.49224678", "0.4920718", "0.49110797", "0.4906708", "0.4906708", "0.4894788", "0.48933604", "0.48887682", "0.48827487", "0.48810628", "0.4880058", "0.4873482", "0.4867596", "0.4865511", "0.48641863", "0.48629823", "0.4862096", "0.48610547", "0.48604017", "0.4856915", "0.48494172", "0.4845362", "0.48412362", "0.48402977", "0.48325875", "0.48260048", "0.48247385", "0.48198196", "0.48171934", "0.48154867", "0.48126134", "0.48092064", "0.48058248", "0.48052442", "0.48019904", "0.47991383", "0.47963938", "0.4795828", "0.4785395", "0.47707084", "0.47702226", "0.47674653", "0.47607353", "0.47597215", "0.47555792", "0.47492227", "0.47477895", "0.47463265" ]
0.74097484
0
This function generates the training and testing data
def generate_training_testing_dataset(store_id, transactions, meteo_day, max_days=2500, single_barcode=0): # Get the minimum and maximum of date in the transactions min_date = transactions[(transactions['STO_EAN'] == store_id)].min()['TRX_DATETIME'].date() max_date = transactions[(transactions['STO_EAN'] == store_id)].max()['TRX_DATETIME'].date() # Get the number of days between the two date num_days = (max_date - min_date).days # Get the list of unique products barcode in the transactions products_barcode = transactions['BARCODE'].unique() # Only do one single barcode if activated if single_barcode is not None: products_barcode = [products_barcode[single_barcode]] # Array to contain all training data all_data_first_level = [] # For each day and for each product for day in xrange(num_days): print(day) # If we have already considered more days than allowed, stop if day > max_days: break # Get the date corresponding to this day date = min_date + pd.DateOffset(day) # Get the weather of the date weather = get_weather_on_date(date, meteo_day, store_id).head(n=1) # If the weather is empty we skip this day if weather.empty: continue # For each product to include for product_barcode in products_barcode: # Get the volume and inventory data volume = get_volume_product_on_date(product_barcode, date, store_id, transactions) # If no volume could be found skip this date,product pair if volume is None: continue # Get the type of the current date day_type = generate_day_type(date) # Generating complex features based on the simpler one # This contains respectively yesterday, the day before yesterday and the same day as current one in # previous week yesterday = date - pd.DateOffset(1) two_days_ago = date - pd.DateOffset(2) one_week_ago = date - pd.DateOffset(7) # Get the day type of yesterday and 2 days ago day_type_yesterday = generate_day_type(yesterday) day_type_2days_ago = generate_day_type(two_days_ago) # Get the volume of yesterday, 2days ago and 1 week ago volume_yesterday = get_volume_product_on_date(product_barcode, yesterday, store_id, transactions) volume_2days_ago = get_volume_product_on_date(product_barcode, two_days_ago, store_id, transactions) volume_one_week_ago = get_volume_product_on_date(product_barcode, one_week_ago, store_id, transactions) # Get the total sales and the total weight of product done yesterday, 2 days ago and 1 week ago volume_price_yesterday = 0 volume_weight_yesterday = 0 if volume_yesterday is not None: volume_price_yesterday = volume_yesterday["price"] volume_weight_yesterday = volume_yesterday["weight"] volume_price_2days_ago = 0 volume_weight_2days_ago = 0 if volume_2days_ago is not None: volume_price_2days_ago = volume_2days_ago["price"] volume_weight_2days_ago = volume_2days_ago["weight"] volume_price_one_week_ago = 0 volume_weight_one_week_ago = 0 if volume_one_week_ago is not None: volume_price_one_week_ago = volume_one_week_ago["price"] volume_weight_one_week_ago = volume_one_week_ago["weight"] # Using historical weather data weather_yesterday = get_weather_on_date(yesterday, meteo_day, store_id).head(n=1) temperature_min_yesterday = 0 temperature_max_yesterday = 0 if not weather_yesterday.empty: temperature_min_yesterday = weather_yesterday['TEMPERATURE_VALUE_MIN'].values[0] temperature_max_yesterday = weather_yesterday['TEMPERATURE_VALUE_MIN'].values[0] #tmp = [weather['TEMPERATURE_VALUE_MIN'].values[0], weather['TEMPERATURE_VALUE_MAX'].values[0], # weather['PRECIPITATION_VALUE'].values[0], weather['SUNSHINE_DURATION'].values[0], # 
weather['SNOW_DEPTH'].values[0], day_type, volume["price"], volume["weight"]] # Saving Features tmp = [weather['TEMPERATURE_VALUE_MIN'].values[0], weather['TEMPERATURE_VALUE_MAX'].values[0], day_type, volume["price"], volume_price_yesterday,volume_weight_yesterday, volume_price_2days_ago, volume_weight_2days_ago, volume_price_one_week_ago, volume_weight_one_week_ago, temperature_min_yesterday, temperature_max_yesterday,day_type_yesterday, day_type_2days_ago, volume["weight"]] all_data_first_level.append(tmp) return all_data_first_level
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_train_test(self):\n x, y = self.read_data()\n x_train, y_train, x_test, y_test = self.sample_data(x, y)\n self.train = (x_train, y_train)\n self.test = (x_test, y_test)", "def generate_train_test_data(data_dir = '../../att_faces'):\n\n train_data = [ [ read_image('%s/s%d/%d.pgm'%( data_dir, i, j)) for j in range(1,11)] for i in range(1, 36)]\n test_data = [ [ read_image('%s/s%d/%d.pgm'%( data_dir, i, j)) for j in range(1,11)] for i in range(36, 41)]\n \n true_combinations_train = generate_true_combinations(train_data)\n false_combinations_train = generate_false_combination(train_data, int(len(true_combinations_train) / len(train_data)), 10)\n \n true_combinations_test = generate_true_combinations(test_data)\n false_combinations_test = generate_false_combination(test_data, int(len(true_combinations_test) / len(test_data)), 10)\n \n return prepare_to_classifier(true_combinations_train, false_combinations_train, true_combinations_test, false_combinations_test)", "def get_training_and_testing_sets(data, Y):\r\n data = pd.concat([data, Y], axis=1)\r\n x,y=data.shape\r\n train_X_sub1=data[0:x//6]\r\n dev_X_sub1 = data[x//6:x//6 + x//12]\r\n test_X_sub1 = data[x//6 + x//12:x//3]\r\n\r\n train_X_sub2 = data[x//3:x//3+x//6]\r\n dev_X_sub2 = data[x//6 + x//3:x//3 + x//6 + x//12]\r\n test_X_sub2 = data[x//3 + x//6 + x//12:2*x//3]\r\n\r\n train_X_sub3 = data[2*x//3:(2*x//3) +x//6]\r\n dev_X_sub3 = data[x//6 + 2*x//3: (2*x//3) + x//6 + x//12]\r\n test_X_sub3 = data[2*x//3 + x//6 + x//12:x]\r\n\r\n train_X=train_X_sub1.append(train_X_sub2,ignore_index = True)\r\n train_X =train_X.append(train_X_sub3,ignore_index = True)\r\n dev_X= dev_X_sub1.append(dev_X_sub2,ignore_index = True)\r\n dev_X = dev_X.append(dev_X_sub3,ignore_index = True)\r\n test_X = test_X_sub1.append(test_X_sub2,ignore_index = True)\r\n test_X = test_X.append(test_X_sub3,ignore_index = True)\r\n\r\n\r\n train_X = util.shuffle(train_X)\r\n train_X = train_X.reset_index(drop=True)\r\n\r\n dev_X = util.shuffle(dev_X)\r\n dev_X = dev_X.reset_index(drop=True)\r\n\r\n test_X = util.shuffle(test_X)\r\n test_X = test_X.reset_index(drop=True)\r\n\r\n train_X_final=train_X\r\n dev_X_final = dev_X\r\n test_X_final = test_X\r\n x, y = train_X_final.shape\r\n train_X = train_X_final.iloc[:, 0:y - 1]\r\n train_Y = train_X_final.iloc[:, y - 1]\r\n\r\n x, y = test_X_final.shape\r\n test_X = test_X_final.iloc[:, 0:y - 1]\r\n test_Y = test_X_final.iloc[:, y - 1]\r\n\r\n x, y = dev_X_final.shape\r\n dev_X = dev_X_final.iloc[:, 0:y - 1]\r\n dev_Y = dev_X_final.iloc[:, y - 1]\r\n\r\n return train_X, train_Y, dev_X,dev_Y,test_X, test_Y", "def _create_data():\n tf.logging.info(\"Create records..\")\n train, val, test = util.load_data(data_dir, FLAGS[\"is_aug\"])\n tf.logging.info(\"Dataset size: Train-{} Test-{} Val-{}\".format(len(train), len(test), len(val)))\n return train, val, test", "def make_dataset():\n\n\tnumberOfTrials = dataset_params.num_of_samples\n\tnumberOfTrials_train = int(numberOfTrials*0.8)\n\tnumberOfTrials_test = int(numberOfTrials*0.2)\n\n\tprint(\"==================================================\")\n\tprint(\"1. Generating Train images ......\")\n\tprint(\"\\nTrain image per variation\", numberOfTrials_train)\n\tmakeDataset(numberOfTrials_train, \"train\")\n\n\tprint(\"==================================================\")\n\tprint(\"2. 
Generating Test images ......\")\n\tprint(\"\\nTest image per variation\", numberOfTrials_test)\n\tmakeDataset(numberOfTrials_test, \"test\")\n\n\tprint(\"==================================================\")\n\tprint(\"Done!!!\")", "def MakeDataSetFiles(dirname):\n\n\n if not os.path.exists(dirname):\n os.mkdir(dirname)\n if not os.path.exists(os.path.join(dirname, 'train')):\n os.mkdir(os.path.join(dirname, 'train'))\n if not os.path.exists(os.path.join(dirname, 'test')):\n os.mkdir(os.path.join(dirname, 'test'))\n data_train = fetch_20newsgroups(subset='train', categories=None, shuffle=True, random_state=42)\n data_test = fetch_20newsgroups(subset='test', categories=None, shuffle=True, random_state=42)\n\n if dirname[-1] == '/' or dirname[-1] == '\\\\':\n dirname = dirname[:-1]\n \n Util.WriteClassFile(data_train.target, os.path.join(dirname, 'train_classes.txt'))\n Util.WriteClassFile(data_test.target,os.path.join(dirname, 'test_classes.txt'))\n\n\n train_counter = 0;\n for doc in data_train.data:\n filename = 'train_' + str(train_counter).zfill(5);\n f = file(os.path.join(dirname, 'train', filename), 'w');\n f.write(doc.encode('ascii', 'ignore'));\n f.close();\n train_counter = train_counter + 1;\n\n test_counter = 0;\n for doc in data_test.data:\n filename = 'test_' + str(test_counter).zfill(5);\n f = file(os.path.join(dirname, 'test', filename), 'w');\n f.write(doc.encode('ascii', 'ignore'));\n f.close();\n test_counter = test_counter + 1;\n\n class_index = file(os.path.join(dirname, 'class_label_index.txt'), 'w')\n for label in data_train.target_names:\n class_index.write(label + '\\n')\n class_index.close()", "def prepare_data(self):\n # Set up the path\n self.path_target_train = os.path.join(self.data_dir, self.train_path_file_target + \".pkl\")\n self.path_target_test = os.path.join(self.data_dir, self.test_path_file_target + \".pkl\")\n\n if not os.path.exists(self.path_target_train) or not os.path.exists(self.path_target_test):\n # Create vocabularies of the appropriate sizes.\n self.create_vocabulary(self.train_path_file)\n\n # Create token ids for the training data.\n input_train_path = self.train_path_file\n target_train_path = self.train_path_file_target\n train_input, train_input_length, train_labels = self.data_to_token_ids(input_train_path, target_train_path)\n\n # Create token ids for the validation data.\n input_test_path = self.test_path_file\n target_test_path = self.test_path_file_target\n test_input, test_input_length, _ = self.data_to_token_ids(input_test_path, target_test_path, train=False)\n\n # Collect data into a list\n training_data = [train_input, train_input_length, train_labels]\n test_data = [test_input, test_input_length]\n\n # Save all the data\n with open(self.path_target_train, 'wb') as f:\n pickle.dump(training_data,f)\n with open(self.path_target_test, 'wb') as f:\n pickle.dump(test_data, f)\n else:\n # Load data\n with open(self.path_target_train, 'rb') as f:\n training_data = pickle.load(f)\n with open(self.path_target_test, 'rb') as f:\n test_data = pickle.load(f)\n\n # Initialize vocabulary\n self.initialize_vocabulary()\n\n # Convert list into a numpy array - train data\n train_input = pd.DataFrame(training_data[0]).fillna(value=0).astype(int).values\n train_length_input = np.array(training_data[1], dtype=int)\n train_labels = np.array(training_data[2], dtype=int)\n\n # Convert list into a numpy array - test data\n test_input = pd.DataFrame(test_data[0]).fillna(value=0).astype(int).values\n test_length_input = 
pd.DataFrame(test_data[1]).fillna(value=0).astype(int).values\n\n # Printing maximum length\n print(\"Shape of the input training matrix {}\".format(str(train_input.shape)))\n print(\"Shape of the input test matrix {}\".format(str(test_input.shape)))\n\n # Copy the files\n self.copy_files()\n\n # Return output\n return train_input, train_length_input, train_labels, test_input, test_length_input", "def generate(self):\n self.training_data.gen_x(self.x_func)\n self.training_data.gen_a(self.a_func)\n self.training_data.gen_y(self.y_func)\n \n self.testing_data.gen_x(self.x_func)\n self.testing_data.gen_ys(self.y_func)\n self.testing_data.gen_azero(self.ytotal_func)", "def data_set_maker():\n\n # create a folder in your code directory and name it: \"files\". put the .npy files inside that folder\n\n x_all = np.load(path + '/files/tinyX.npy', 'r') # reads the input file\n y_all = np.load(path + '/files/tinyY.npy', 'r') # reads the input file\n\n # split the data into 10% validation-set and 90% training set\n raw_train, raw_valid, y_train, y_valid = train_test_split(x_all, y_all, test_size=0.2, random_state=43)\n return raw_train, raw_valid, y_train, y_valid", "def _generate_datasets(self):\n\n degrade_test = False\n if self._opts['degrade_step'] == 'test':\n degrade_test = True\n\n use_trainset_for_tests = UseTrainForTest.IDENTICAL # can be different in few shot workflow\n\n train_dataset, test_dataset = self._gen_datasets_with_options(self._opts['train_classes'],\n self._opts['test_classes'],\n is_superclass=self._opts['superclass'],\n class_proportion=self._opts['class_proportion'],\n degrade_test=degrade_test,\n degrade_type=self._opts['degrade_type'], # only relevant if degrade_test = True\n degrade_val=self._opts['min_val'], # only relevant if degrade_test = True\n recurse_train=self._is_train_recursive(),\n recurse_test=self._is_inference_recursive(),\n num_batch_repeats=self._opts['num_repeats'],\n recurse_iterations=self._opts['recurse_iterations'],\n evaluate_step=self._opts['evaluate'],\n use_trainset_for_tests=use_trainset_for_tests,\n invert_images=self._opts['invert_images'],\n min_val=self._opts['min_val'])\n return train_dataset, test_dataset", "def test_training(self):\n\t\tpass", "def data_set_maker():\n\n # create a folder in your code directory and name it: \"files\". put the .npy files inside that folder\n path = os.getcwd() # reads the current path\n x_train = np.load(path + '/files/tinyX.npy', 'r') # reads the input file\n y_train = np.load(path + '/files/tinyY.npy', 'r') # reads the input file\n x_test = np.load(path + '/files/tinyX_test.npy', 'r') # reads the input file\n x_train, y_train = shuffle(x_train, y_train)\n\n return x_train, y_train, x_test", "def produce_init(filename):\n training_dataset = pd.read_csv(f'../Modified Data/{filename}')\n test_dataset = pd.read_csv(f'../Raw Data/test.csv')\n features = list(training_dataset.columns)\n features.remove('SalePrice')\n predict_feature = ['SalePrice']\n\n # Produce Test Data\n test_X = test_dataset.loc[:, features]\n ids_test = test_dataset.loc[:, 'Id']\n\n for column in features:\n if str(training_dataset.loc[:, column].dtype) == 'object':\n # Initialize encoder\n labelencoder = LabelEncoder()\n # Encode Train Data\n training_dataset.loc[:, column] = training_dataset.loc[:, column].fillna('Missing')\n training_dataset.loc[:, column] = pd.Series(labelencoder.fit_transform(training_dataset.loc[:, column]))\n # Encode Test Data\n test_X.loc[:, column] = test_X.loc[:, column].fillna('Missing')\n test_X.loc[:, column] = pd.Series(labelencoder.fit_transform(test_X.loc[:, column]))\n else:\n # Fix missing values for train data\n training_dataset.loc[:, column] = training_dataset.loc[:, column].fillna(int(training_dataset.loc[:, column].mean()))\n # Fix missing values for test data\n test_X.loc[:, column] = test_X.loc[:, column].fillna(int(test_X.loc[:, column].mean()))\n\n return training_dataset, test_X, ids_test", "def Train_data():\n print (\"loading train data ...\")\n time_start = time.time()\n data_root = '/media/keziwen/86AA9651AA963E1D'\n with h5py.File(join(data_root, './data/train_real2.h5')) as f:\n data_real = f['train_real'][:]\n num, nt, ny, nx = data_real.shape\n data_real = np.transpose(data_real, (0, 1, 3, 2))\n with h5py.File(join(data_root, './data/train_imag2.h5')) as f:\n data_imag = f['train_imag'][:]\n num, nt, ny, nx = data_imag.shape\n data_imag = np.transpose(data_imag, (0, 1, 3, 2))\n data = data_real+1j*data_imag\n num_train = 15000\n num_validate = 2000\n train_data = data[0:num_train]\n validate_data = data[num_train:num_train+num_validate]\n\n train_data = np.random.permutation(train_data)\n\n time_end = time.time()\n print ('dataset has been created using {}s'.format(time_end-time_start))\n return train_data, validate_data", "def data_setup(self):\n # Make sure the dataset is downloaded and put into the data folder\n training_data = pd.read_csv('./data/train.csv', sep=',', nrows=self.training_dataset_size)\n testing_data = pd.read_csv('./data/test.csv', sep=',' , nrows=self.training_dataset_size)\n question_list1 = training_data['question1']\n question_list2 = training_data['question2']\n is_duplicate = training_data['is_duplicate']\n # for will\n X = []\n Y = []\n for i in range(0, 1000):\n print(\"*\"*20, i ,\"*\"*20 )\n feature = self.call_feature_generator(question_list1[i],question_list2[i], self.feature_code )\n X.append(feature)\n Y.append(is_duplicate[i])\n print(feature)\n print(is_duplicate[i])\n print(question_list1[i])\n print(question_list2[i])\n\n # we train classifier\n\n classifer = self.call_classifier(X, Y, self.classifier_code)\n\n # testing\n testX = []\n testY = []\n\n for i in range(1001, 1500):\n print(\"-\"*20, i ,\"-\"*20 )\n feature = self.call_feature_generator(question_list1[i],question_list2[i], self.feature_code )\n testX.append(feature)\n 
testY.append(is_duplicate[i])\n\n X= np.array(testX).reshape(-1,1)\n\n calculate_y = classifer.predict(X)\n\n print(calculate_y)\n tp = 0.0\n fp = 0.0\n fn = 0.0\n\n for i in range(0, len(calculate_y)):\n if calculate_y[i] == testY[i]:\n print(\"Tp : \", testX[i], question_list1[i], question_list2[i], is_duplicate[i] )\n tp += 1.0\n else:\n if testY[i] == 1 and calculate_y[i] == 0:\n print(\"Fn : \", testX[i] , question_list1[i], question_list2[i], is_duplicate[i] )\n fn += 1.0\n else:\n print(\"Fp : \", testX[i], question_list1[i], question_list2[i], is_duplicate[i])\n fp += 1.0\n\n print(\"Tp: \", tp, \" Fp: \", fp, \" Fn: \", fn)\n print(\"Accuracy \", tp/( tp+fn), \"%\")\n\n result = precision_recall_fscore_support(testY, calculate_y)\n print (\"Precision: Class 1 - \", result[0][0], \"% and Class 0 - \", result[0][1], \"%\")\n print (\"Recall: Class 1 - \", result[1][0], \"% and Class 0 - \", result[1][1], \"%\")\n print (\"F-Score: Class 1 - \", result[2][0], \"% and Class 0 - \", result[2][1], \"%\")", "def generate_data(project_data, config, split_method = RAW) :\n training_data = []\n testing_data = []\n \n # Flatten the data (collapse the project and session hierarchy into a list of session_data)\n for v in config.train_project_names:\n # Data in all sessions of one project\n project_session_data = random.sample(project_data[v], len(project_data[v]))\n\n training_data += project_session_data[int(config.session_training_percentage[0] * len(project_session_data)):\n int(config.session_training_percentage[1] * len(project_session_data))]\n\n if config.double_training:\n for i in xrange(int(config.session_training_percentage[0] * len(project_session_data)),\n int(config.session_training_percentage[1] * len(project_session_data))):\n session_data = project_session_data[i]\n\n reversed_session_data = {}\n reversed_session_data[SESSION_NAME] = session_data[SESSION_NAME] + \"_reversed\"\n reversed_session_data[SESSION_EVENTS] = []\n\n def reverse_point_data_qsr(point_data):\n reversed_point_data = point_data[:4]\n # Hands to objects feature swap\n reversed_point_data += point_data[8:12] \n reversed_point_data += point_data[4:8]\n\n # Centroid direction and distance difference is symmetric\n reversed_point_data += point_data[12:14]\n\n # Object corners swap\n reversed_point_data += point_data[16:18] \n reversed_point_data += point_data[14:16]\n\n reversed_point_data += point_data[18:19]\n reversed_point_data += point_data[20:21] \n reversed_point_data += point_data[19:20]\n\n # For QTCCS\n reversed_point_data += point_data[22:23]\n reversed_point_data += point_data[21:22]\n reversed_point_data += point_data[24:25]\n reversed_point_data += point_data[23:24]\n\n # # For difference of features\n # fl = 21\n # reversed_point_data += point_data[fl:fl + 4]\n # # Hands to objects feature swap\n # reversed_point_data += point_data[fl + 8:fl + 12] \n # reversed_point_data += point_data[fl + 4:fl + 8]\n\n # # Centroid direction and distance difference is symmetric\n # reversed_point_data += point_data[fl + 12:fl + 14]\n\n # # Object corners swap\n # reversed_point_data += point_data[fl + 16:fl + 18] \n # reversed_point_data += point_data[fl + 14:fl + 16]\n\n # reversed_point_data += point_data[fl + 18:fl + 19]\n # reversed_point_data += point_data[fl + 20:fl + 21] \n # reversed_point_data += point_data[fl + 19:fl + 20]\n\n return reversed_point_data\n\n def reverse_point_data_sparse_qsr(point_data):\n reversed_point_data = point_data[:2 * 56]\n # Hands to objects feature swap\n reversed_point_data 
+= point_data[4 * 56:6 * 56] \n reversed_point_data += point_data[2 * 56:4 * 56]\n\n # Centroid direction and distance difference is symmetric\n reversed_point_data += point_data[6 * 56:7 * 56]\n\n # Object corners swap\n reversed_point_data += point_data[8 * 56:9 * 56] \n reversed_point_data += point_data[7 * 56:8 * 56]\n\n anchor = 9 * 56\n reversed_point_data += point_data[anchor:anchor + 2]\n reversed_point_data += point_data[anchor + 2 * 2:anchor + 3 * 2] \n reversed_point_data += point_data[anchor + 2:anchor + 2 * 2]\n\n anchor = 9 * 56 + 3 * 2\n # For QTCCS\n reversed_point_data += point_data[anchor + 3:anchor + 2 * 3]\n reversed_point_data += point_data[anchor:anchor + 3]\n reversed_point_data += point_data[anchor + 3 * 3:anchor + 4 * 3]\n reversed_point_data += point_data[anchor + 2 * 3:anchor + 3 * 3]\n\n return reversed_point_data\n\n reversed_session_data[SESSION_DATA] = []\n for point_data in session_data[SESSION_DATA]:\n if split_method == RAW:\n reversed_point_data = point_data[:39]\n reversed_point_data += point_data[51:63]\n reversed_point_data += point_data[39:51]\n elif split_method == PCAS:\n reversed_point_data = point_data[:6]\n # Object centroid swap\n reversed_point_data += point_data[8:10] \n reversed_point_data += point_data[6:8]\n # Object corners swap\n reversed_point_data += point_data[14:18] \n reversed_point_data += point_data[10:14]\n elif split_method == QSR or split_method == EVENT:\n reversed_point_data = reverse_point_data_qsr(point_data)\n elif split_method == SPARSE_QSR:\n reversed_point_data = reverse_point_data_sparse_qsr(point_data)\n\n reversed_session_data[SESSION_DATA].append(reversed_point_data)\n\n for event_str in session_data[SESSION_EVENTS]:\n reversed_event_str = {}\n for key in event_str:\n reversed_event_str[key] = event_str[key]\n\n subj, obj, theme, event, prep = event_str['label']\n def swap_objects(value):\n if value == 2:\n return 3\n if value == 3:\n return 2\n return value\n\n reversed_event_str['label'] = (swap_objects(subj), swap_objects(obj), swap_objects(theme), event, prep)\n\n reversed_session_data[SESSION_EVENTS].append(reversed_event_str)\n\n training_data.append(reversed_session_data)\n\n\n testing_data += project_session_data[int(config.session_testing_percentage[0] * len(project_session_data)):\n int(config.session_testing_percentage[1] * len(project_session_data))]\n \n return (training_data, testing_data)", "def create_train_valid_set(self):\n\n if not self.eq_train:\n X_train_high_level, X_valid_high_level, X_train_low_level, X_valid_low_level, train_w, valid_w, y_train, y_valid = train_test_split(self.X_train_high_level, self.X_train_low_level, self.train_weights, self.y_train,\n train_size=0.7, test_size=0.3\n )\n else:\n X_train_high_level, X_valid_high_level, X_train_low_level, X_valid_low_level, train_w, valid_w, w_train_eq, w_valid_eq, y_train, y_valid = train_test_split(self.X_train_high_level, self.X_train_low_level,\n self.train_weights, self.train_weights_eq, self.y_train,\n train_size=0.7, test_size=0.3\n )\n self.train_weights_eq = w_train_eq\n\n #NOTE: might need to re-equalise weights in each folds as sumW_sig != sumW_bkg anymroe!\n self.train_weights = train_w\n self.valid_weights = valid_w #validation weights should never be equalised weights!\n\n print 'creating validation dataset'\n self.X_train_high_level = X_train_high_level\n self.X_train_low_level = self.join_objects(X_train_low_level)\n\n self.X_valid_high_level = X_valid_high_level\n self.X_valid_low_level = 
self.join_objects(X_valid_low_level)\n print 'finished creating validation dataset'\n\n self.y_train = y_train\n self.y_valid = y_valid", "def createTrainTestSets():\n tweets = open(noDuplicatesFilename, 'r').read().splitlines()\n name_mapping = loadNameMapping()\n holdoutLocations = [u'Frederiksberg, Danmark', u'T\\xe5rnby, Danmark', u'Kolding, Danmark', u'T\\xe4by, Sverige', u'Kungsbacka, Sverige', u'Kristianstad, Sverige', u'Bod\\xf8, Norge', u'Kvinnherad, Norge', u'Ullensaker, Norge']\n testSetLocation = []\n rest = []\n for tweet in tweets:\n if stringToTweet(tweet).getFullName() in holdoutLocations:\n testSetLocation.append(tweet)\n else:\n rest.append(tweet)\n tweets = rest\n testIndex = int(round(len(tweets) * (1 - test_set_ratio)))\n random.seed(1)\n random.shuffle(tweets)\n trainSet = tweets[:testIndex]\n testSet = tweets[testIndex:]\n open(trainSetFilename, 'w').write('\\n'.join(trainSet))\n open(testSetNormalFilename, 'w').write('\\n'.join(testSet))\n open(testSetLocationFilename, 'w').write('\\n'.join(testSetLocation))\n print \"Wrote %d tweets to train set\" % len(trainSet)\n print \"Wrote %d tweets to normal test set\" % len(testSet)\n print \"Wrote %d tweets to location test set\" % len(testSetLocation)", "def generate_dataset(self):\n sets = {\n \"train\": 10,\n \"test\": 5,\n }\n\n fields = {\n \"strings_list\": lambda x: str_to_ascii(self.generate_string_list(x)),\n \"data\": lambda x: np.random.randint(0, 10, (x, 10)),\n \"number\": lambda x: np.array(range(x)),\n \"field_with_a_long_name_for_printing\": lambda x: np.array(range(x)),\n }\n\n lists = {\n \"list_dummy_data\": np.array(range(10)),\n \"list_dummy_number\": np.array(range(10), dtype=np.uint8),\n }\n\n dataset = {}\n data_fields = {}\n for set_name in sets:\n dataset[set_name] = self.populate_set(sets[set_name], fields, lists)\n data_fields[set_name] = sorted(dataset[set_name].keys())\n\n return dataset, data_fields", "def build_data(seed):\n rs = np.random.RandomState(seed)\n\n def y(x):\n \"\"\" y(x) = 1 + 0.3 * x_1 - 0.6 * x_2^2 - 0.2 * x_3^3 + 0.5 x_4^4 \"\"\"\n x1, x2, x3, x4 = x[:, 0], x[:, 1], x[:, 2], x[:, 3]\n return 1 + 0.3 * x1 - 0.6 * x2 ** 2 - 0.2 * x3 ** 3 + 0.5 * x4 ** 4\n\n xtrain = rs.rand(10000, 4)\n xtest = rs.rand(1000, 4)\n ytrain = y(xtrain) + rs.rand(10000) / 10\n ytest = y(xtest) + rs.rand(1000) / 10\n return xtrain, xtest, ytrain, ytest", "def generate_data(groups):\n # get path list for the intended classification problem\n input_paths = generate_input_list(groups) \n X_lst = []\n y = []\n for p in input_paths:\n dp = pd.read_csv(p, sep = '\\t') #datapoint\n # Normalization \n # norm = lambda x: (x - x.mean()) / x.std()\n # dp = dp.apply(norm)\n # Min-Max scaling \n #dp_norm = (dp - dp.min()) / (dp.max() - dp.min())\n #dp = dp_norm.values\n if dp.isnull().sum().sum()>0:\n# print(p, dp.isnull().sum().sum())\n continue\n dp = dp.drop(['time'], axis = 1) \n dp = dp.iloc[:1600:4]\n\n if dp.isnull().sum().sum()>0:\n# print('after norm',p, dp.isnull().sum().sum())\n continue\n dp = dp.values\n\n X_lst.append(dp)\n sample_y = get_target(p, text= True)\n y.append(sample_y)\n X = np.stack(X_lst, axis=0)\n \n # convert y into int 0 and 1\n encoder = LabelEncoder()\n encoder.fit(y)\n y = encoder.transform(y)\n y_dummy = y\n # convert y into one-hot encoding\n if len(groups)>2:\n y_dummy = pd.get_dummies(y)\n y_dummy = y_dummy.values\n return X, y , y_dummy", "def test_generate_all_training(self):\n facade = ChatetteFacade.get_or_create()\n\n input_dir_path = 
\"tests/system-testing/inputs/generate-all/\"\n input_filenames = [\n \"simplest.chatette\", \"only-words.chatette\",\n \"words-and-groups.chatette\", \"alias.chatette\", \"include.chatette\",\n \"slot.chatette\", \"slotrolegroup.chatette\"\n ]\n for filename in input_filenames:\n file_path = os.path.join(input_dir_path, filename)\n facade.run(file_path)\n if not TestSystem.check_no_duplicates(facade.train_examples):\n pytest.fail(\n \"Some examples were generated several times \" +\n \"when dealing with file '\" + filename + \"'.\\nGenerated: \" + \\\n str(facade.train_examples)\n )\n legal_examples = TestSystem.get_legal_examples(file_path)\n for ex in facade.train_examples:\n formatted_ex = {\"intent\": ex.intent_name, \"text\": ex.text}\n if formatted_ex not in legal_examples:\n pytest.fail(\n str(formatted_ex) + \" is not a legal example for '\" + \\\n file_path + \"'\"\n )\n if len(legal_examples) != len(facade.train_examples):\n training_texts = [ex.text for ex in facade.train_examples]\n for legal_ex in legal_examples:\n if legal_ex[\"text\"] not in training_texts:\n pytest.fail(\n \"Example '\" + legal_ex[\"text\"] + \\\n \"' was not generated.\"\n )\n pytest.fail(\n \"An unknown example was not generated (\" + \\\n str(len(facade.train_examples)) + \\\n \" generated instead of \" + str(len(legal_examples)) + \\\n \").\\nGenerated: \" + str(facade.train_examples)\n )\n legal_syn = TestSystem.get_legal_synonyms(file_path)\n if legal_syn is not None:\n synonyms = AST.get_or_create().get_entities_synonyms()\n for key in synonyms:\n if key not in legal_syn:\n pytest.fail(\n \"'\" + key + \"' shouldn't have any synonyms.\"\n )\n for syn in synonyms[key]:\n if syn not in legal_syn[key]:\n pytest.fail(\n \"'\" + syn + \"' shouldn't be a synonym of '\" + \\\n key + \"'\"\n )", "def prep_data_fn(self, st_train_dt, end_train_dt, st_val_dt, end_val_dt, st_test_dt, end_test_dt):\n df = self.get_prep_data()\n train = df[(df['ft_data_dt'] >= st_train_dt) & (df['ft_data_dt'] <= end_train_dt)]\n val = df[(df['ft_data_dt'] >= st_val_dt) & (df['ft_data_dt'] <= end_val_dt)].sample(frac=0.4, random_state=2021)\n test = df[(df['ft_data_dt'] >= st_test_dt) & (df['ft_data_dt'] <= end_test_dt)]\n print(f'----train----')\n print(train[['ft_data_dt', 'target', 'idd']].groupby(['ft_data_dt', 'target']).agg(['count']))\n print(f'----validation----')\n print(val[['ft_data_dt', 'target', 'idd']].groupby(['ft_data_dt', 'target']).agg(['count']))\n print(f'----test----')\n print(test[['ft_data_dt', 'target', 'idd']].groupby(['ft_data_dt', 'target']).agg(['count']))\n self.set_train(train)\n self.set_validation(val)\n self.set_test(test)\n train_X = train[[c for c in train.columns if c not in ['idd', 'ft_data_dt', 'target']]]\n train_y = train['target']\n val_X = val[[c for c in train.columns if c not in ['idd', 'ft_data_dt', 'target']]]\n val_y = val['target']\n test_X = test[[c for c in train.columns if c not in ['idd', 'ft_data_dt', 'target']]]\n test_y = test['target']\n self.set_train_X(train_X)\n self.set_train_y(train_y)\n self.set_val_X(val_X)\n self.set_val_y(val_y)\n self.set_test_X(test_X)\n self.set_test_y(test_y)", "def create_train_test(option, transform, params, split=0.2):\r\n clip_im_dir = option.clip_im_dir\r\n matting_dir = option.matting_dir\r\n csv_path = option.csv_path\r\n \r\n print(\"create datasets\")\r\n \r\n \r\n data_df = pd.read_csv(csv_path)\r\n # data_df = MergeDataframe(clip_im_dir, matting_dir)\r\n \r\n #separate data in training and test data (20/80)\r\n train_df, test_df = 
train_test_split(data_df, test_size=split)\r\n \r\n #search right Dataset class\r\n package_dir = Path(src.dataset.__file__).resolve().parent\r\n\r\n for (_, module_name, _) in iter_modules([package_dir]):\r\n # print(module_name, self.ComType)\r\n if option.dataset.lower() == module_name.lower() :\r\n modelModule = importlib.import_module(\".\"+module_name)\r\n break\r\n \r\n # train data\r\n training_set = modelModule(train_df, clip_im_dir, matting_dir, transform, transform)\r\n train_loader = DataLoader(training_set, **params)\r\n \r\n \r\n #test data\r\n testing_set = modelModule(test_df, clip_im_dir, matting_dir, transform, transform)\r\n test_loader = DataLoader(testing_set, **params)\r\n \r\n return train_loader, test_loader", "def set_data():\r\n #if not os.path.exists(filepath):\r\n #download_data()\r\n metadata = read(filepath + flist[-1])\r\n ndata = metadata['num_cases_per_batch']\r\n ndim = metadata['num_vis']\r\n\r\n data, train, test = {}, {}, {}\r\n data['labels'] = metadata['label_names']\r\n data['ntraindata'] = metadata['num_cases_per_batch'] * (len(flist) - 2)\r\n data['ntestdata'] = metadata['num_cases_per_batch']\r\n data['ndim'] = metadata['num_vis']\r\n\r\n train['x'], train['y'] = convert_train(data['ntraindata'], data['ndim'])\r\n\r\n testdata = read(filepath + flist[-2])\r\n test['x'] = testdata['data']\r\n test['y'] = testdata['labels']\r\n\r\n data['train'], data['test'] = train, test\r\n save_pkl(data)", "def get_test_batches(data_dir='/home/yunhan/batchified'):\n # train 3 valid 1\n # Use batch 1 - 53 as train (60%), 54 - 71 as validation (20%), 72 - 89 as test (20%)\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 72\n for i in range(n):\n X = np.load(\"%s/X%d.npy\" % (data_dir, idx[i]))/255.\n Y = np.load(\"%s/y%d.npy\" % (data_dir, idx[i])).reshape(-1)\n yield X, Y", "def train(self, training_data):\n pass", "def create_sets(test, data, test_size=0.2, write=False):\n y_test = test['y_old']\n X_test = test.drop('y_old', 1)\n y_data = data['y_old']\n X_data = data.drop('y_old', 1)\n X_train, X_val, y_train, y_val = train_test_split(X_data, y_data, test_size=test_size, random_state=123)\n if write:\n pickle.dump((X_train, X_val, y_train, y_val), open(obj_save_path+'train_val_df.p', 'wb'))\n #X_train, X_val, y_train, y_val = pickle.load(open(obj_save_path+'train_val_df.p', 'rb'))\n return X_train, y_train, X_val, y_val, X_test, y_test", "def make_data(input_filepath, output_filepath):\n\n df_train = pd.read_csv(input_filepath+'train_u6lujuX_CVtuZ9i.csv', index_col=0)\n df_test = pd.read_csv(input_filepath+'test_Y3wMUE5_7gLdaTN.csv', index_col=0)\n print('Sizes', df_train.shape, df_test.shape)\n print(\"Outcome dispersion:\\n\", df_train['Loan_Status'].value_counts())\n\n\n # recode and save outcome vector\n y = df_train['Loan_Status'].map({'N': 0, 'Y': 1})\n\n del df_train['Loan_Status']\n\n # all in one dataframe\n df = pd.concat([df_train, df_test])\n print(df.shape)\n\n from src.features.build_features import make_features\n df = make_features(df)\n\n # Divide data on train and test again and save\n data_train = df[df.index.isin(df_train.index)]\n data_test = df[df.index.isin(df_test.index)]\n print(data_train.shape, data_test.shape)\n\n data_tmp = data_train.copy()\n data_tmp['y'] = y\n\n\n data_tmp.to_csv(output_filepath + 'train_ready.csv', index=False)\n data_test.to_csv(output_filepath + 'test_ready.csv', index=False)\n id_test = pd.DataFrame(data=df_test.index, columns=['Loan_ID'])\n id_test.to_csv(output_filepath + 'id_test.csv', 
index=False)", "def get_data(generator, random, bench_id):\n x_train, y_train, x_test, y_test = generator(random, bench_id)\n x_train = np.c_[np.ones(len(x_train)), x_train]\n x_test = np.c_[np.ones(len(x_test)), x_test]\n return x_train, y_train, x_test, y_test", "def data_creator(config):\n train_dataset, val_dataset = LinearDataset(2, 5), LinearDataset(2, 5)\n train_loader = DataLoader(train_dataset, batch_size=config[\"batch_size\"])\n val_loader = DataLoader(val_dataset, batch_size=config[\"batch_size\"])\n return train_loader, val_loader", "def get_dldata(filepath, dlTrainCorpusPath, dlTestCorpusPath, seed=2018, batch_size=16):\r\n\tf = open(\"record/synthetic and academic datasets/testcases_train.pkl\",'rb') #get the testcase ids of train sets and test sets\r\n\ttestcases += pickle.load(f) \r\n\tf.close()\r\n\r\n\tf = open(\"record/synthetic and academic datasets/testcases_test.pkl\",'rb')\r\n\ttestcases += pickle.load(f)\r\n\tf.close()\r\n\t\r\n print(\"produce train dataset...\") \r\n N = 6\r\n num = list(range(N))\r\n for i in num:\r\n train_set = [[], [], [], [], [], []]\r\n for folder_train in folders_train[int(i*len(folders_train)/N) : int((i+1)*len(folders_train)/N)]:\r\n if not folder_train in os.listdir(filepath):\r\n continue\r\n print(\"\\r\"+str(folder_train), end='')\r\n for filename in os.listdir(os.path.join(filepath, folder_train)):\r\n f = open(filepath + folder_train + '/' + filename, 'rb')\r\n data = pickle.load(f)\r\n f.close()\r\n if len(data[0][0]) > MAXLEN:\r\n data[2] = [x for x in data[2] if x <= MAXLEN]\r\n data[0] = cutdata(data[0][0])\r\n if data[0] == None:\r\n continue \r\n for n in range(len(data)):\r\n train_set[n].append(data[n])\r\n train_set[-1].append(folder_train+\"/\"+filename)\r\n f_train = open(dlTrainCorpusPath + \"train_\" + str(i)+ \"_0818.pkl\", 'wb')\r\n pickle.dump(train_set, f_train)\r\n f_train.close()\r\n\r\n del train_set \r\n gc.collect() \r\n\r\n print(\"\\nproduce test dataset...\")\r\n N = 6\r\n num = list(range(N))\r\n for i in num:\r\n test_set = [[], [], [], [], [], []]\r\n for folder_test in folders_test[int(i*len(folders_test)/N) : int((i+1)*len(folders_test)/N)]:\r\n if not folder_test in os.listdir(filepath):\r\n continue\r\n print(\"\\r\"+str(folder_test), end='')\r\n for filename in os.listdir(os.path.join(filepath, folder_test)):\r\n f = open(filepath + folder_test + '/' + filename, 'rb')\r\n data = pickle.load(f)\r\n f.close()\r\n if len(data[0][0]) > MAXLEN:\r\n data[2] = [x for x in data[2] if x <= MAXLEN]\r\n data[0] = cutdata(data[0][0])\r\n if data[0] == None:\r\n continue \r\n for n in range(len(data)):\r\n test_set[n].append(data[n])\r\n test_set[-1].append(folder_test+\"/\"+filename)\r\n \r\n f_test = open(dlTestCorpusPath + \"test_\" + str(i)+ \"_0124.pkl\", 'wb')\r\n pickle.dump(test_set, f_test)\r\n f_test.close()\r\n\r\n del test_set\r\n gc.collect()\r\n return", "def run():\r\n \r\n LABEL = data.LabelField(use_vocab=True)\r\n TEXT = data.Field(sequential=True, tokenize=lambda x:x.split(), lower=True, fix_length=config.MAX_LENGTH)\r\n\r\n### 1/5\r\n dataset = data.TabularDataset(path=config.TRAIN_DATASET_FNAME, \r\n format='csv', \r\n fields=[('text', TEXT),('label', LABEL)], \r\n skip_header=True)\r\n # split the dataset, 8:2\r\n train_dataset, valid_dataset = dataset.split(split_ratio=[0.8,0.2], random_state=random.getstate())\r\n \r\n test_data = data.TabularDataset(path=config.TEST_DATASET_FNAME,\r\n format='csv', \r\n fields=[('text', TEXT),('label', LABEL)], \r\n skip_header=True)\r\n \r\n### 2\r\n# 
train_dataset = data.TabularDataset(path=config.TRAIN_DATASET_FNAME, \r\n# format='csv', \r\n# fields=[('text', TEXT),('label', LABEL)], \r\n# skip_header=True) \r\n# valid_dataset = data.TabularDataset(path=config.VAL_DATASET_FNAME, \r\n# format='csv', \r\n# fields=[('text', TEXT),('label', LABEL)], \r\n# skip_header=True) \r\n \r\n# test_data = data.TabularDataset(path=config.TEST_DATASET_FNAME,\r\n# format='csv', \r\n# fields=[('text', TEXT),('label', LABEL)], \r\n# skip_header=True)\r\n \r\n### 3/4\r\n# train_dataset = data.TabularDataset(path=config.TRAIN_DATASET_FNAME, \r\n# format='csv', \r\n# fields=[('text', TEXT),('label', LABEL)], \r\n# skip_header=True) \r\n \r\n# dataset = data.TabularDataset(path=config.TEST_DATASET_FNAME, \r\n# format='csv', \r\n# fields=[('text', TEXT),('label', LABEL)], \r\n# skip_header=True)\r\n# # split the dataset, 5:5\r\n# valid_dataset, test_data = dataset.split(split_ratio=[0.5,0.5], random_state=random.getstate())\r\n\r\n### 5\r\n\r\n\r\n\r\n # load embeddings\r\n vectors_data = load_vectors(config.EMBEDDING_FNAME)\r\n\r\n TEXT.build_vocab(train_dataset, vectors=vectors_data)\r\n LABEL.build_vocab(train_dataset)\r\n print ('vector size:',TEXT.vocab.vectors.size())\r\n embedding_pretrained_matrix = TEXT.vocab.vectors\r\n \r\n # create torch device\r\n print(\"To device...\")\r\n USE_CUDA = torch.cuda.is_available()\r\n device = torch.device(\"cuda\" if USE_CUDA else \"cpu\")\r\n\r\n train_it, valid_it = data.BucketIterator.splits((train_dataset, valid_dataset),\r\n batch_sizes=(config.TRAIN_BATCH_SIZE,config.VAL_BATCH_SIZE), \r\n device=device, \r\n sort_key=lambda x: len(x.text),\r\n sort_within_batch=False,\r\n shuffle=True,\r\n repeat=False)\r\n test_it = data.BucketIterator(test_data, \r\n batch_size=config.TEST_BATCH_SIZE, \r\n sort_key=lambda x: len(x.text), \r\n shuffle=False,\r\n device=device)\r\n \r\n \r\n # fetch model\r\n vocab_size = len(TEXT.vocab) # TEXT.vocab.vectors.size()\r\n# pretrained_vec = TEXT.vocab.vectors\r\n \r\n # select network \r\n x = import_module('networks.'+config.NETWORK)\r\n model = x.Model(vocab_size,embedding_pretrained=embedding_pretrained_matrix)\r\n \r\n # send model to device\r\n model.to(device)\r\n\r\n # initialize Adam optimizer\r\n optimizer = torch.optim.Adam(model.parameters(), lr=config.LEARNING_RATE)\r\n\r\n # if you have multiple GPUs, move the model to DataParallel to use multiple GPUs\r\n if torch.cuda.device_count() > 1:\r\n model = nn.DataParallel(model)\r\n \r\n params_list = []\r\n # train and validate for all epochs\r\n for epoch in range(config.EPOCHS):\r\n epoch_start_time = time.time()\r\n\r\n ###----Train--------\r\n train_outputs, train_labels, train_loss = engine.train_fn(train_it, model, optimizer, device)\r\n train_outputs = torch.Tensor(train_outputs)\r\n _, train_predicted = torch.max(train_outputs, dim=1)\r\n train_parameters_dict = metrics_func.performance_evaluation_func(train_predicted,train_labels,epoch=str(epoch))\r\n # save train parameters\r\n params_list.append(train_parameters_dict)\r\n train_f1 = train_parameters_dict['f1_score_macro']\r\n train_prec = train_parameters_dict['precision_macro']\r\n train_recall = train_parameters_dict['recall_macro']\r\n print('\\n')\r\n print(f\" Train Epoch: {epoch}, F1 = {train_f1},precision = {train_prec},recall = {train_recall}\")\r\n ###------------\r\n \r\n # validate\r\n val_outputs, val_labels, valid_loss = engine.evaluate_fn(valid_it, model, device)\r\n val_outputs = torch.Tensor(val_outputs)\r\n _, val_predicted = torch.max(val_outputs, dim=1) \r\n # calculate evaluation parameters\r\n val_parameters_dict = metrics_func.performance_evaluation_func(val_predicted, val_labels, epoch=str(epoch),flag='val')\r\n # save evaluation parameters\r\n params_list.append(val_parameters_dict)\r\n \r\n val_f1 = val_parameters_dict['f1_score_macro']\r\n val_prec = val_parameters_dict['precision_macro']\r\n val_recall = val_parameters_dict['recall_macro']\r\n print(f\"Val Epoch: {epoch},F1 = {val_f1},precision = {val_prec}, recall = {val_recall}\")\r\n \r\n ###-------Test-----------------------\r\n test_outputs, test_labels, test_loss = engine.evaluate_fn(test_it, model, device)\r\n test_outputs = torch.Tensor(test_outputs)\r\n _, test_predicted = torch.max(test_outputs, dim=1) \r\n # calculate evaluation parameters\r\n test_parameters_dict = metrics_func.performance_evaluation_func(test_predicted, test_labels, epoch=str(epoch),flag='test')\r\n # save evaluation parameters\r\n params_list.append(test_parameters_dict)\r\n \r\n test_f1 = test_parameters_dict['f1_score_macro']\r\n test_prec = test_parameters_dict['precision_macro']\r\n test_recall = test_parameters_dict['recall_macro']\r\n print(f\"test Epoch: {epoch},F1 = {test_f1},precision = {test_prec}, recall = {test_recall}\")\r\n \r\n lr_scheduler = LRScheduler(optimizer)\r\n lr_scheduler(valid_loss)\r\n \r\n \r\n # simple early stopping\r\n# val_f1 = float(val_f1)\r\n #f1 = (float(train_f1) + float(val_f1)) / 2\r\n val_loss = float(valid_loss)\r\n early_stopping(val_loss, model)\r\n if early_stopping.early_stop:\r\n print(\"Early stopping\")\r\n break\r\n # get the model parameters saved at early stopping\r\n# model.load_state_dict(torch.load('checkpoint.pt'))\r\n\r\n# save_model_func(model, epoch, path='outputs')\r\n \r\n metrics_func.save_parameters_txt(params_list)", "def make_testing_training(data, percent_training, random_split=False, seed=None):\n ## Making testing and training sets\n data['computed Case Date/Time Closed'] = pd.to_datetime(data['Case Date/Time Closed'])\n ordered_data = data.sort(columns=['computed Case Date/Time Closed'])\n np.random.seed(seed=seed) \n nrows, ncols = ordered_data.shape\n\n if random_split:\n training_indices = np.random.choice(ordered_data.index, size=int(nrows*percent_training), replace=False)\n training = ordered_data.ix[training_indices]\n testing = ordered_data[~data['case_id'].isin(training['case_id'])]\n else: # split by date\n training_stop_index = int(percent_training * nrows)\n training = ordered_data[:training_stop_index]\n testing = ordered_data[training_stop_index:]\n\n return training, testing", "def generate_synthetic_data(args):\n number_training_obeservations = args.ntr\n number_testing_obeservations = args.nte\n number_dimensions = args.nd\n mu = args.mu\n feature_model = args.feature_model\n outcome_model = args.outcome_model\n sigma_outcome = args.sigma_outcome\n number_environments = args.ne\n \n T_train = generate_T(number_training_obeservations)\n T_test = generate_T(number_testing_obeservations)\n\n X_train, X_test = generate_x(number_dimensions, T_train, T_test, mu, feature_model)\n \n train_potential_outcome, test_potential_outcome = generate_outcomes(outcome_model, feature_model, X_train, X_test, sigma_outcome)\n\n train_po_control = train_potential_outcome[:,0].reshape(number_training_obeservations,1)\n train_po_treatment = train_potential_outcome[:,1].reshape(number_training_obeservations,1)\n\n y_train = np.multiply(T_train , train_po_treatment) + np.multiply(1-T_train , train_po_control)\n\n return 
X_train, T_train, y_train, X_test, T_test, test_potential_outcome", "def test_trainGenerator():\n\n    # check type\n    assert isinstance(trainset, surprise.trainset.Trainset)\n\n    # the number of users in trainset should be equal to the number of users from the database plus 1\n    assert len(trainset.all_users()) == len(svd.song_df.user_id.unique())+1", "def generate_dataset():\n    if not os.path.exists(\"../data/COVID-19/COVID-19.npy\"):\n        print(\"Processing Training Data.\")\n        training_data = get_training_data('../data/COVID-19/train')\n        print(\"Processing Test Data.\")\n        test_data = get_training_data('../data/COVID-19/test')\n\n        x_train, y_train, x_test, y_test = [], [], [], []\n\n        for feature, label in training_data:\n            x_train.append(feature)\n            y_train.append(label)\n\n        for feature, label in test_data:\n            x_test.append(feature)\n            y_test.append(label)\n\n        # Normalize the data\n        x_train = np.array(x_train) / 255\n        x_test = np.array(x_test) / 255\n\n        # resize data for deep learning\n        x_train = x_train.reshape(-1, 3, img_size, img_size)\n        y_train = np.array(y_train)\n        x_test = x_test.reshape(-1, 3, img_size, img_size)\n        y_test = np.array(y_test)\n\n        # with data augmentation to prevent overfitting and to handle the imbalance in the dataset\n        dataset = {\"x_train\": x_train, \"y_train\": y_train, \"x_test\": x_test, \"y_test\": y_test}\n        np.save(\"../data/COVID-19/COVID-19.npy\", dataset)\n    else:\n        dataset = np.load(\"../data/COVID-19/COVID-19.npy\", allow_pickle=True).item()\n        x_train, y_train, x_test, y_test = dataset[\"x_train\"], dataset[\"y_train\"], dataset[\"x_test\"], dataset[\"y_test\"]\n\n    x_train_tensor = torch.from_numpy(x_train)\n    x_train_tensor = x_train_tensor.type(torch.FloatTensor)\n    y_train_tensor = torch.from_numpy(y_train)\n    y_train_tensor = y_train_tensor.type(torch.LongTensor)\n    x_test_tensor = torch.from_numpy(x_test)\n    x_test_tensor = x_test_tensor.type(torch.FloatTensor)\n    y_test_tensor = torch.from_numpy(y_test)\n    y_test_tensor = y_test_tensor.type(torch.LongTensor)\n\n    train_dataset = TensorDataset(x_train_tensor, y_train_tensor)\n    test_dataset = TensorDataset(x_test_tensor, y_test_tensor)\n\n    return train_dataset, test_dataset", "def buildAndTrain(trainingData):\n\tname = trainingData.drop(['count', 'casual', 'registered'], axis=1).columns\n\ttarget = trainingData['count'].values\n\tfeature = trainingData.drop(['count', 'casual', 'registered'], axis=1).values\n\t# feature scaling\n\tfeature_scaled = preprocessing.scale(feature)\n\t# 5-iteration shuffle-split cross validation (test_size=0.2)\n\tcv = cross_validation.ShuffleSplit(len(feature_scaled), n_iter=5, test_size=0.2, random_state=0)\n\t# build models, then train them and get their accuracy\n\tprint('\\n---------Ridge regression results--------\\n')\n\tfor train, test in cv:\n\t\tregLR = linear_model.Ridge().fit(feature_scaled[train], target[train])\n\t\tprint('train score:{0:.3f}, test score:{1:.3f}\\n'.format(\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tregLR.score(feature_scaled[train], target[train]),\n\t\t                                                        regLR.score(feature_scaled[test], target[test])))\n\tprint('\\n---------SVM results--------\\n')\n\tfor train, test in cv:\n\t\tregSvm = svm.SVR().fit(feature_scaled[train], target[train])\n\t\tprint('train score:{0:.3f}, test score:{1:.3f}\\n'.format(\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tregSvm.score(feature_scaled[train], target[train]),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tregSvm.score(feature_scaled[test], target[test])))\n\tprint('\\n---------Random forest results--------\\n')\n\tfor train, test in cv:\n\t\tregRF = RandomForestRegressor(n_estimators=100).fit(feature_scaled[train], 
target[train])\n\t\tprint('train score:{0:.3f}, test score:{1:.3f}\\n'.format(\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tregRF.score(feature_scaled[train], target[train]),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tregRF.score(feature_scaled[test], target[test])))\n\t# drop some low-correlation features\n\tfeatureReduced = trainingData.drop(['count', 'casual', 'registered', 'holiday', 'workingday', 'day'], axis=1).values\n\tfeatureReduced_scaled = preprocessing.scale(featureReduced)\n\tprint('\\n---------Random forest results after reducing feature dimensionality to avoid overfitting--------\\n')\n\tfor train, test in cv:\n\t\tregRFImpr = RandomForestRegressor(n_estimators=100).fit(featureReduced_scaled[train], target[train])\n\t\tprint('train score:{0:.3f}, test score:{1:.3f}\\n'.format(\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tregRFImpr.score(featureReduced_scaled[train], target[train]),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tregRFImpr.score(featureReduced_scaled[test], target[test])))\n\t# use grid search algorithm to improve random forest regression\n\tX_train, X_test, y_train, y_test = cross_validation.train_test_split(\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfeature_scaled, target, test_size=0.2, random_state=0)\n\ttuned_parameters = [{'n_estimators': [10,100,500], 'max_depth': [2,3,4,5,6,7,8,9,10]}]\n\tscores = ['r2']\n\n\tfor score in scores:\n\t\tprint(score)\n\t\tclf = GridSearchCV(RandomForestRegressor(), tuned_parameters, cv=5, scoring=score)\n\t\tclf.fit(X_train, y_train)\n\t\tprint(clf.best_estimator_)\n\t\tprint('each parameter combination is ')\n\t\tfor params, mean_score, scores in clf.grid_scores_:\n\t\t\tprint('{0:.3f} (+/-{1:.03f}) for {2}'.format(mean_score, scores.std()/2, params))\n\n\tprint('--------Random forest results with the optimal parameters--------')\n\tfor train, test in cv:\n\t\tregRFBest = RandomForestRegressor(n_estimators=100, max_depth=10).fit(feature_scaled[train], target[train])\n\t\tprint('train score:{0:.3f}, test score:{1:.3f}\\n'.format(\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tregRFBest.score(feature_scaled[train], target[train]),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tregRFBest.score(feature_scaled[test], target[test])))\n\treturn regRFBest, feature_scaled, target", "def run_train_test(training_file, testing_file):\n\n    training = parse_file(training_file)\n    training = np.array(training)\n\n    X_train = training[:,:4]\n    Y_train = training[:,4]\n\n    testing = parse_file(testing_file)\n    testing = np.array(testing)\n\n    X_test = testing[:,:4]\n    Y_test = testing[:,4]\n\n    gini_clf = DecisionTreeClassifier(random_state=0)\n    gini_clf.fit(X_train, Y_train)\n    gini_Y_hat = gini_clf.predict(X_test)\n    gini_tp, gini_tn, gini_fp, gini_fn, gini_err = eval_results(Y_test, gini_Y_hat)\n\n    entropy_clf = DecisionTreeClassifier(criterion=\"entropy\", random_state=0)\n    entropy_clf.fit(X_train, Y_train)\n    entropy_Y_hat = entropy_clf.predict(X_test)\n    entropy_tp, entropy_tn, entropy_fp, entropy_fn, entropy_err = eval_results(Y_test, entropy_Y_hat)\n\n    return {\n        \"gini\":{\n            'True positives': gini_tp,\n            'True negatives': gini_tn,\n            'False positives': gini_fp,\n            'False negatives': gini_fn,\n            'Error rate': gini_err\n        },\n        \"entropy\":{\n            'True positives': entropy_tp,\n            'True negatives': entropy_tn,\n            'False positives': entropy_fp,\n            'False negatives': entropy_fn,\n            'Error rate': entropy_err\n        }\n    }", "def load_train_test_transactions(train_size=0.7):\n    X, y = features_target_split()\n    X_train, X_test, y_train, y_test = train_test_split(X,y,train_size=train_size, random_state=7)\n    print('\\nTraining and testing data creation successful\\n')\n    return X_train, X_test, 
y_train,y_test", "def load_data():\n print(\"PARSING TRAIN\")\n ys_train, x_train, ids_train = load_pickle_data(\"ys_train\"), load_pickle_data(\"x_train\"), load_pickle_data(\n \"ids_train\")\n if ys_train is None or x_train is None or ids_train is None:\n ys_train, x_train, ids_train = load_csv_data(\"{}/train.csv\".format(DATA_DIR))\n dump_pickle_data(ys_train, \"ys_train\")\n dump_pickle_data(x_train, \"x_train\")\n dump_pickle_data(ids_train, \"ids_train\")\n\n print(\"PARSING TEST\")\n x_test, ids_test = load_pickle_data(\"x_test\"), load_pickle_data(\"ids_test\")\n if x_test is None or ids_test is None:\n _, x_test, ids_test = load_csv_data(\"{}/test.csv\".format(DATA_DIR))\n dump_pickle_data(x_test, \"x_test\")\n dump_pickle_data(ids_test, \"ids_test\")\n\n return ys_train, x_train, ids_train, x_test, ids_test", "def make_training_set(ind_list, training_data): \n \n exp = training_data[ind_list[0]] \n X_train = exp[0]\n u_train = exp[1] \n\n for i in ind_list[1:]: \n exp = training_data[i]\n X_train = np.append(X_train, exp[0], axis=0)\n u_train = np.append(u_train, exp[1], axis=0)\n\n return X_train, u_train", "def create_train_test_sets(self,x,y,lenTest):\n \n nbInd = x.shape[0]\n shuffler = np.random.permutation(nbInd)\n x_train = x[shuffler][0:(nbInd-lenTest),]\n y_train = y[shuffler][0:(nbInd-lenTest),]\n\n x_test = x[shuffler][(nbInd-lenTest):nbInd,]\n y_test = y[shuffler][(nbInd-lenTest):nbInd,]\n\n return x_train,y_train,x_test,y_test", "def prepare_data(train_csv, test_csv):\n\n train = pd.read_csv(train_csv)\n test = pd.read_csv(test_csv)\n train = train[test.shape[0]:]\n valid = train[0:test.shape[0]]\n\n x_train = train.drop(columns=\"label\") / 255\n y_train = train.label\n x_valid = valid.drop(columns=\"label\") / 255\n y_valid = valid.label\n x_test = test.drop(columns=\"label\") / 255\n y_test = test.label\n\n y_train = tf.keras.utils.to_categorical(y_train)\n y_valid = tf.keras.utils.to_categorical(y_valid)\n y_test = tf.keras.utils.to_categorical(y_test)\n x_train = x_train.values\n x_valid = x_valid.values\n x_test = x_test.values\n\n return x_train, y_train, x_valid, y_valid, x_test, y_test", "def build_dataset(self, data_path, test_size):\n dataset, label_dataset = self.load_dataset(data_path)\n \n # shuffle \n dataset, label_dataset = shuffle(dataset, label_dataset, random_state = 2111)\n \n # split data \n size = int(len(dataset) * (1 - test_size)) \n self.x_train = dataset[:size]\n self.x_val = dataset[size:]\n self.y_train = np.array(label_dataset[:size])\n self.y_val = np.array(label_dataset[size:])\n self.vocab_size = len(self.x_train)\n \n # build tokenizer \n self.tokenizer = self.build_tokenizer(self.x_train, self.vocab_size)\n\n # Saving Tokenizer\n print('=============Saving Tokenizer================')\n print('Begin...')\n if not os.path.exists(self.vocab_folder):\n try:\n os.makedirs(self.vocab_folder)\n except OSError as e:\n raise IOError(\"Failed to create folders\")\n\n tokenizer_json = self.tokenizer.to_json()\n with io.open(self.save_tokenizer_path, 'w', encoding= 'utf-8') as f:\n f.write(json.dumps(tokenizer_json, ensure_ascii= False))\n print('Done!!!')\n\n # Saving label dict\n with open('label.json', 'w') as f:\n json.dump(self.label_dict, f)\n\n # get max_len \n self.max_len = self.get_max_len(self.x_train)\n \n # tokenizing \n self.x_train = np.array(self.tokenize(self.tokenizer, self.x_train, self.max_len))\n self.x_val = np.array(self.tokenize(self.tokenizer,self.x_val, self.max_len))\n return self.x_train, self.x_val, self.y_train, 
self.y_val", "def evaluate_training_on_testing(net_name, dobj, dir_path, t_start, batch_size=32, generator=g.DataGeneratorMultInput ,testing_files=None, **kwargs):\n opt_arg, kwargs = filter_keys(evaluate_net_defaults(), kwargs)\n \n wiki_data = {}\n for k, v in opt_arg.items():\n wiki_data[k] = str(v)\n \n t_string = date_to_file_string(t_start)\n \n ###\n \n if testing_files == None:\n global testing_file_names\n testing_files = testing_file_names\n \n tmp_files = []\n \n for f in testing_files:\n if os.path.isfile(os.path.join(dir_path, f)):\n tmp_files.append(f)\n \n testing_files = tmp_files\n \n ###\n \n print(\"Now loading the last model\")\n \n net_last = keras.models.load_model(os.path.join(dir_path, net_name + '.hf5'), custom_objects=custom_layers.get_custom_objects())\n \n print(\"Now loading the best model\")\n \n #Load networks\n if not opt_arg['best_epoch'] == 0:\n net_best = keras.models.load_model(os.path.join(dir_path, net_name + '_epoch_' + str(opt_arg['best_epoch']) + '.hf5'), custom_objects=custom_layers.get_custom_objects())\n else:\n net_best = None\n \n print(\"Now getting the data\")\n \n #Run predict generator on the test data for each net.\n tmp_prediction_paths_last = []\n tmp_prediction_paths_best = []\n for f in testing_files:\n tmp_prediction_paths_last.append(os.path.join(dir_path, os.path.splitext(f)[0] + '_predictions_last.hf5'))\n if not net_best == None:\n tmp_prediction_paths_best.append(os.path.join(dir_path, os.path.splitext(f)[0] + '_predictions_best.hf5'))\n \n dobj.set_file_path(f)\n dobj.unload_all()\n #dobj.get_set()\n print(\"dobj.shape: {}\".format(dobj.shape))\n dobj.get_formatted_data('testing', 'test_data')\n dobj.get_formatted_data('testing', 'test_labels')\n dobj.get_formatted_data('testing', 'test_snr_calculated')\n \n store_test_results(net_last, dobj, tmp_prediction_paths_last[-1], batch_size=batch_size, generator=generator)\n if not net_best == None:\n store_test_results(net_best, dobj, tmp_prediction_paths_best, batch_size=batch_size, generator=generator)\n \n prediction_path_last = os.path.join(dir_path, net_name + '_predictions_last_epoch_full_testing_' + t_string + '.hf5')\n join_test_results(tmp_prediction_paths_last, prediction_path_last, delete_copied_files=True)\n prediction_path_best = ''\n if not net_best == None:\n prediction_path_best = os.path.join(dir_path, net_name + '_predictions_best_epoch_full_testing_' + t_string + '.hf5')\n join_test_results(tmp_prediction_paths_best, prediction_path_best, delete_copied_files=True)\n \n #Make SNR plots\n SNR_plot_path_last = os.path.join(dir_path, net_name + '_snr_plot_last_epoch_full_testing_' + t_string + '.png')\n \n plot_true_and_calc_from_file(prediction_path_last, dobj, SNR_plot_path_last, show=opt_arg['show_snr_plot'], net_name=net_name + ' last epoch')\n \n SNR_plot_path_best = ''\n \n if not net_best == None:\n SNR_plot_path_best = os.path.join(dir_path, net_name + '_snr_plot_best_epoch_full_testing_' + t_string + '.png')\n \n plot_true_and_calc_from_file(prediction_path_best, dobj, SNR_plot_path_best, show=opt_arg['show_snr_plot'], net_name=net_name + ' best epoch')\n \n #Make false alarm plots\n false_alarm_plot_path_last = os.path.join(dir_path, net_name + '_false_alarm_plot_last_epoch_full_testing_' + t_string + '.png')\n \n tmp_false_alarm_path_last = plot_false_alarm(dobj, prediction_path_last, false_alarm_plot_path_last, show=opt_arg['show_false_alarm'])\n \n false_alarm_plot_prob_path_last = os.path.join(dir_path, net_name + 
'_false_alarm_plot_prob_last_epoch_full_testing_' + t_string + '.png')\n \n tmp_false_alarm_prob_path_last = plot_false_alarm_prob(dobj, prediction_path_last, false_alarm_plot_prob_path_last, show=opt_arg['show_false_alarm'])\n \n false_alarm_plot_path_best = ''\n \n false_alarm_plot_prob_path_best = ''\n \n tmp_false_alarm_path_best = ''\n \n tmp_false_alarm_prob_path_best = ''\n \n if not net_best == None:\n false_alarm_plot_path_best = os.path.join(dir_path, net_name + '_false_alarm_plot_best_epoch_full_testing_' + t_string + '.png')\n \n false_alarm_plot_prob_path_best = os.path.join(dir_path, net_name + '_false_alarm_plot_prob_best_epoch_full_testing_' + t_string + '.png')\n \n tmp_false_alarm_path_best = plot_false_alarm(dobj, prediction_path_best, false_alarm_plot_path_best, show=opt_arg['show_false_alarm'])\n \n tmp_false_alarm_prob_path_best = plot_false_alarm_prob(dobj, prediction_path_best, false_alarm_plot_prob_path_best, show=opt_arg['show_false_alarm'])\n \n #Make sensitivity plots\n snr_range = dobj.get_file_properties()['snr']\n \n sensitivity_plot_path_last = os.path.join(dir_path, net_name + '_sensitivity_plot_last_epoch_full_testing_' + t_string + '.png')\n \n sensitivity_plot_prob_path_last = os.path.join(dir_path, net_name + '_sensitivity_plot_prob_last_epoch_full_testing_' + t_string + '.png')\n \n plot_sensitivity(dobj, prediction_path_last, tmp_false_alarm_path_last, sensitivity_plot_path_last, bins=(snr_range[0]+1, snr_range[1], 1), show=opt_arg['show_sensitivity_plot'])\n \n plot_sensitivity_prob_from_pred_file(prediction_path_last, sensitivity_plot_prob_path_last, bins=(snr_range[0]+1, snr_range[1], 1))\n #plot_sensitivity_prob(dobj, prediction_path_last, tmp_false_alarm_prob_path_last, sensitivity_plot_prob_path_last, show=opt_arg['show_sensitivity_plot'])\n \n sensitivity_plot_path_best = ''\n \n sensitivity_plot_prob_path_best = ''\n \n if not net_best == None:\n sensitivity_plot_path_best = os.path.join(dir_path, net_name + '_sensitivity_plot_best_epoch_full_testing_' + t_string + '.png')\n \n sensitivity_plot_prob_path_best = os.path.join(dir_path, net_name + '_sensitivity_plot_prob_best_epoch_full_testing_' + t_string + '.png')\n \n plot_sensitivity(dobj, prediction_path_best, tmp_false_alarm_path_best, sensitivity_plot_path_best, bins=(snr_range[0], snr_range[1], 1), show=opt_arg['show_sensitivity_plot'])\n \n plot_sensitivity_prob_from_pred_file(prediction_path_best, sensitivity_plot_prob_path_best, bins=(snr_range[0]+1, snr_range[1], 1))\n #plot_sensitivity_prob(dobj, prediction_path_best, tmp_false_alarm_prob_path_best, sensitivity_plot_prob_path_best, show=opt_arg['show_sensitivity_plot'])\n \n return((SNR_plot_path_last, false_alarm_plot_path_last, false_alarm_plot_prob_path_last, sensitivity_plot_path_last, sensitivity_plot_prob_path_last, SNR_plot_path_best, false_alarm_plot_path_best, false_alarm_plot_prob_path_best, sensitivity_plot_path_best, sensitivity_plot_prob_path_best))", "def generate_data(self):\n\n # cfg.d_sour_num = 20 # number of source domains\n self.d_sour_a = [np.random.uniform(0.1, 5.0) for _ in range(cfg.d_sour_num)]\n self.d_sour_b = [np.random.uniform(0, np.pi) for _ in range(cfg.d_sour_num)]\n # cfg.d_targ_num = 1 # number of target domain\n self.d_targ_a = [np.random.uniform(0.1, 5.0) for _ in range(cfg.d_targ_num)]\n self.d_targ_b = [np.random.uniform(0, np.pi) for _ in range(cfg.d_targ_num)]\n\n\n # cfg.train_num = 100 # number of training point in each domain\n self.train_x = np.array([np.random.uniform(-5.0, 5.0) for _ in 
range(cfg.train_num)], dtype=np.float32).reshape(-1,1)\n        self.train_y = np.array([[self.d_sour_a[j] * np.sin(i + self.d_sour_b[j]) for i in self.train_x] for j in range(cfg.d_sour_num)], dtype=np.float32).reshape(cfg.d_sour_num, cfg.train_num, 1)\n\n        # cfg.val_num = 100\n        self.val_x = np.array([np.random.uniform(-5.0, 5.0) for _ in range(cfg.val_num)], dtype=np.float32).reshape(-1,1)\n        self.val_y = np.array([[self.d_sour_a[j] * np.sin(i + self.d_sour_b[j]) for i in self.val_x] for j in range(cfg.d_sour_num)], dtype=np.float32).reshape(cfg.d_sour_num, cfg.val_num, 1)\n\n        # cfg.support_num = 10\n        self.support_x = np.array([np.random.uniform(-5.0, 5.0) for _ in range(cfg.support_num)], dtype=np.float32).reshape(-1,1)\n        self.support_y = np.array([[self.d_targ_a[j] * np.sin(i + self.d_targ_b[j]) for i in self.support_x] for j in range(cfg.d_targ_num)], dtype=np.float32).reshape(cfg.d_targ_num, cfg.support_num, 1)\n\n        # cfg.test_num = 100\n        self.test_x = np.array([np.random.uniform(-5.0, 5.0) for _ in range(cfg.test_num)], dtype=np.float32).reshape(-1,1)\n        self.test_y = np.array([[self.d_targ_a[j] * np.sin(i + self.d_targ_b[j]) for i in self.test_x] for j in range(cfg.d_targ_num)], dtype=np.float32).reshape(cfg.d_targ_num, cfg.test_num, 1)\n\n        self.test_x_old = np.array([np.random.uniform(-5.0, 5.0) for _ in range(cfg.test_num)], dtype=np.float32).reshape(-1,1)\n        self.test_y_old = np.array([[self.d_sour_a[j] * np.sin(i + self.d_sour_b[j]) for i in self.test_x_old] for j in range(cfg.d_sour_num)], dtype=np.float32).reshape(cfg.d_sour_num, cfg.test_num, 1)", "def create_simple_data_set(\n    n_training_points,\n    n_testing_points,\n    low=0,\n    high=3,\n    mode=training_testing_split.SEPERATE,\n    kernel=kernel_matern,\n    shuffle=True,\n):\n    gp = gaussian_process(kernel=kernel, verbose=True)\n\n    mid = (low + high) / 2\n\n    if mode == training_testing_split.SEPERATE_LONG:\n        x_training, x_testing = __seperate_long(\n            n_training_points, n_testing_points, low, high\n        )\n    elif mode == training_testing_split.SEPERATE:\n        x_training, x_testing = __seperate(\n            n_training_points, n_testing_points, low, high\n        )\n    elif mode == training_testing_split.INTERSPREAD:\n        x_training, x_testing = __interspread(\n            n_training_points, n_testing_points, low, high\n        )\n    elif mode == training_testing_split.RANDOM:\n        x_training, x_testing = __random(n_training_points, n_testing_points, low, high)\n    elif mode == training_testing_split.MIXED:\n\n        def r(z):\n            dist = np.random.randint(low=1, high=100, size=4)\n            ฮป = lambda x: x / dist.sum()\n            vfunc = np.vectorize(ฮป)\n            dist = vfunc(dist)\n            return (z * dist).round().astype(int)\n\n        training_dist = r(n_training_points)\n        testing_dist = r(n_testing_points)\n        x1, x2 = __random(training_dist[0], testing_dist[0], low, high)\n        x11, x22 = __interspread(training_dist[1], testing_dist[1], low, high)\n        x111, x222 = __interspread(training_dist[2], testing_dist[2], low, high)\n        x1111, x2222 = __seperate(training_dist[3], testing_dist[3], low, high)\n        x_training = np.vstack([x1, x11, x111, x1111])\n        x_testing = np.vstack([x2, x22, x222, x2222])\n\n    y_samples = gp.sample(np.vstack([x_training, x_testing]), 1).squeeze()\n    y_training = y_samples[: len(x_training)].reshape(-1, 1)\n    y_testing = y_samples[len(x_training) :].reshape(-1, 1)\n    training_data_set = data_loader.DataSet(X=x_training, Y=y_training)\n    testing_data_set = data_loader.DataSet(X=x_testing, Y=y_testing)\n\n    if shuffle:\n        training_data_set.shuffle()\n        testing_data_set.shuffle()\n\n    return training_data_set, testing_data_set", "def 
create_train_sets(self, proportion_val):\n l_path = os.listdir(self.image_folder_path)\n lr_path = random.sample(l_path, len(l_path))\n val_files = lr_path[: round(proportion_val * len(lr_path))]\n train_files = lr_path[round(proportion_val * len(lr_path)) :]\n delete_files(self.root_name, \"/VOC2021/ImageSets/Main\")\n write_txt(\"train.txt\", self.txt_path, train_files)\n write_txt(\"val.txt\", self.txt_path, val_files)", "def get_training_data(self):\n\n # this actually never was a set\n # src_set = self.target['src'].values\n # dst_set = self.target['dst'].values\n\n # train_negative = self.get_negative_edges(src_set, dst_set, self.train_ind.shape[0]) # * self.K)\n # test_negative = self.get_negative_edges(src_set, dst_set, self.test_ind.shape[0])\n\n train_positive = self.target.iloc[self.train_edge_ind].values\n test_positive = self.target.iloc[self.test_edge_ind].values\n\n # # print(train_positive.shape, train_negative.shape, test_positive.shape, test_negative.shape)\n # print(f\"Working with {train_positive.shape[0]} positive and {train_negative.shape[0]} negative samples in the train set, {test_positive.shape[0]} and {test_negative.shape[0]} - in test set\")\n\n X_train = train_positive\n X_test = test_positive\n\n y_train = np.ones((self.train_edge_ind.shape[0],))\n y_test = np.ones((self.test_edge_ind.shape[0],))\n\n # X_train = np.vstack([\n # train_positive,\n # train_negative\n # ])\n\n # X_test = np.vstack([\n # test_positive,\n # test_negative\n # ])\n\n # y_train = np.concatenate([np.ones((self.train_ind.shape[0],)), np.zeros((self.train_ind.shape[0]),)]) # self.train_ind.shape[0]) * self.K\n # y_test = np.concatenate([np.ones((self.test_ind.shape[0],)), np.zeros((self.test_ind.shape[0],))])\n\n assert X_train.shape[0] == y_train.shape[0]\n assert X_test.shape[0] == y_test.shape[0]\n\n def shuffle(X, y):\n ind_shuffle = np.arange(0, X.shape[0])\n np.random.shuffle(ind_shuffle)\n return X[ind_shuffle], y[ind_shuffle]\n\n self.X_train, self.y_train = shuffle(X_train, y_train)\n self.X_test, self.y_test = shuffle(X_test, y_test)\n\n print(f\"Splitting into {self.X_train.shape[0]} train and {self.X_test.shape[0]} test samples\")\n\n # return X_train, X_test, y_train, y_test", "def generate_data(self,seed):\n X, y = make_classification( n_samples = 250, random_state = seed )\n # Add bias term\n X = np.concatenate( ( np.ones( ( 250, 1 ) ), X ), axis = 1 )\n self.X_train, self.X_test, self.y_train, self.y_test = train_test_split( \n X, y, test_size = 50, random_state = seed )", "def _load_training_and_test_sets(normalize):\n class_labels = []\n test_labels = []\n norm = None\n if normalize == True:\n norm = loading.get_normalize_vector()\n\n for i in range(0, 10):\n [training, test] = loading.load_number_set(i, 0.7, norm_vector=norm)\n labels = [str(i)] * training.shape[0]\n tlabels = [str(i)] * test.shape[0]\n if i == 0:\n train_points = training\n test_points = test\n else:\n train_points = np.concatenate((train_points, training), axis = 0)\n test_points = np.concatenate((test_points, test), axis = 0)\n class_labels.extend(labels)\n test_labels.extend(tlabels)\n\n return train_points, test_points, class_labels, test_labels", "def get_training_and_validation_generators(data_file, batch_size, n_labels, training_keys_file, validation_keys_file,\n data_split=0.8, overwrite=False):\n training_list, validation_list = get_validation_split(data_file, data_split=data_split, overwrite=overwrite,\n training_file=training_keys_file,\n testing_file=validation_keys_file)\n 
training_generator = data_generator(data_file, training_list, batch_size=batch_size, n_labels=n_labels)\n validation_generator = data_generator(data_file, validation_list, batch_size=1, n_labels=n_labels)\n # Set the number of training and testing samples per epoch correctly\n num_training_steps = len(training_list)//batch_size\n num_validation_steps = len(validation_list)\n return training_generator, validation_generator, num_training_steps, num_validation_steps", "def prepare_data_for_training(args):\n # Form the train/test splits and write them to disk\n dataset = data.Dataset(args)\n # get image classes and image counts in each class\n label_map = dataset.get_class_info()\n class_count = len(list(label_map.values()))\n # split the data and store it in log dir\n df_train, df_test = dataset.split_dataset()\n\n # perform dataset augmentations\n image_data = augment.Augmentation(args)\n # get the data gens for training and test images\n train_data_gen, _ = image_data.map_fn_train(df_train)\n test_data_gen, _ = image_data.map_fn_test(df_test)\n\n return train_data_gen, test_data_gen, df_train, df_test, class_count", "def train(self):\n self.dataGenerator.printDataStatistics()\n sE = len(self.dataGenerator.ids[\"train\"])// 32\n sV = len(self.dataGenerator.ids[\"validation\"])// 32\n self.model.fit_generator(\n generator=self.dataGenerator.trainingGenerator,\n steps_per_epoch= sE,\n epochs=2,\n validation_data=self.dataGenerator.validationGenerator,\n validation_steps=sV,\n # use_multiprocessing=True,\n # workers=2,\n )", "def create_dataset():\n x_old, y_old = clean_scores_version1()\n\n # delete duplicates\n x_old = np.unique(x_old, axis=0)\n\n file = open('/Users/kira/Desktop/uni/Connect4/agents/agent_supervised_ml/unlabeled2.txt', \"a\")\n\n for row in x_old:\n string = ''\n move_seq = row[row != 0]\n for move in move_seq:\n string = string + str(move)\n for i in range(1, 8):\n file.write(string + str(i) + '\\n')\n\n file.close()", "def build_enru_custom_test(self):\n train_data_file = self.data_dir + '/' + enru_paracrawl\n eval_data_file = self.data_dir + '/' + enru_newscomm\n train_data = tf.data.experimental.CsvDataset(\n [train_data_file],\n record_defaults=[tf.string, tf.string],\n compression_type='GZIP',\n field_delim='\\t',\n use_quote_delim=False)\n train_data = train_data.cache() # only read once\n eval_data = tf.data.experimental.CsvDataset(\n [eval_data_file],\n record_defaults=[tf.string, tf.string],\n compression_type='GZIP',\n field_delim='\\t',\n use_quote_delim=False)\n\n eval_data = eval_data.skip(9000).take(10000)\n eval_data = eval_data.cache()\n def to_features_dict(eng, rus):\n return {'inputs': eng, 'targets': rus}\n\n train_data = train_data.map(to_features_dict)\n eval_data = eval_data.map(to_features_dict)\n\n self.default_builder_obj = None\n\n return train_data, eval_data", "def test_generate_nb_training(self):\n facade = ChatetteFacade.get_or_create()\n\n input_dir_path = \\\n \"tests/system-testing/inputs/generate-nb/training-only/\"\n input_filenames = [\n \"only-words.chatette\", \"words-and-groups.chatette\",\n \"alias.chatette\", \"include.chatette\", \"slot.chatette\",\n \"bugfixes/bug-22-slot-position.chatette\"\n ]\n for filename in input_filenames:\n file_path = os.path.join(input_dir_path, filename)\n facade.run(file_path)\n # if not TestSystem.check_no_duplicates(facade.train_examples): # TODO: make sure there are no duplicates in this case\n # pytest.fail(\"Some examples were generated several times \"+\n # \"when dealing with file 
'\"+filename+\"'.\\n\"+\n # \"Generated: \"+str(facade.train_examples))\n legal_examples = TestSystem.get_legal_examples(file_path)\n for ex in facade.train_examples:\n formatted_ex = {\"intent\": ex.intent_name, \"text\": ex.text}\n if formatted_ex not in legal_examples:\n pytest.fail(\n str(formatted_ex) + \" is not a legal example for '\" + \\\n file_path + \"'\"\n )\n \n legal_syn = TestSystem.get_legal_synonyms(file_path)\n if legal_syn is not None:\n synonyms = AST.get_or_create().get_entities_synonyms()\n for key in synonyms:\n if key not in legal_syn:\n pytest.fail(\n \"'\" + key + \"' shouldn't have any synonyms.\"\n )\n for syn in synonyms[key]:\n if syn not in legal_syn[key]:\n pytest.fail(\n \"'\" + syn + \"' shouldn't be a synonym of '\" + \\\n key + \"'\"\n )\n\n filename_zero = \"zero-ex.chatette\"\n file_path = os.path.join(input_dir_path, filename_zero)\n facade.run(file_path)\n if len(facade.train_examples) != 0:\n pytest.fail(\n \"When dealing with file 'zero-ex.chatette', no examples \" + \\\n \"should be generated.\\nGenerated: \" + \\\n str(facade.train_examples)\n )\n\n filename_one = \"one-ex.chatette\"\n file_path = os.path.join(input_dir_path, filename_one)\n facade.run(file_path)\n print(\"TRAIN EX: \" + str(facade.train_examples))\n if len(facade.train_examples) != 1:\n pytest.fail(\n \"When dealing with file 'one-ex.chatette', one examples \" + \\\n \"should be generated.\\nGenerated: \" + \\\n str(facade.train_examples)\n )", "def build_training_data():\r\n for i in range(len(FILE_NAMES)):\r\n input_text = read_file(FILE_NAMES[i])\r\n list_of_word_lines = limiting_sentence_length(input_text)\r\n data = create_training_data_file(list_of_word_lines, LANGUAGE[i])\r\n write_training_data(data, LANGUAGE[i])\r\n merge_training_data()", "def make_generators():\n \n # All images will be rescaled by 1./255\n train_datagen = ImageDataGenerator(rescale=1./255)\n test_datagen = ImageDataGenerator(rescale=1./255)\n\n \n train_generator = train_datagen.flow_from_directory(\n TRAIN_DATA_PATH,\n target_size= (150, 150),\n batch_size= 20,\n class_mode= 'sparse')\n\n validation_generator = test_datagen.flow_from_directory(\n VAL_DATA_PATH,\n target_size= (150, 150),\n batch_size= 20,\n class_mode= 'sparse')\n\n return train_generator, validation_generator", "def prepareData(mdl, X_train, X_val, X_test, Y_train, Y_val, Y_test):\n\t\n\tX2_train = []\n\n\tX2_val = []\n\n\tX2_test = []\n\n\n\tYPs = mdl.predict(X_train)\n\n\tfor i in tqdm(range(len(X_train))):\n\n\t\tX2_train.append(np.array(YPs[i]))\n\n\n\n\tYPs = mdl.predict(X_val)\n\n\tfor i in tqdm(range(len(X_val))):\n\n\t\tX2_val.append(np.array(YPs[i]))\n\n\n\n\tYPs = mdl.predict(X_test)\n\n\tfor i in tqdm(range(len(X_test))):\n\n\t\tX2_test.append(np.array(YPs[i]))\n\n\n\tX2_train = np.array(X2_train)\n\n\tX2_val = np.array(X2_val)\n\n\tX2_test = np.array(X2_test)\n\n\treturn (X2_train, X2_val, X2_test)", "def test_data():\n batch_size = 10\n input_dim = 28\n test_data = np.random.rand(batch_size, input_dim)\n\n return test_data", "def build_all_datasets(\n cfg, tokenizer, train_valid_test_num_samples,\n):\n train_dataset = RetroQAFineTuneDataset(\n cfg.train_ds.get('file_name'),\n tokenizer,\n cfg.train_ds.get('answer_only_loss'),\n tokenizer.pad_id,\n cfg.train_ds.get('seq_length'),\n cfg.train_ds.get('add_bos'),\n cfg.train_ds.get('add_eos'),\n train_valid_test_num_samples[0],\n cfg.train_ds.get('seed'),\n cfg.train_ds.get('neighbors'),\n )\n val_dataset = RetroQAFineTuneDataset(\n cfg.val_ds.get('file_name'),\n 
tokenizer,\n cfg.val_ds.get('answer_only_loss'),\n tokenizer.pad_id,\n cfg.val_ds.get('seq_length'),\n cfg.val_ds.get('add_bos'),\n cfg.val_ds.get('add_eos'),\n train_valid_test_num_samples[1],\n cfg.val_ds.get('seed'),\n cfg.val_ds.get('neighbors'),\n )\n test_dataset = RetroQAFineTuneDataset(\n cfg.test_ds.get('file_name'),\n tokenizer,\n cfg.test_ds.get('answer_only_loss'),\n tokenizer.pad_id,\n cfg.test_ds.get('seq_length'),\n cfg.test_ds.get('add_bos'),\n cfg.test_ds.get('add_eos'),\n train_valid_test_num_samples[2],\n cfg.test_ds.get('seed'),\n cfg.test_ds.get('neighbors'),\n )\n\n return train_dataset, val_dataset, test_dataset", "def build_data_set(self):\n if not self.assert_data_correct():\n self.download_all_data()\n self.unpack_rename_data()\n self.split_data_characters()\n self.clean_data_fragments()\n self.create_font_data()\n if not self.assert_train_augmented():\n self.augment_train_data()\n if not self.assert_style_data_correct():\n self.download_style_data()\n self.unpack_rename_data()", "def prepare_data(dataset, train_ratio=0.8, input_dim=None, seed=10):\n # Retrieve main path of project\n dirname = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))\n\n # Download and store dataset at chosen location\n if dataset == 'Cora' or dataset == 'PubMed' or dataset == 'Citeseer':\n path = os.path.join(dirname, 'data')\n data = Planetoid(path, name=dataset, split='full')[0]\n data.name = dataset\n data.num_classes = (max(data.y)+1).item()\n # data.train_mask, data.val_mask, data.test_mask = split_function(data.y.numpy())\n # data = Planetoid(path, name=dataset, split='public', transform=T.NormalizeFeatures(), num_train_per_class=20, num_val=500, num_test=1000)\n\n elif dataset == 'Amazon':\n path = os.path.join(dirname, 'data', 'Amazon')\n data = Amazon(path, 'photo')[0]\n data.name = dataset\n data.num_classes = (max(data.y)+1).item()\n data.train_mask, data.val_mask, data.test_mask = split_function(\n data.y.numpy(), seed=seed)\n # Amazon: 4896 train, 1224 val, 1530 test\n \n elif dataset in ['syn1', 'syn2', 'syn4', 'syn5']: \n data = synthetic_data(\n dataset, dirname, train_ratio, input_dim)\n \n elif dataset == 'syn6':\n data = gc_data(dataset, dirname, train_ratio)\n\n elif dataset == 'Mutagenicity':\n data = gc_data(dataset, dirname, train_ratio)\n\n return data", "def load_train_data():\r\n X_train = np.load('data/train/X_train.npy')\r\n scaling_train = np.load('data/train/scaling_train.npy')\r\n ids_train = np.load('data/train/ids_train.npy')\r\n y_train = np.load('data/train/y_train.npy')\r\n\r\n seed = np.random.randint(1, 10e6)\r\n np.random.seed(seed)\r\n np.random.shuffle(X_train)\r\n np.random.seed(seed)\r\n np.random.shuffle(scaling_train)\r\n np.random.seed(seed)\r\n np.random.shuffle(ids_train)\r\n np.random.seed(seed)\r\n np.random.shuffle(y_train)\r\n\r\n return X_train, scaling_train, ids_train, y_train", "def train_test_model_batch():\n train=learning.Train_kmer_clf()\n train.run()", "def autogen_dataset_with_test():\n return TabularDataset.autogen('tests/data/dummy_tabular/train.csv',\n test_path='tests/data/dummy_tabular_test/test.csv',\n seed=42,\n sep=',')", "def load_data():\n # Load and preprocess data\n x_text_train1, x_text_train2, x_text_dev1, x_text_dev2, y_train, y_dev = load_data_and_labels_without_shuffled()\n\n x_text_train1 = split_sentence(x_text_train1)\n x_text_train2 = split_sentence(x_text_train2)\n x_text_dev1 = split_sentence(x_text_dev1)\n x_text_dev2 = split_sentence(x_text_dev2)\n\n x_text_train1 = 
pad_sentences(x_text_train1)\n x_text_train2 = pad_sentences(x_text_train2)\n x_text_dev1 = pad_sentences(x_text_dev1)\n x_text_dev2 = pad_sentences(x_text_dev2)\n\n # sentences = x_text_train1 + x_text_train2 + x_text_dev1 + x_text_dev2\n # vocabulary, vocabulary_inv = build_vocab(sentences)\n # x_text_train1 = build_input_data(x_text_train1, vocabulary)\n # x_text_train2 = build_input_data(x_text_train2, vocabulary)\n # x_text_dev1 = build_input_data(x_text_dev1, vocabulary)\n # x_text_dev2 = build_input_data(x_text_dev2, vocabulary)\n\n x_train1 = sentence_word2vec(x_text_train1)\n x_train2 = sentence_word2vec(x_text_train2)\n x_dev1 = sentence_word2vec(x_text_dev1)\n x_dev2 = sentence_word2vec(x_text_dev2)\n\n y_train = np.array(y_train)\n y_dev = np.array(y_dev)\n # return [x_text_train1, x_text_train2, x_text_dev1, x_text_dev2, y_train, y_dev, vocabulary, vocabulary_inv]\n\n return [x_train1, x_train2, x_dev1, x_dev2, y_train, y_dev]", "def setUpTestData(cls):\n data_gen.run()", "def setUpTestData(cls):\n data_gen.run()", "def setUpTestData(cls):\n data_gen.run()", "def setUpTestData(cls):\n data_gen.run()", "def test_text_classifier_get_training_samples(self):\n pass", "def split_data_into_train_and_test(raw_training_data):\n train_set, test_set = train_test_split(raw_training_data, test_size=0.2, random_state=42)\n return train_set, test_set", "def prepare_jsonlbpe_data(data_dir, train_data_file, dev_data_file, vocab_file):\n if not gfile.Exists(data_dir):\n gfile.MkDir(data_dir)\n\n # Get wmt data to the specified directory.\n train_path = get_qa_set(data_dir, train_data_file)\n dev_path = get_qa_set(data_dir, dev_data_file)\n\n # Create vocabularies of the appropriate sizes.\n vocab_path = os.path.join(data_dir, \"vocab.txt\")\n create_vocabulary(vocab_path, vocab_file)\n\n # Create token ids for the training data.\n src_train_ids_path = train_path + \".src.ids\"\n targ_train_ids_path = train_path + \".targ.ids\"\n data_to_token_ids(train_path + \".src\", src_train_ids_path, vocab_path)\n data_to_token_ids(train_path + \".targ\", targ_train_ids_path, vocab_path)\n\n # Create token ids for the development data.\n src_dev_ids_path = dev_path + \".src.ids\"\n targ_dev_ids_path = dev_path + \".targ.ids\"\n data_to_token_ids(dev_path + \".src\", src_dev_ids_path, vocab_path)\n data_to_token_ids(dev_path + \".targ\", targ_dev_ids_path, vocab_path)\n\n return (src_train_ids_path, targ_train_ids_path,\n src_dev_ids_path, targ_dev_ids_path,\n vocab_path)", "def generateTrainAndValidateset(trainSets, validateSets, validatePercentage=20):\n\tvalidateFiles = []\n\ttrainFiles = []\n\n\tfor validateSet in validateSets:\n\t\tif \".\" in validateSet:\n\t\t\tvalidateSet, percentage = validateSet.split(\".\")\n\n\t\t\tif percentage == \"all\":\n\t\t\t\t#overwrite any further checks and security measures, just append all files:\n\t\t\t\tvalidateFiles += getAllFiles([validateSet])\n\t\t\t\tcontinue\n\n\t\t\tpercentage = int(percentage)\n\t\telse:\n\t\t\tpercentage = validatePercentage\n\n\t\tif validateSet not in _dataSets:\n\t\t\traise ValueError(\"Not a valid validate set: \" + validateSet)\n\n\t\tallFiles = sorted(filter(lambda x: x.endswith(\".txt\"), os.listdir(_dataSets[validateSet])))\n\t\tallFiles = list(map(lambda x: _dataSets[validateSet] + x, allFiles))\n\t\trandom.seed(42) #make sure all lists are randomized equally each time\n\t\trandom.shuffle(allFiles)\n\n\t\tallAroused = list(filter(lambda x: isAroused(x), allFiles))\n\t\tallNonAroused = list(filter(lambda x: not isAroused(x), 
allFiles))\n\n\t\tvalidateFiles += allAroused[len(allAroused) - int(percentage * len(allFiles) / 100 / 2):]\n\t\tvalidateFiles += allNonAroused[len(allNonAroused) - int(percentage * len(allFiles) / 100 / 2):]\n\n\n\tfor trainSet in trainSets:\n\t\tif \".\" in trainSet:\n\t\t\ttrainSet, percentage = trainSet.split(\".\", 1)\n\n\t\t\tif percentage == \"all\":\n\t\t\t\t#overwrite any further checks and security measures, just append all files:\n\t\t\t\ttrainFiles += getAllFiles([trainSet])\n\t\t\t\tcontinue\n\n\t\t\tpercentage = int(percentage)\n\t\telse:\n\t\t\tpercentage = 100 - validatePercentage\n\t\t\tvalidatePercentage = validatePercentage\n\n\t\tif trainSet not in _dataSets:\n\t\t\traise ValueError(\"Not a valid train set: \" + trainSet)\n\n\t\tallFiles = sorted(filter(lambda x: x.endswith(\".txt\"), os.listdir(_dataSets[trainSet])))\n\t\tallFiles = list(map(lambda x: _dataSets[trainSet] + x, allFiles))\n\t\trandom.seed(42) #make sure all lists are randomized equally each time\n\t\trandom.shuffle(allFiles)\n\n\t\tallAroused = list(filter(lambda x: isAroused(x), allFiles))\n\t\tallNonAroused = list(filter(lambda x: not isAroused(x), allFiles))\n\n\t\ttrainFiles += filter(lambda x: x not in validateFiles, allAroused[:int(percentage * len(allFiles) / 100 / 2)])\n\t\ttrainFiles += filter(lambda x: x not in validateFiles, allNonAroused[:int(percentage * len(allFiles) / 100 / 2)])\n\n\tif not any(map(lambda x: x.endswith(\".all\"), list(trainSets) + list(validateSets))):\n\t\t#assert no validatefiles are also trainfiles\n\t\tassert(set(trainFiles) - set(validateFiles) == set(trainFiles))\n\t\t#assert an equal amount of aroused and non-aroused validatefiles\n\t\tassert(len(list(filter(isAroused, validateFiles))) == len(validateFiles) / 2)\n\n\treturn trainFiles, validateFiles", "def prepare_data(self):\n try:\n self.train_dataset = self.datasets['train']\n self.val_dataset = self.datasets['val']\n try:\n self.test_dataset = self.datasets['test']\n except:\n pass\n except Exception as e:\n print('Data was not succesfully prepared:', e)", "def get_training_data():\n \n X = pd.read_csv('../data/train_values.csv').set_index('sequence_id')\n y = pd.read_csv('../data/train_labels.csv').set_index('sequence_id')\n return X, y", "def test_text_classifier_add_training_samples(self):\n pass", "def prepare_data(self):\n data = self._get_dataset(self.hparams.dataset_path)\n label_encoder = data[\"label_encoder\"]\n del data[\"label_encoder\"]\n\n click.secho(\"Building inputs and labels.\", fg=\"yellow\")\n datasets = {\n \"train\": defaultdict(list),\n \"valid\": defaultdict(list),\n \"test\": defaultdict(list),\n }\n for dataset_name, dataset in data.items():\n for sample in dataset:\n instance = self.build_input(\n self.tokenizer, sample[\"text\"], label_encoder, sample[\"label\"]\n )\n for input_name, input_array in instance.items():\n datasets[dataset_name][input_name].append(input_array)\n\n click.secho(\"Padding inputs and building tensors.\", fg=\"yellow\")\n tensor_datasets = {\"train\": [], \"valid\": [], \"test\": []}\n for dataset_name, dataset in datasets.items():\n dataset = self.pad_dataset(dataset, padding=self.tokenizer.pad_index)\n for input_name in MODEL_INPUTS:\n if input_name == \"labels\":\n tensor = torch.tensor(dataset[input_name], dtype=torch.float32)\n else:\n tensor = torch.tensor(dataset[input_name])\n tensor_datasets[dataset_name].append(tensor)\n\n self.train_dataset = TensorDataset(*tensor_datasets[\"train\"])\n self.valid_dataset = 
TensorDataset(*tensor_datasets[\"valid\"])\n        self.test_dataset = TensorDataset(*tensor_datasets[\"test\"])\n        click.secho(\n            \"Train dataset (Batch, Candidates, Seq length): {}\".format(\n                self.train_dataset.tensors[0].shape\n            ),\n            fg=\"yellow\",\n        )\n        click.secho(\n            \"Valid dataset (Batch, Candidates, Seq length): {}\".format(\n                self.valid_dataset.tensors[0].shape\n            ),\n            fg=\"yellow\",\n        )\n        click.secho(\n            \"Test dataset (Batch, Candidates, Seq length): {}\".format(\n                self.test_dataset.tensors[0].shape\n            ),\n            fg=\"yellow\",\n        )", "def create_train_feats():\n    features = read_process_labelled(AUDIO_DIR, debug=True)\n    df = pd.DataFrame(features)\n    p = './Features/dataset_features/data_features.csv'\n    df.to_csv(p, index=False)\n    return p", "def trainData(self, X, y, NeuralNet, epochs):", "def train_and_test(self, data):\n\n        np.random.shuffle(data)\n        datalist = self.unpack_data(data)\n\n        logger.info('[*] 75-25 partition of datasets ...')\n\n        markline1 = math.floor(0.75*(len(datalist['features'])))\n        markline2 = math.floor(0.75*len(datalist['labels']))\n\n        train_features = datalist['features'][:(markline1)]\n        test_features = datalist['features'][(markline1):]\n        \n        train_labels = datalist['labels'][:(markline2)]\n        test_labels = datalist['labels'][(markline2):]\n\n        logger.info('[*] Training started with 75% Dataset ...')\n\n        self.knn_model.fit(train_features, train_labels)\n\n        logger.info('[*] Testing started with 25% Dataset ...')\n        print('\\n/---------------Accuracy----------------/')  \n        \n        accuracy = self.knn_model.score(test_features, test_labels)\n        print('Test set accuracy {:.2f} %'.format(accuracy*100))\n\n        if accuracy < 0.40:\n            logger.warning('[-.-!] Thanks for tryin\\' but this machine ain\\'t learning.')\n\n        return True", "def dataset_preparation():\r\n    with open('../data/patterns_num.txt', 'r') as f:\r\n        data = f.readlines()\r\n    X, Y = [], []\r\n    for line in data:\r\n        x, y = line.split('\\t')\r\n        if len(x) > 5 and x not in X:  # better results are achieved excluding short query patterns\r\n            X.append(x.replace(\"X\", \"\").replace(\"Y\", \"\").lower())\r\n            Y.append(int(y.replace('\\n', '')))\r\n    test_size = 0.2\r\n    # print('Test size:', test_size, '\\nWrong classifications:\\n')\r\n\r\n    X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=test_size, random_state=42, stratify=Y)\r\n    return X_train, y_train, X_test, y_test", "def gen_data(n_samples=200):\r\n\tnp.random.seed(13)\r\n\tx = np.random.uniform(0,10,size=n_samples) # sample uniformly at random from the interval [0,10)\r\n\tx.sort()\r\n\ty = ground_truth(x) + 0.75*np.random.normal(size=n_samples) # normal distribution with mean 0 and standard deviation 1\r\n\t#print(\"x, y: \", x[:10], y[:10])\r\n\ttrain_mask = np.random.randint(0,2,size=n_samples).astype(np.bool) # returned values are True or False\r\n\t#print(\"train_mask: \",train_mask[:10])\r\n\tx_train, y_train = x[train_mask, np.newaxis], y[train_mask] # where train_mask is 1, use x,y as the training set; np.newaxis turns the row vector into a column vector\r\n\t#print(\"x_train, y_train: \", x_train[:10], y_train[:10])\r\n\tx_test, y_test = x[~train_mask, np.newaxis], y[~train_mask] # where train_mask is False, use x,y as the test set\r\n\t#print(\"x_test, y_test: \", x_test[:10], y_test[:10])\r\n\treturn x_train, x_test, y_train, y_test", "def set_batch_data():\r\n    if not os.path.exists(filepath):\r\n        download_data()\r\n    for n in range(0,6):\r\n        d = read(filepath + flist[n])\r\n        metadata = read(filepath + flist[-1])\r\n        ndata = metadata['num_cases_per_batch']\r\n        ndim = metadata['num_vis']\r\n\r\n        data, trts = 
{}, {}\r\n        data['labels'] = metadata['label_names']\r\n        data['ntraindata'] = metadata['num_cases_per_batch'] * (len(flist) - 2)\r\n        data['ntestdata'] = metadata['num_cases_per_batch']\r\n        data['ndim'] = metadata['num_vis']\r\n        trts['x'], trts['y'] = d['data'], d['labels']\r\n        trtsflag = ['train', 'train', 'train', 'train', 'train', 'test']\r\n\r\n        data['flag'] = trtsflag[n]\r\n        data[trtsflag[n]] = trts\r\n        save_pkl(data, savename=flist[n]+'.pkl')", "def create_data(self):\n\n        print (f'Using {self.n_s} simulations for the training data to estimate cov')\n        print (f'Using {self.n_p} simulations for the upper/lower training data')\n        print (f'Number of splits, to increase number simulations: {self.n_train}')\n        print (f'Adding noise to the derivative: {np.invert(self.noiseless_deriv)}')\n\n        # Number of upper and lower simulations\n        n_p = int(self.n_s * self.derivative_fraction)\n\n        # set a seed to suppress the sample variance (EVEN FOR CENTRAL SIMULATIONS)\n        seed = np.random.randint(1e6) \n        # We should double-check to see if the sample variance is being suppressed\n\n        # Perturb lower \n        np.random.seed(seed)\n        t_m = self.generate_data(np.array([self.theta_fid for i in \n                                    range(self.n_train * self.n_p)])\n                                    ,train = -self.delta_theta, flatten = self.flatten\n                                    ,noiseless_deriv = self.noiseless_deriv) \n        # Perturb higher \n        np.random.seed(seed)\n        t_p = self.generate_data(np.array([self.theta_fid for i in \n                                    range(self.n_train * self.n_p)])\n                                    ,train = self.delta_theta, flatten = self.flatten\n                                    , noiseless_deriv = self.noiseless_deriv)\n\n        # Central\n        np.random.seed(seed)\n        t = self.generate_data(np.array([self.theta_fid for i in \n                                    range(self.n_train * self.n_s)])\n                                    ,train = None, flatten = self.flatten)\n\n\n        # derivative data\n        t_d = (t_p - t_m) / (2. * self.delta_theta)\n\n        # Save in a dict that the network takes\n        data = {\"data\": t, \"data_d\": t_d}\n        # for plotting purposes we save the upper/lower separately as well\n        data[\"x_m\"], data[\"x_p\"] = t_m, t_p \n\n\n        # Repeat the same story to generate test data\n        print ('\\n')\n        print (f'Using {self.n_s} simulations for the test data to estimate cov')\n        print (f'Using {self.n_p_val} simulations for the upper/lower test data')\n        print (f'Number of splits, to increase number simulations: {self.n_train_val}')\n        print (f'Adding noise to the derivative: {np.invert(self.noiseless_deriv)}')\n        print ('\\n')\n\n        seed = np.random.randint(1e6)\n        # Perturb lower \n        np.random.seed(seed)\n        tt_m = self.generate_data(np.array([self.theta_fid for i in \n                                    range(self.n_train * self.n_p)])\n                                    , train = -self.delta_theta, flatten = self.flatten\n                                    , noiseless_deriv = self.noiseless_deriv)\n        # Perturb higher \n        np.random.seed(seed)\n        tt_p = self.generate_data(np.array([self.theta_fid for i in \n                                    range(self.n_train * self.n_p)])\n                                    , train = self.delta_theta, flatten = self.flatten\n                                    , noiseless_deriv = self.noiseless_deriv)\n        # Central sim\n        np.random.seed(seed)\n        tt = self.generate_data(np.array([self.theta_fid for i in \n                                    range(self.n_train * self.n_s)])\n                                    , train = None, flatten = self.flatten)\n        \n        # np.random.seed()\n        \n        # derivative data\n        tt_d = (tt_p - tt_m) / (2. 
* self.delta_theta)\n\n        data[\"validation_data\"] = tt \n        data[\"validation_data_d\"] = tt_d\n\n        # for plotting purposes we save the upper/lower separately\n        data[\"x_m_test\"], data[\"x_p_test\"] = tt_m, tt_p \n\n        return data", "def train_dataset():\n    return TabularDataset.from_path('tests/data/dummy_tabular/train.csv', sep=',')", "def train(self, data):\n        pass", "def init_data(dataset_config: dict):\n    # train and dev will be in random order, test may be ordered according to labels\n    if dataset_config[\"name\"] == \"CoLA\":\n        train, dev, test, num_classes = load_cola(dataset_config)\n    elif dataset_config[\"name\"] == \"AGNews\":\n        train, dev, test, num_classes = load_ag_news(dataset_config)\n    elif dataset_config[\"name\"] == \"DBPedia\":\n        train, dev, test, num_classes = load_dbpedia(dataset_config)\n    elif dataset_config[\"name\"] == \"YRF\":\n        train, dev, test, num_classes = load_yrf(dataset_config)\n    else:\n        raise NameError(f\"Dataset {dataset_config['name']} not implemented.\")\n    # etc.\n\n    # shrink size if debugging\n    if dataset_config[\"debug\"]:\n        # choose a random subset using huggingface select function\n        train = train.select(random.sample(range(len(train)), k=200))\n        dev = dev.select(random.sample(range(len(dev)), k=40))\n        test = test.select(random.sample(range(len(test)), k=200))\n\n    # create class imbalance\n    random.seed(dataset_config[\"seed\"])\n    if dataset_config[\"pool_balance\"] == \"balanced\":\n        pass\n    elif dataset_config[\"pool_balance\"] == \"imbalanced\":\n        train = train.filter(lambda example: create_imbalanced_dataset(example, dataset_config[\"imbalance_prop\"], dataset_config['imbalance_cls']))\n    else:\n        raise NameError(f\"pool_balance = {dataset_config['pool_balance']} not allowed\")\n\n    if dataset_config[\"dev_balance\"] == \"balanced\":\n        pass\n    elif dataset_config[\"dev_balance\"] == \"imbalanced\":\n        dev = dev.filter(lambda example: create_imbalanced_dataset(example, dataset_config[\"imbalance_prop\"], dataset_config['imbalance_cls']))\n    else:\n        raise NameError(f\"dev_balance = {dataset_config['dev_balance']} not allowed\")\n\n    # get seed labelled pool indices (using the same seed data every time)\n    random.seed(dataset_config[\"seed\"])\n    if dataset_config[\"seed_balance\"] == \"balanced\":\n        # this is random (will have some variance vs pool)\n        indices = list(range(len(train)))\n        unlabelled_pool_idx, labelled_pool_idx = split(\n            indices,\n            random_state=dataset_config[\"seed\"],\n            test_size=dataset_config[\"seed_size\"]\n        )\n    elif dataset_config[\"seed_balance\"] == \"stratified\":\n        # this is the same as the underlying train set (which may be unbalanced)\n        indices = list(range(len(train)))\n        unlabelled_pool_idx, labelled_pool_idx = split(\n            indices,\n            random_state=dataset_config[\"seed\"],\n            test_size=dataset_config[\"seed_size\"],\n            stratify=train['label']\n        )\n    elif dataset_config[\"seed_balance\"] == \"imbalanced\":\n        # artificially sample an imbalanced seed set from the pool\n        unlabelled_pool_idx, labelled_pool_idx = create_imbalanced_seed(\n            train,\n            num_classes,\n            dataset_config[\"seed_size\"],\n            dataset_config['imbalance_prop'],\n            dataset_config['imbalance_cls']\n        )\n    else:\n        raise NameError(f\"seed_balance = {dataset_config['seed_balance']} not allowed\")\n\n    return train, dev, test, num_classes, labelled_pool_idx, unlabelled_pool_idx", "def load_training_data(config):\n    # Load data\n    LOGGER.info(\"Loading training data.\")\n    train_x = load_data(config['data_source'], config['train_x_filename'])\n    train_y = load_data(config['data_source'], 
config['train_y_filename'])\n val_x = load_data(config['data_source'], config['val_x_filename'])\n val_y = load_data(config['data_source'], config['val_y_filename'])\n LOGGER.info(\"Training data size: %d\", len(train_x))\n LOGGER.info(\"Validation data size: %d\", len(val_x))\n\n # Build datasets and create iterators\n LOGGER.info(\"Building dataset.\")\n train_dataset = get_dataset(\n train_x, train_y, config['batch_size'], config['data_shape'],\n config['n_classes'], True)\n val_dataset = get_dataset(\n val_x, val_y, config['batch_size'], config['data_shape'],\n config['n_classes'])\n\n return train_dataset, val_dataset, len(val_x)", "def trainData(self,):\n count = 0\n while count < len(self.RAD_sequences_train):\n RAD_filename = self.RAD_sequences_train[count] \n RAD_complex = loader.readRAD(RAD_filename)\n if RAD_complex is None:\n raise ValueError(\"RAD file not found, please double check the path\")\n ### NOTE: Gloabl Normalization ###\n RAD_data = helper.complexTo2Channels(RAD_complex)\n RAD_data = (RAD_data - self.config_data[\"global_mean_log\"]) / \\\n self.config_data[\"global_variance_log\"]\n ### load ground truth instances ###\n gt_filename = loader.gtfileFromRADfile(RAD_filename, \\\n self.config_data[\"train_set_dir\"])\n gt_instances = loader.readRadarInstances(gt_filename)\n if gt_instances is None:\n raise ValueError(\"gt file not found, please double check the path\")\n\n ### NOTE: decode ground truth boxes to YOLO format ###\n gt_labels, has_label, raw_boxes = self.encodeToLabels(gt_instances)\n\n if has_label:\n yield (RAD_data, gt_labels, raw_boxes)\n count += 1\n if count == len(self.RAD_sequences_train) - 1:\n # np.random.seed() # should I add seed here ?\n np.random.shuffle(self.RAD_sequences_train)", "def creation_data_sets(quality, dataset, test_case=False):\n current_path = Path.cwd()\n if dataset == 0:\n (x_train, y_train), (x_test, y_test) = mnist.load_data()\n del y_train, y_test\n train_path = current_path.joinpath(\"Mnist_{}\".format(quality))\n test_path = current_path.joinpath(\"Mnist_{}_test\".format(quality))\n else:\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n del y_train, y_test\n train_path = current_path.joinpath(\"Cifar-10_{}\".format(quality))\n test_path = current_path.joinpath(\"Cifar-10_{}_test\".format(quality))\n\n create_directories(train_path, test_path)\n convert(train_path, x_train, dataset, quality, test_case)\n convert(test_path, x_test, dataset, quality, test_case)", "def read_data_sets(data_path, fake_data=False, one_hot=False,\n validation_size=5000, source_url={},\n augment=False,\n percentage_train=100.,\n unbalance=False, unbalance_dict={\"percentage\": 20, \"label1\": 0, \"label2\": 8},\n ):\n\n class DataSets(object):\n pass\n\n data_sets = DataSets()\n\n if fake_data:\n data_sets.train = DataSet([], [], fake_data=True, one_hot=True)\n data_sets.validation = DataSet([], [], fake_data=True, one_hot=True)\n data_sets.test = DataSet([], [], fake_data=True, one_hot=True)\n return data_sets\n\n if not source_url: # empty string check\n if 'fashion' in data_path:\n source_url = DEFAULT_SOURCE_URL_FASHION\n else:\n source_url = DEFAULT_SOURCE_URL_MNIST\n\n if 'fashion' in data_path or 'mnist' in data_path: # mnist or fashion\n train_images, train_labels, val_images, val_labels, test_images, test_labels = \\\n load_mnist(data_path, validation_size, source_url, one_hot)\n reshape = True\n else:\n train_images, train_labels, val_images, val_labels, test_images, test_labels = \\\n load_medical_data(data_path)\n reshape 
= False\n\n # add random permutation to train & validation\n np.random.seed(42)\n\n n_train = train_images.shape[0]\n perm = np.random.permutation(n_train)\n train_images = train_images[perm]\n train_labels = train_labels[perm]\n\n n_val = val_images.shape[0]\n perm = np.random.permutation(n_val)\n val_images = val_images[perm]\n val_labels = val_labels[perm]\n\n # For experiments with data-augmentation\n if augment:\n if 'fashion' in data_path: # rotations +-10 and horizontal flips\n augmented_images, augmented_labels = augment_data(train_images, train_labels, hflip=True)\n elif 'mnist' in data_path: # rotations +-10\n augmented_images, augmented_labels = augment_data(train_images, train_labels, hflip=False)\n train_images = np.concatenate([train_images, np.expand_dims(augmented_images, 3)])\n train_labels = np.concatenate([train_labels, augmented_labels])\n # for the medical datasets, you can use the \"augment\" argument while doing patch extraction\n\n # For experiments with limited amount of data\n if percentage_train != 100.:\n train_size = int(0.01*percentage_train*train_images.shape[0])\n Xtrain_images, Xval_images, ytrain, yval = train_test_split(train_images, train_labels, train_size=train_size)\n train_images = Xtrain_images\n train_labels = ytrain\n\n # For experiments with class-imbalance distribution\n if unbalance:\n n_classes = len(np.unique(np.argmax(train_labels, 1)))\n reduceto = 0.01*unbalance_dict['percentage']\n label1 = unbalance_dict['label1']\n label2 = unbalance_dict['label2']\n\n pick_ids = []\n newsize = 0\n all_classes = np.arange(0, n_classes)\n all_classes = np.delete(all_classes, np.where(all_classes == label1)[0])\n all_classes = np.delete(all_classes, np.where(all_classes == label2)[0])\n\n for lab in [label1, label2]:\n allids = np.where(np.argmax(train_labels, 1) == lab)[0]\n selectedids = np.random.choice(allids, int(reduceto * allids.shape[0]), replace=False)\n pick_ids.append(selectedids)\n newsize += len(selectedids)\n\n new_ids = convert_list_to_array(pick_ids, newsize)\n\n other_ids = []\n othersize = 0\n for lab in all_classes.tolist():\n selectedids = np.where(np.argmax(train_labels, 1) == lab)[0]\n other_ids.append(selectedids)\n othersize += len(selectedids)\n\n keep_ids = convert_list_to_array(other_ids, othersize)\n\n # new_ids: contains the indices of the reduced (imbalance) classes\n # keep_ids: contains the indices of the rest (keep the same class distribution)\n resulting_ids = np.concatenate((new_ids, keep_ids))\n np.random.shuffle(resulting_ids)\n\n train_images = train_images[resulting_ids, ...]\n train_labels = train_labels[resulting_ids, ...]\n\n data_sets.train = DataSet(train_images, train_labels, fake_data=True, one_hot=True, reshape=reshape)\n data_sets.validation = DataSet(val_images, val_labels, fake_data=True, one_hot=True, reshape=reshape)\n data_sets.test = DataSet(test_images, test_labels, fake_data=True, one_hot=True, reshape=reshape)\n\n return data_sets", "def prepare_dataset(data_path, test_size=0.2, validation_size=0.2):\r\n\r\n # load dataset\r\n if data_path.endswith('json'):\r\n X, y = load_data_from_json(data_path)\r\n else:\r\n X, y = load_data_from_fold(data_path)\r\n # create train, validation, test split\r\n X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_size)\r\n X_train, X_validation, y_train, y_validation = train_test_split(X_train, y_train, test_size=validation_size)\r\n\r\n # add an axis to nd array\r\n X_train = X_train[..., np.newaxis]\r\n X_test = X_test[..., np.newaxis]\r\n 
X_validation = X_validation[..., np.newaxis]\r\n\r\n return X_train, y_train, X_validation, y_validation, X_test, y_test", "def generate_dataset(self):\n\t\timg_set = []\n\t\tqa_set = []\n\t\tfor i in range(self.config.dataset_size):\n\t\t\timg, r = self.generate_image()\n\t\t\tq = self.generate_question()\n\t\t\ta = self.generate_answer(r, q)\n\t\t\timg_sample = {\n\t\t\t\t'id': i,\n\t\t\t\t'image': img.tolist()\n\t\t\t}\n\t\t\timg_set.append(img_sample)\n\t\t\tfor j in range(len(q)):\n\t\t\t\tqa_sample = {\n\t\t\t\t\t'id': i,\n\t\t\t\t\t'question': q[j].tolist(),\n\t\t\t\t\t'answer': a[j].tolist()\n\t\t\t\t}\n\t\t\t\tqa_set.append(qa_sample)\n\t\tprint('Finished creating smaples')\n\t\tdataset = {\n\t\t\t'image':\timg_set,\n\t\t\t'qa':\tqa_set\n\t\t}\n\t\twith open(self.path, 'w') as f:\n\t\t\tjson.dump(dataset, f)", "def create_train_test_sets(conform_shape=True, indi_proportion=0.50, incl_group_imgs=True):\r\n X_train_indi, y_train_indi = build_dataframe('Individual_Training_Images',\r\n img_input_shape, conform_shape=conform_shape)\r\n X_test_indi, y_test_indi = build_dataframe('Individual_Test_Images',\r\n img_input_shape, conform_shape=conform_shape)\r\n \r\n X_train_group, y_train_group = build_dataframe('Group_Training_Images',\r\n img_input_shape, conform_shape=conform_shape)\r\n X_test_group, y_test_group = build_dataframe('Group_Test_Images',\r\n img_input_shape, conform_shape=conform_shape)\r\n \r\n X_train_indi, y_train_indi = subsample_dataframe(X_train_indi, y_train_indi,indi_proportion)\r\n \r\n if incl_group_imgs:\r\n X_train = np.concatenate([X_train_indi,X_train_group])\r\n y_train = np.concatenate([y_train_indi,y_train_group])\r\n else: \r\n X_train = X_train_indi.copy()\r\n y_train = y_train_indi.copy()\r\n\r\n return X_train, y_train, X_test_indi, y_test_indi, X_test_group, y_test_group", "def setUp(self):\n self.batch_size = 8\n num_keypoints = 15\n self.data_batch = []\n self.data_samples = []\n\n for i in range(self.batch_size):\n gt_instances = InstanceData()\n keypoints = np.zeros((1, num_keypoints, 2))\n keypoints[0, i] = [0.5 * i, 0.5 * i]\n gt_instances.keypoints = keypoints\n gt_instances.keypoints_visible = np.ones(\n (1, num_keypoints, 1)).astype(bool)\n gt_instances.keypoints_visible[0, (2 * i) % 8, 0] = False\n gt_instances.bboxes = np.random.random((1, 4)) * 20 * i\n gt_instances.head_size = np.random.random((1, 1)) * 10 * i\n\n pred_instances = InstanceData()\n pred_instances.keypoints = keypoints\n\n data = {'inputs': None}\n data_sample = {\n 'gt_instances': gt_instances.to_dict(),\n 'pred_instances': pred_instances.to_dict(),\n }\n\n self.data_batch.append(data)\n self.data_samples.append(data_sample)", "def setUp(self):\n self.batch_size = 8\n num_keypoints = 15\n self.data_batch = []\n self.data_samples = []\n\n for i in range(self.batch_size):\n gt_instances = InstanceData()\n keypoints = np.zeros((1, num_keypoints, 2))\n keypoints[0, i] = [0.5 * i, 0.5 * i]\n gt_instances.keypoints = keypoints\n gt_instances.keypoints_visible = np.ones(\n (1, num_keypoints, 1)).astype(bool)\n gt_instances.keypoints_visible[0, (2 * i) % 8, 0] = False\n gt_instances.bboxes = np.random.random((1, 4)) * 20 * i\n gt_instances.head_size = np.random.random((1, 1)) * 10 * i\n\n pred_instances = InstanceData()\n pred_instances.keypoints = keypoints\n\n data = {'inputs': None}\n data_sample = {\n 'gt_instances': gt_instances.to_dict(),\n 'pred_instances': pred_instances.to_dict(),\n }\n\n self.data_batch.append(data)\n self.data_samples.append(data_sample)" ]
[ "0.8281578", "0.77893066", "0.76956064", "0.7368539", "0.7366503", "0.72501296", "0.721362", "0.7188087", "0.71556234", "0.71437585", "0.7139229", "0.71196824", "0.7102774", "0.71003747", "0.70315087", "0.70204693", "0.7020253", "0.69788855", "0.69752514", "0.69714016", "0.6929242", "0.690724", "0.6890734", "0.6887791", "0.6865315", "0.6852052", "0.68498933", "0.6837032", "0.68231505", "0.6821791", "0.68186605", "0.6813884", "0.6801915", "0.6795159", "0.6793313", "0.679213", "0.6785775", "0.6785081", "0.6784817", "0.67768335", "0.67689824", "0.67482245", "0.6747979", "0.6747381", "0.67299205", "0.67259234", "0.67216164", "0.6720331", "0.67180026", "0.67173886", "0.67126316", "0.67111564", "0.6708483", "0.6703786", "0.6695574", "0.66855407", "0.66802156", "0.6680049", "0.66722286", "0.6671532", "0.6671299", "0.66627514", "0.6652199", "0.66508746", "0.6642598", "0.664164", "0.66315186", "0.66268945", "0.6622923", "0.6618056", "0.6618056", "0.6618056", "0.6618056", "0.66163594", "0.6614253", "0.66040593", "0.6599233", "0.65980285", "0.65974545", "0.6597218", "0.658673", "0.6580573", "0.65803474", "0.65795594", "0.6578823", "0.65753067", "0.6574781", "0.6574683", "0.6555681", "0.6550789", "0.6545744", "0.654367", "0.65427476", "0.6539404", "0.65340227", "0.6533837", "0.6533627", "0.6532762", "0.652176", "0.652176" ]
0.6950932
20
>>> set_N(2)
>>> from sudoku_state import N, N_2, N_3, N_4
>>> board = SudokuBoard()
>>> squares = [SudokuSquare(id=i) for i in range(N_4)]
>>> state = SudokuState(squares=squares, board=board)
>>> StatePrinter.print_board_state(state)
====+========+====
| | | |
++
| | | |
====+========+====
| | | |
++
| | | |
====+========+====
def apply_to_sets(cls, sets):
    for sq_set in sets:
        sqs_with_val = {}
        sqs_by_bitmask = {}
        for sq in iter(sq_set):
            for sq2 in iter(sq_set):
                if sq2.known_value:
                    sq.eliminate(sq2)
            pvals = sq.possible_values()
            if sq.bitmask not in sqs_by_bitmask:
                sqs_by_bitmask[sq.bitmask] = []
            sqs_by_bitmask[sq.bitmask].append(sq)
            for val in pvals:
                if val not in sqs_with_val:
                    sqs_with_val[val] = []
                sqs_with_val[val].append(sq)
        for val, sqs in sqs_with_val.iteritems():
            if len(sqs) == 1:
                sqs[0].set_value(val)
        for bm, sqs in sqs_by_bitmask.iteritems():
            if len(sqs) > 1:
                pvals = list(SudokuSquare.bitmask_to_possible_values(bm))
                if len(sqs) == len(pvals):
                    for sq in iter(sq_set):
                        if sq not in sqs:
                            sq.eliminate(sqs[0])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def printState(self,board):\n self.printBoard(board.getBoard())\n self.printScore(board,board.getScore())", "def test_state(\n size: Union[int, tuple],\n num_berries: int,\n number_steps: int,\n state_sizes: List[int] = [3, 5],\n) -> None:\n for state_size in state_sizes:\n game = Game(\n size,\n [0, 0],\n -1,\n 5,\n -5,\n 10,\n num_berries,\n berry_movement_probabilities=[0.5] * num_berries,\n state_size=state_size,\n )\n done = False\n i = 1\n print(f\"Beginning full board\\n{game.get_state(full=True)}\")\n print(f\"And the state\\n{game.get_state(state_size)}\")\n while not done and i < number_steps:\n action = random.choice(MOVEMENTS)\n print(f\"Action taken {action}\")\n state, reward, done = game.step(action)\n print(f\"Full board\\n{game.get_state(full=True)}\")\n print(f\"The state\\n{game.get_state(state_size)}\")\n i += 1", "def printboard(state):\n cells = []\n for i in range(3):\n for j in range(3):\n cells.append(NAMES[state[i][j]].center(6))\n print(cells)\n print(*cells)\n print(BOARD_FORMAT.format(*cells))", "def assignState(self):\n\t\tblack = ['r', 'n', 'b','q','k','b','n','r']\n\t\twhite = ['R','N','B','Q','K','B','N','R']\n\n\t\tfor i in range(8):\n\t\t\tself.squares[8*i + 0].state = black[i]\n\t\t\tself.squares[8*i + 1].state = 'p'\n\t\t\tself.squares[8*i + 2].state = '.'\n\t\t\tself.squares[8*i + 3].state = '.'\n\t\t\tself.squares[8*i + 4].state = '.'\n\t\t\tself.squares[8*i + 5].state = '.'\n\t\t\tself.squares[8*i + 6].state = 'P'\n\t\t\tself.squares[8*i + 7].state = white[i]\n\n\t\tfor square in self.squares:\n\t\t\tself.boardMatrix.append(square.state)", "def print_state(self):\n\t\tprint self.time, len(self.state['s']), len(self.state['p']), len(self.state['c'])", "def draw_board(board_state):\n print(\" {} | {} | {} \".format(board_state[6], board_state[7], board_state[8]))\n print(\"-----------\")\n print(\" {} | {} | {} \".format(board_state[3], board_state[4], board_state[5]))\n print(\"-----------\")\n print(\" {} | {} | {} \".format(board_state[0], board_state[1], board_state[2]))", "def print_state(self):\n grid = [[\".\" for _ in range(self.width)] for _ in range(self.height)]\n #icons = [\"^\", \"/\", \">\", \"\\\\\", \"|\", \"/\", \"<\", \"\\\\\"] # NON-UNICODE, uncomment if problems\n icons = [chr(0x2191), chr(0x2197), chr(0x2192), chr(0x2198), \\\n chr(0x2193), chr(0x2199), chr(0x2190), chr(0x2196)]\n for robot in self.robots:\n grid[robot[1]][robot[0]] = icons[(robot[2]+robot[3]) % 8]\n for item in self.items:\n if item[2] == 1:\n grid[item[1]][item[0]] = \"O\"\n elif item[2] == 2:\n grid[item[1]][item[0]] = \"*\"\n print(\"-\"*(self.width+2))\n for i in range(self.height):\n print(\"|\", end=\"\")\n for j in range(self.width):\n print(grid[i][j], end=\"\")\n print(\"|\")\n print(\"-\"*(self.width+2))", "def print_game_state(board):\r\n print(board)\r\n illegal_moves = [(0, 0), (2, 0), (0, 4), (2, 4)]\r\n for i in range(board.shape[0]):\r\n buffer = ''\r\n for j in range(board.shape[1]):\r\n if board[i][j] == 1:\r\n buffer += 'X\\t'\r\n elif board[i][j] == 2:\r\n buffer += '0\\t'\r\n elif (i, j) in illegal_moves:\r\n buffer += ' \\t'\r\n else:\r\n buffer += '-\\t'\r\n print (buffer)", "def __init__(self, n: int):\n self.n = n\n self.board = [[0 for _ in range(n)] for _ in range(n)]", "def makeState(*args,**kwargs):\n \n cells = []\n\n for item in args:\n #print item\n cells.append(item)\n \n newState = State(cells)\n #newState.printBoard()\n return newState", "def print_state(X):\n out = ''\n for coord in range(18):\n out += 
\"{0}\".format(STATE_VARS[coord])\n val = float(X[coord])\n out += \" {0: 2.4e}\\n\".format(val)\n\n print out", "def print_puzzle(state):\r\n \r\n print('-----')\r\n for i in range(4):\r\n print('|', end=\"\")\r\n for j in range(3):\r\n if state[i][j] == 0:\r\n print(\" |\", end=\"\")\r\n else:\r\n print(\"\", state[i][j], \"|\", end=\"\")\r\n if i == 0:\r\n break\r\n print('\\n-------------')", "def initial_state() -> Board:\n board = (\"rnbqkbnr\", \"pppppppp\", \"........\", \"........\", \"........\",\n \"........\", \"PPPPPPPP\", \"RNBQKBNR\")\n\n return board", "def __repr__(self, state):\n print ' ',\n for w in range(len(state)+2):\n print \"___\",\n print '\\n'\n for x in state:\n print \"| \", x, \" |\"\n print ' ',\n for y in range(len(state)+2):\n print \"___\",\n print '\\n'\n return state", "def display_state(self):\r\n\r\n print('\\n')\r\n print('>>CURRENT STATE')\r\n ct = 0\r\n for i in self.state:\r\n for j in i:\r\n if j == -1:\r\n val = 'X'\r\n else:\r\n val = str(ct)\r\n if len(val) == 1:\r\n print(' ' + val + ' ', end='')\r\n else:\r\n print(val + ' ', end='')\r\n ct += 1\r\n print('\\n')", "def get_board_state_pretty(self):\n\n board_state = ''\n for i in range(0, 3):\n board_state += ' | '.join([self.board['{}{}'.format(i, j)] for j in range(0, 3)])\n board_state += '\\n'\n return board_state", "def showState(self):\n for i in self.state[0]:\n for j in self.state[1]:\n print(self.table[i][j], end=\"\")\n print(\"\")", "def printBoard(self):\n\t\tkey = [' ', 'X', 'O']\n\t\tprint(' | |')\n\t\tprint(' ' + key[self.state[0][0]] + ' | ' + key[self.state[0][1]] + ' | ' + key[self.state[0][2]])\n\t\tprint(' | |')\n\t\tprint('-----------')\n\t\tprint(' | |')\n\t\tprint(' ' + key[self.state[1][0]] + ' | ' + key[self.state[1][1]] + ' | ' + key[self.state[1][2]])\n\t\tprint(' | |')\n\t\tprint('-----------')\n\t\tprint(' | |')\n\t\tprint(' ' + key[self.state[2][0]] + ' | ' + key[self.state[2][1]] + ' | ' + key[self.state[2][2]])\n\t\tprint(' | |')", "def get_board_state(self):\n\n board_state = ''\n for i in range(0, 3):\n board_state += ''.join([self.board['{}{}'.format(i, j)] for j in range(0, 3)])\n return board_state", "def display_board(board_state):\n\n if type(board_state) != str:\n raise TypeError('Given board input must be String')\n\n if len(board_state) != 9:\n raise Exception(\"Input board string length is not 9\")\n\n counter = 0\n # print()\n for position in board_state:\n counter += 1\n if counter % 3 == 0:\n \n if counter != 9:\n paddingString = \"\\n---------\\n\"\n else:\n paddingString = ''\n else:\n paddingString = \" | \"\n\n if position.isnumeric():\n print(\" \", end=paddingString)\n\n else:\n print(position, end=paddingString)\n\n print(\"\\n\\n\")", "def update_state_game_variables(self):\n self.model.numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8]\n self.model.player_mark = \"\"\n self.model.player_move = 0\n self.model.boards = [\"board\"] * 9\n self.update_score_board()", "def show_board(self):\n board_vis = f\"\\n{'*' * 22}Board state{'*' * 23}\\n\"\n str_p2_store=\" \"+str(self.p2_store()) if self.p2_store()<10 else str(self.p2_store())\n board_vis += (f\" {str_p2_store} - | \" +\n \" || \".join(\n [i if len(i) == 2 else ' ' + i for i in list(map(str, self.p2_pits()[::-1]))]) + \" | \\n\")\n board_vis += f\"{'-------' * (self.M + 2)}\\n\"\n board_vis += (\" | \" + \" || \".join(\n [i if len(i) == 2 else ' ' + i for i in list(map(str, self.p1_pits()))]) +\n f\" | - {self.p1_store()}\\n\")\n board_vis += f\"{'*' * 56}\\n\"\n print(board_vis)", "def 
init_state_10(n):\n if DistEinsum.MPI_RANK == DistEinsum.MPI_ROOT:\n state = cupy.zeros([2] * n, NP_DATA_TYPE)\n state.reshape(-1)[0] = 1.0\n else:\n state = None\n if DistEinsum.MPI_RANK == DistEinsum.MPI_ROOT:\n assert cupy.round(cupy.linalg.norm(state), 6) == 1.0\n else:\n state is None\n # print(state)\n return state", "def __init__(self, n: int):\n self.size = n\n self.board = [[CellValues.EMPTY.value] * n for _ in range(n)]\n self.num_empty_cells = n * n", "def initializeStates(n):\n states = []\n for i in range(n):\n states.append(0)\n return states", "def test_create_state(self):\n\n # States should be abled to be identified by numbers or by strings I suppose.\n # I don't imagine that strings will ever be used.\n mdp = MDP()\n mdp.add_state(0)\n mdp.add_state(1)\n mdp.add_state(2)\n mdp.add_state(3)\n mdp.add_state(4)\n mdp.add_state(5, terminal=True)\n self.assertEqual(mdp.num_states(), 6)", "def test_create_new_mdp_initial_num_states(self):\n\n mdp = MDP(5)\n self.assertEqual(mdp.num_states(), 5)\n\n # this MDP should have 5 states\n self.assertEquals(type(mdp.get_state(0)), State)\n self.assertEquals(type(mdp.get_state(2)), State)\n self.assertEquals(type(mdp.get_state(4)), State)", "def run_iterations(self, n, verbose = False):\n for i in range(n):\n # Calculate total number of neighbors for each cell\n all_neighbors = self.get_all_neighbors()\n all_num_neighbors = np.sum(all_neighbors, axis = (-2,-1)) - self.board\n # Determine new state for each cell using lookup table and number of neighbors\n self.board[:] = np.where(self.board, \n self.lookup[1][all_num_neighbors], \n self.lookup[0][all_num_neighbors])\n # Verbosity check\n if verbose:\n print(self.board)", "def __init__(self, size, given_cells):\n self.ROWS = string.ascii_uppercase[:size ** 2]\n self.COLS = [str(i) for i in range(1, size ** 2)]\n self.size = size\n self.given_cells = given_cells\n self.board = self.create_board()\n self.squares = [utility.cross(i, j) for i in [self.ROWS[i:i + size] for i in range(0, len(self.ROWS), size)]\n for j in [self.COLS[i:i + size] for i in range(0, len(self.COLS), size)]]\n self.attach_neighbors()\n self.update_neighbor_values_by_given()\n print(\"Initial board:\")\n GUI.print_sudoku(self.board, self.size)", "def __draw_board(self, state=None):\n if not state:\n state = self.state\n if self.game_started:\n print('\\r\\033[8A')\n print(emoji.emojize(' {state[0]} | {state[1]} | {state[2]} \\n___|___|___\\n'\n ' {state[3]} | {state[4]} | {state[5]} \\n___|___|___\\n'\n ' {state[6]} | {state[7]} | {state[8]} \\n | | \\n'.format(state=state)))", "def create_board(N):\n board = [[0 for x in range(N)] for y in range(N)] \n return board", "def save_board_state(self):\n self.board_states.append([copy.deepcopy(self.stock), copy.deepcopy(self.wp), \n copy.deepcopy(self.foundations), copy.deepcopy(self.tableaus)])", "def __str__(self):\n s = \"\"\n for r in range(1,self.size+1):\n for c in range(1,self.size+1):\n s += str(self.gameState[r,c])\n return s", "def print_state(state,indent=4):\n if state != False:\n for (name,val) in vars(state).items():\n if name != '__name__':\n for x in range(indent): sys.stdout.write(' ')\n sys.stdout.write(state.__name__ + '.' 
+ name)\n print(' =', val)\n else: print('False')", "def _display_board(state: game.GameState) -> None:\n for row in range(state.get_rows()):\n rowString = \"|\"\n for col in range(state.get_columns()):\n cellValue = state.get_cell_contents(row, col)\n cellState = state.get_cell_state(row, col)\n if cellState == game.EMPTY_CELL:\n rowString += ' '\n elif cellState == game.OCCUPIED_CELL:\n rowString += (' ' + cellValue + ' ')\n elif cellState == game.FALLER_MOVING_CELL:\n rowString += ('[' + cellValue + ']')\n elif cellState == game.FALLER_STOPPED_CELL:\n rowString += ('|' + cellValue + '|')\n elif cellState == game.MATCHED_CELL:\n rowString += ('*' + cellValue + '*')\n rowString += '|'\n print(rowString)\n finalLine = ' '\n for col in range(state.get_columns()):\n finalLine += '---'\n finalLine += ' '\n print(finalLine)", "def simulation(self, gameboard, N):\r\n MAX_DEPTH = 6\r\n score = 0\r\n\r\n # Simulation N times\r\n for i in range(N):\r\n depth = 0\r\n gameboard_cpy = gameboard.copy()\r\n \r\n # Create two temp angents for random moves\r\n _SELF_ = Stochastic(self.color)\r\n _OPPONENT_ = Stochastic(self.opponent_color)\r\n\r\n _SELF_ACTION_ = 1\r\n _OPPONENT_ACTION_ = 0\r\n\r\n while ((_SELF_.win_lose_tie(gameboard_cpy) == 'UNFINISHED')\r\n and (depth < MAX_DEPTH)):\r\n if _SELF_ACTION_ == 1:\r\n temp = _SELF_._policy(gameboard_cpy)\r\n _SELF_.make_move(temp, gameboard_cpy)\r\n\r\n if _OPPONENT_ACTION_ == 1:\r\n temp = _OPPONENT_._policy(gameboard_cpy)\r\n _OPPONENT_.make_move(temp, gameboard_cpy)\r\n\r\n _SELF_ACTION_, _OPPONENT_ACTION_ = _OPPONENT_ACTION_, _SELF_ACTION_\r\n\r\n depth += 1\r\n self.expanded_nodes += 1\r\n\r\n result = _SELF_.win_lose_tie(gameboard_cpy)\r\n if result == 'win':\r\n score += 1.0\r\n elif result == 'lose':\r\n score += 0.0\r\n elif result == 'tie':\r\n score += 0.5\r\n elif depth >= MAX_DEPTH:\r\n count = _SELF_.count_winning_blocks(gameboard_cpy)\r\n score += count[_SELF_.color] / (count[_SELF_.color] + count[_SELF_.opponent_color])\r\n \r\n avg_score = score / N\r\n return avg_score", "def buildBoard(self, n):\n\n boardDict = []\n diagCount = 0\n\n for i in range(n):\n self.rows[i] = [True, \"\", 0] #homogenous, X/O, count of X's/O's\n self.cols[i] = [True, \"\", 0]\n for j in range(n):\n\n# Is there a faster way to make this array than nested for loops?\n boardDict.append((i,j))\n return boardDict", "def initial_state():\n board = [[EMPTY, EMPTY, EMPTY],\n [EMPTY, EMPTY, EMPTY],\n [EMPTY, EMPTY, EMPTY]]\n return board", "def display(self):\n for r in range(1, self.size+1):\n print(\"+\" + (\"-+\"*self.size))\n print(\"|\", end=\"\")\n for c in range(1, self.size+1):\n print(self.gameState[r,c], end=\"\")\n print(\"|\",end=\"\")\n print()\n print(\"+\" + (\"-+\"*self.size))", "def print_state(self):\n print(\"n\\tg\\to\\ta\\tc\\ts\\ttau\\td\\tN\")\n for p in self.persons:\n p.print_state()\n print(\"type\\tpersons\")\n for ps in self.partnerships:\n ps.print_state()", "def __repr__(self):\n s = \"\"\n for y in range(0,HEIGHT):\n temp=\"\"\n for x in range(0,WIDTH):\n temp = temp+ str(self.gameState[x,y])\n s += temp+\"\\n\"\n return s", "def new_board(n: int) -> Board:\n\n return [[0 for _ in range(n)] for _ in range(n)]", "def xpotts_states(n, k):\n\n assert n>0, \"n cannot be <0\"\n assert k>=2, \"k cannot be <2\"\n \n for i in range(k**n):\n state = base_repr(i, k)\n yield ['0']*(n-len(state)) + state", "def init_board():\n\t# Generates a table 10*10 of 0s with -1 around and the initial state\n\t# of the board with 2 whites and 2 blacks in the 
middle\n\ttable = [[0 if i != 0 and i != 9 else -1 for i in range(10)] if j != 0 and j != 9 else [-1 for i in range(10)] for j in range(10)] #leaves a -1 line around the whole table of 0s\n\t#initial state is drawn and recorded\n\ttable[4][4] = 2\n\ttable[5][5] = 2\n\ttable[4][5] = 1\n\ttable[5][4] = 1\n\tdrawPiece((4,4),2)\n\tdrawPiece((5,5),2)\n\tdrawPiece((4,5),1)\n\tdrawPiece((5,4),1)\n\treturn table", "def get_state(self,board):\n s = range(board.size())\n return [ board.getCell(x,y) for y in s for x in s]", "def naked_round(self):\n self.change = False\n for row in range(self.board_size):\n for col in range(self.board_size):\n if len(self.possibles[row][col]) == 1:\n num = self.possibles[row][col].pop()\n self.set_number(num, row, col, \"NS\")", "def random_puzzle(N=17):\n values = dict((s, digits) for s in squares)\n for s in shuffled(squares):\n if not assign(values, s, random.choice(values[s])):\n break\n ds = [values[s] for s in squares if len(values[s]) == 1]\n if len(ds) >= N and len(set(ds)) >= 8:\n return ''.join(values[s] if len(values[s]) == 1 else '.' for s in squares)\n return random_puzzle(N) ## Give up and make a new puzzle", "def evaluateBoardState(self, board):\n\n \"\"\"\n These are the variables and functions for board objects which may be helpful when creating your Agent.\n Look into board.py for more information/descriptions of each, or to look for any other definitions which may help you.\n\n Board Variables:\n board.width \n board.height\n board.last_move\n board.num_to_connect\n board.winning_zones\n board.score_array \n board.current_player_score\n\n Board Functions:\n get_cell_value(row, col)\n try_move(col)\n valid_move(row, col)\n valid_moves()\n terminal(self)\n legal_moves()\n next_state(turn)\n winner()\n \"\"\"\n if self.id == 1:\n opponent_id = 2\n else:\n opponent_id = 1\n\n maxvalue = 100000\n minvalue = -maxvalue\n winner = board.winner()\n if winner == self.id:\n return maxvalue\n elif winner == opponent_id:\n return minvalue\n size_y = board.height\n size_x = board.width\n map_ = []\n num_to_connect = board.num_to_connect\n total_points = 0\n\n multiply_reachable = 1\n multiply_oddeven = 1\n # basically this function is calculating all the possible win positions\n # more pieces in a possible win position will be counted with more weights\n # a win position with X pieces in it will be counted as X^2 points\n # initialise the zones maps\n for i in range(size_y):\n map_.append([])\n for j in range(size_x):\n map_[i].append([])\n\n # Fill in the horizontal win positions\n for i in range(size_y):\n for j in range(size_x - num_to_connect + 1):\n points = 0\n self_pieces_count = 0\n opponent_pieces_count = 0\n for k in range(num_to_connect):\n if board.board[i][j + k] == opponent_id:\n opponent_pieces_count += 1\n elif board.board[i][j + k] == self.id:\n points += len(board.winning_zones[j+k][i])\n if (self.id == 1 and i % 2 == 1) or (self.id == 2 and i%2 == 0):\n points *= multiply_oddeven\n self_pieces_count += 1\n if self_pieces_count == 3 and opponent_pieces_count == 0:\n if j - 1 >= 0 and board.board[i][j + 3] == 0 and board.board[i][j - 1] == 0 \\\n and board.try_move(j + 3) == i and board.try_move(j - 1) == i:\n return maxvalue\n elif j + 4 < size_y and board.board[i][j + 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j + 4) == i and board.try_move(j) == i:\n return maxvalue\n else:\n for k in range(num_to_connect):\n if board.board[i][j + k] == 0 and board.try_move(j + k) == i:\n points *= multiply_reachable\n elif 
opponent_pieces_count == 3 and self_pieces_count == 0:\n if j - 1 >= 0 and board.board[i][j + 3] == 0 and board.board[i][j - 1] == 0 \\\n and board.try_move(j + 3) == i and board.try_move(j - 1) == i:\n return minvalue\n elif j + 4 < size_y and board.board[i][j + 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j + 4) == i and board.try_move(j) == i:\n return minvalue\n # else:\n # for k in range(num_to_connect):\n # if board.board[i][j + k] == 0 and board.try_move(j + k) == i:\n # points *= -multiply_reachable\n if (opponent_pieces_count == 3 and self_pieces_count == 0) or opponent_pieces_count == 0:\n total_points += points\n\n # Fill in the vertical win positions\n for i in range(size_x):\n for j in range(size_y - num_to_connect + 1):\n points = 0\n self_pieces_count = 0\n for k in range(num_to_connect):\n if board.board[j + k][i] == opponent_id:\n opponent_pieces_count += 1\n elif board.board[j + k][i] == self.id:\n points += len(board.winning_zones[i][j+k])\n if (self.id == 1 and (j+k) % 2 == 1) or (self.id == 2 and (j+k)%2 == 0):\n points *= multiply_oddeven\n self_pieces_count += 1\n points *= multiply_reachable\n # if opponent_pieces_count == 3 and self_pieces_count == 0:\n # points *= -1\n if (opponent_pieces_count == 3 and self_pieces_count == 0) or opponent_pieces_count == 0:\n total_points += points\n\n # Fill in the forward diagonal win positions\n for i in range(size_y - num_to_connect + 1):\n for j in range(size_x - num_to_connect + 1):\n points = 0\n self_pieces_count = 0\n opponent_pieces_count = 0\n for k in range(num_to_connect):\n if board.board[i + k][j + k] == opponent_id:\n opponent_pieces_count += 1\n elif board.board[i + k][j + k] == self.id:\n points += len(board.winning_zones[j+k][i+k])\n if (self.id == 1 and (i+k) % 2 == 1) or (self.id == 2 and (i+k)%2 == 0):\n points *= multiply_oddeven\n self_pieces_count += 1\n if self_pieces_count == 3 and opponent_pieces_count == 0:\n if i - 1 >= 0 and j - 1 >= 0 and board.board[i + 3][j + 3] == 0 and board.board[i - 1][j - 1] == 0 \\\n and board.try_move(j + 3) == i + 3 and board.try_move(j - 1) == i - 1:\n return maxvalue\n elif i + 4 < size_y and j + 4 < size_x and board.board[i + 4][j + 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j + 4) == i + 4 and board.try_move(j) == i:\n return maxvalue\n else:\n for k in range(num_to_connect):\n if board.board[i + k][j + k] == 0 and board.try_move(j + k) == i + k:\n points *= multiply_reachable\n elif opponent_pieces_count == 3 and self_pieces_count == 0:\n if i - 1 >= 0 and j - 1 >= 0 and board.board[i + 3][j + 3] == 0 and board.board[i - 1][j - 1] == 0 \\\n and board.try_move(j + 3) == i + 3 and board.try_move(j - 1) == i - 1:\n return minvalue\n elif i + 4 < size_y and j + 4 < size_x and board.board[i + 4][j + 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j + 4) == i + 4 and board.try_move(j) == i:\n return minvalue\n # else:\n # for k in range(num_to_connect):\n # if board.board[i + k][j + k] == 0 and board.try_move(j + k) == i + k:\n # points *= -multiply_reachable\n if (opponent_pieces_count == 3 and self_pieces_count == 0) or opponent_pieces_count == 0:\n total_points += points\n\n # Fill in the backward diagonal win positions\n for i in range(size_y - num_to_connect + 1):\n for j in range(size_x - 1, num_to_connect - 1 - 1, -1):\n points = 0\n self_pieces_count = 0\n opponent_pieces_count = 0\n for k in range(num_to_connect):\n if board.board[i + k][j - k] == opponent_id:\n opponent_pieces_count += 1\n elif board.board[i + k][j - k] == 
self.id:\n points += len(board.winning_zones[j-k][i+k])\n if (self.id == 1 and (i+k) % 2 == 1) or (self.id == 2 and (i+k)%2 == 0):\n points *= multiply_oddeven\n self_pieces_count += 1\n if self_pieces_count == 3 and self_pieces_count == 0:\n if board.board[i + 3][j - 3] == 0 and board.board[i - 1][j + 1] == 0 \\\n and board.try_move(j - 3) == i + 3 and board.try_move(j + 1) == i - 1:\n return maxvalue\n elif i + 4 < size_y and j - 4 >= 0 and board.board[i + 4][j - 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j - 4) == i + 4 and board.try_move(j) == i:\n return maxvalue\n else:\n for k in range(num_to_connect):\n if board.board[i + k][j - k] == 0 and board.try_move(j - k) == i + k:\n points *= multiply_reachable\n\n elif opponent_pieces_count == 3 and self_pieces_count == 0:\n if board.board[i + 3][j - 3] == 0 and board.board[i - 1][j + 1] == 0 \\\n and board.try_move(j - 3) == i + 3 and board.try_move(j + 1) == i - 1:\n return minvalue\n elif i + 4 < size_y and j - 4 >= 0 and board.board[i + 4][j - 4] == 0 and board.board[i][j] == 0 \\\n and board.try_move(j - 4) == i + 4 and board.try_move(j) == i:\n return minvalue\n # else:\n # for k in range(num_to_connect):\n # if board.board[i + k][j - k] == 0 and board.try_move(j - k) == i + k:\n # points *= -multiply_reachable\n if (opponent_pieces_count == 3 and self_pieces_count == 0) or opponent_pieces_count == 0:\n total_points += points\n return total_points", "def _print_board(board):\r\n pass", "def get_every_nth_state(n=10):\n loops = int(50/n)\n state_list = []\n for i in range(loops):\n state_list.append(states[(i+1)*n-1])\n return state_list", "def __display(self,state: dict):\n width = 1+max(len(state[s]) for s in self.__boxes)\n line = '+'.join(['-'*(width*3)]*3)\n for r in self.__rows:\n print(''.join( state[r+c].center(width)+ ('|' if c in '36' else '')\n for c in self.__cols))\n if r in 'CF': print(line)", "def __init__(self, n):\n self.matrix = [[0 for i in range(n)] for j in range(n)]\n self.winning = False", "def initialize_puzzle_board(self, n=3, hType=1, random=True, diff=None):\r\n\t\tself.n = n\r\n\r\n\t\t# While loop to continuously create random boards until a solvable one is made.\r\n\t\tboardList = [x for x in range(n**2)]\r\n\t\twhile random:\r\n\t\t\tshuffle(boardList)\r\n\r\n\t\t\tif self.generated_solvable(boardList):\r\n\t\t\t\tprint \"Found something solvable:\", boardList\r\n\t\t\t\tbreak # From outer While-True\r\n\r\n\t\t# If statements to use non-random, burnt-in boards of various difficulties.\r\n\t\tif not random and n == 3:\r\n\t\t\tif diff == 0:\r\n\t\t\t\tboardList = [3,1,2,4,7,5,6,8,0]\r\n\t\t\telif diff == 1:\r\n\t\t\t\tboardList = [3,2,5,4,1,8,6,0,7]\r\n\t\t\telif diff == 2:\r\n\t\t\t\tboardList = [1,0,6,5,7,4,2,3,8]\r\n\r\n\t\telif not random and n == 4:\r\n\t\t\tif diff == 0:\r\n\t\t\t\tboardList = [4,1,2,3,5,0,6,7,8,9,10,11,12,13,14,15]\r\n\r\n\t\t# Location of 0 (the empty tile) in the flat list.\r\n\t\tlocZero = boardList.index(0)\r\n\r\n\t\t# Using floor division and modulo to attain the nested location of the 0\r\n\t\tself.x = locZero // self.n\r\n\t\tself.y = locZero % self.n\r\n\r\n\t\t# Looping over the flat list and appending it, creating the nested list that is the final board\r\n\t\tfor i in range(self.n):\r\n\t\t\ti1, i2 = self.n*i, self.n*(i+1)\r\n\t\t\tself.board.append(boardList[i1:i2])\r\n\r\n\t\t# Double checking that we determined 0's position correctly.\r\n\t\tassert( self.board[self.x][self.y] == 0 )\r\n\r\n\t\t# Generate the goal (class variable) for the board based 
on size\r\n\t\tself.generate_goal()\r\n\t\t# Generates the heuristic value for this first board.\r\n\t\tself.generate_heuristic()\r\n\t\t# Generates the hash value for __eq__ from the board.\r\n\t\tself.eqHash = hash(str(self))", "def print_state(id=None):\n data = storage.all(\"State\")\n return render_template('9-states.html', states=data, id=id)", "def __init__(self, size, board):\n self.BoardSize = size #the size of the board\n self.CurrentGameBoard= board #the current state of the game board", "def print_state(self):\n print('\\nthe current state is: ' + str(self.state) + '\\n')", "def __init__(self,m,n):\n self.columns = m\n self.rows = n\n self.board = makeBoard(m,n)", "def make_initial_state(self,seed,scrambles):\n seen = {}\n ns=0\n x = range(self.N*self.N)\n\n for r in range(self.N):\n for c in range(self.N):\n if x[r*self.N+c]==0:\n row,col=r,c\n self.initial = PuzzleState(x,self.N,row,col)\n R = random.Random()\n R.seed(seed)\n while ns<scrambles:\n index = R.randint(0,len(self.actions)-1)\n a = self.actions[index]\n nexts = self.initial.move(a)\n if nexts is not None:\n serial = nexts.__str__()\n if serial not in seen:\n seen[serial] = True\n self.initial = nexts\n ns += 1\n print('Problem:', self.__doc__, 'Initial state:')\n print(self.initial)\n print('==============')", "def __init__(self, state=None, choices=None, n=None, parent=None):\n\n if n is None:\n print \"problem with n\"\n self.n = 8\n else:\n self.n = n\n\n if state is None:\n self.state = []\n for i in range(self.n):\n self.state.append(-1)\n else:\n self.state = state\n\n if choices is None:\n self.choices = []\n sub = set([])\n for a in range(self.n): # puts 0 through 7 inclusive into sub\n sub.add(a)\n for i in range(self.n): # puts sub into choices\n self.choices.append(copy.copy(sub))\n else:\n self.choices = choices\n\n if parent is not None:\n self.parent = parent", "def printBoard(self):", "def print(self):\n print(\" a b c d e f g h \")\n print(\" โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผ\")\n for row in range(8, 0, -1):\n pieces = \" โ”‚ \".join(self.state[row - 1])\n print(f\"{row} โ”‚ {pieces} โ”‚ {row}\")\n print(\" โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”ผ\")\n print(\" a b c d e f g h \")", "def __str__(self):\n s=\"\"\n for y in range(0,HEIGHT):\n for x in range(0,WIDTH):\n s+=str(self.gameState[x,y])\n return s", "def test_get_state_list(self):\n\n mdp = MDP(5)\n state_list = mdp.get_state_list()\n\n self.assertEqual(len(state_list), 5)\n self.assertIn(mdp.get_state(0), state_list)", "def test_simulate_state_output_padding(self, all_n_qubits):\n circuit_batch = []\n for n_qubits in all_n_qubits:\n qubits = cirq.GridQubit.rect(1, n_qubits)\n circuit_batch += util.random_circuit_resolver_batch(qubits, 1)[0]\n\n tfq_results = tfq_simulate_ops.tfq_simulate_state(\n util.convert_to_tensor(circuit_batch), [],\n [[]] * len(circuit_batch))\n\n # Don't use batch_util here to enforce consistent padding everywhere\n # without extra tests.\n sim = cirq.Simulator()\n manual_padded_results = []\n for circuit in circuit_batch:\n result = sim.simulate(circuit)\n wf = result.final_state_vector\n blank_state = np.ones(\n (2**max(all_n_qubits)), dtype=np.complex64) * -2\n blank_state[:wf.shape[0]] = wf\n manual_padded_results.append(blank_state)\n\n self.assertAllClose(tfq_results, manual_padded_results, atol=1e-5)", "def test_location_to_state():\n for num_rows in [12, 10]:\n for num_cols in [15, 
9]:\n env = Four_Rooms_Environment(grid_width=num_cols, grid_height=num_rows)\n observed_states = set()\n for row in range(num_rows):\n for col in range(num_cols):\n state = env.location_to_state((row, col))\n assert state not in observed_states\n observed_states.add(state)", "def solve(self, board: List[List[str]]) -> None:\n\n def expansion(i,j):\n for di,dj in {(-1,0),(1,0),(0,-1),(0,1)}:\n if -1<i+di<m and -1<j+dj<n and state[i+di][j+dj]=='O':\n return True\n return False\n\n if not board:\n return board\n\n m = len(board)\n n = len(board[0]) \n state = [['X']*n for _ in range(m)]\n\n for j in range(n):\n state[0][j] = board[0][j]\n state[m-1][j] = board[m-1][j]\n \n for i in range(m):\n state[i][0] = board[i][0]\n state[i][n-1] = board[i][n-1]\n \n flag = 1\n\n while flag:\n flag = 0\n\n for k in range(1, (1+min(m,n))//2):\n for j in range(k,n-k):\n if board[k][j]=='O' and state[k][j] == 'X' and expansion(k,j):\n state[k][j] = 'O'\n flag = 1\n \n if board[m-1-k][j]=='O' and state[m-1-k][j] == 'X' and expansion(m-1-k,j):\n state[m-1-k][j] = 'O'\n flag = 1\n \n for i in range(k,m-k):\n if board[i][k]=='O' and state[i][k] == 'X' and expansion(i,k):\n state[i][k] = 'O'\n flag = 1\n \n if board[i][n-1-k]=='O' and state[i][n-1-k] == 'X' and expansion(i,n-1-k):\n state[i][n-1-k] = 'O'\n flag = 1\n\n board[:] = state[:]", "def startState(self):\n\n n_squares_per_row = int(math.ceil(math.sqrt(self.n_snakes))**2)\n square_size = self.grid_size // int(n_squares_per_row)\n assignment = random.sample(range(n_squares_per_row ** 2), self.n_snakes)\n\n\n assert self.grid_size >= 3*n_squares_per_row\n\n snakes = {}\n for snake, assign in enumerate(assignment):\n head = (random.randint(1, square_size-2) + (assign // n_squares_per_row) * square_size,\n random.randint(1, square_size-2) + (assign % n_squares_per_row) * square_size)\n snakes[snake] = newSnake([head, utils.add(head, random.sample(DIRECTIONS, 1)[0])], snake)\n\n fruits_to_put = 2 * int(self.fruit_ratio) + 1\n start_state = State(snakes, {})\n start_state.addNRandomfruits(fruits_to_put, self.grid_size)\n return start_state", "def set_state(self, state):\n if(np.shape(state) == self.board_size):\n self.board_state = np.array(state)\n else:\n raise Exception('The entered board-state doesn\\'t '\n 'match the given size: %s.' 
% str(self.board_size))", "def __init__(self, squares=None, ncols=8, nrows=8):\n self.ncols = ncols\n self.nrows = nrows\n\n if not squares:\n self.squares = dict((i, None) for i in xrange(ncols * nrows))\n\n # 0 begins as the top of the board, making it black\n for i in xrange(ncols * 3):\n row, col = i // ncols, i % ncols\n if row % 2 == col % 2:\n self.squares[i] = Piece(\"black\")\n # red would be the bottom 3 rows\n for i in xrange(ncols * (nrows - 3), ncols * nrows):\n row, col = i // ncols, i % ncols\n if row % 2 == col % 2:\n self.squares[i] = Piece(\"red\")", "def set_numcells(self, N):\n\t\tself.create_cells(N)\n\t\tself.connect_cells()\n\t\tself.connect_stim()", "def __str__(self):\n\n # Create grid headers for the table\n headers = [letter for letter in string.ascii_uppercase[:self.width]]\n\n board_state = []\n board_state.extend([[value for value in row] for i, row in enumerate(self.board_state)])\n\n for idx, row in enumerate(board_state):\n row.insert(0, idx + 1)\n\n return tabulate(board_state, headers, tablefmt=\"grid\")", "def update_state(self):\n self.reset_state()\n for piece in self.pieces:\n coordinates = piece.get_block_positions()\n for coor in coordinates:\n x, y = coor\n self.state[y][x] = piece", "def print_board(self):\n\n print\n\n for row in xrange(8):\n for column in xrange(8):\n if self.squares[row][column]:\n print self.squares[row][column],; sys.stdout.write(u'')\n else:\n if self.dark_square((row, column)):\n print u' __ ',; sys.stdout.write(u'')\n else:\n print u' . ',; sys.stdout.write(u'')\n print\n print", "def __repr__(self):\n representantion = ''\n\n for i in range(3):\n for j in range(3):\n representantion += str(self.state[3 * i + j])\n\n if j == 2 and i != 2:\n representantion += '\\n'\n else:\n representantion += ' '\n\n return representantion", "def __init__(self, size):\n self.size = size\n self.num_queens_placed = 0\n self.board = self.generate_board()", "def serialize(board):\n bstate = np.zeros(shape=64, dtype=np.uint)\n\n pieces_map = {\"p\": 1, \"n\": 2, \"b\": 3, \"q\": 4, \"k\": 5, \"r\": 6}\n\n # General cases\n for i, piece in board.piece_map().items():\n piece = str(piece)\n piece_num = 0\n if piece.isupper():\n piece_num += 8\n piece = piece.lower()\n piece_num += pieces_map[piece]\n bstate[i] = piece_num\n\n # Special cases...\n # Castling\n for location, has_rights in [(0, board.has_queenside_castling_rights(True)),\n (7, board.has_kingside_castling_rights(True)),\n (63, board.has_kingside_castling_rights(False)),\n (63 - 7, board.has_queenside_castling_rights(False))]:\n if has_rights:\n bstate[location] += 1\n\n # En Passant\n ep_square = board.ep_square\n if ep_square is not None:\n bstate[ep_square] = 8\n\n bstate = bstate.reshape((8, 8))\n\n state = np.zeros(shape=(5, 8, 8), dtype=np.uint8)\n\n # Bitwise magic to convert everything into binary values\n state[0] = (bstate >> 0) & 1\n state[1] = (bstate >> 1) & 1\n state[2] = (bstate >> 2) & 1\n state[3] = (bstate >> 3) & 1\n\n state[4] = board.turn * 1.0\n\n return state", "def __init__(self, board = INITIAL_BOARD, n = 5):\n self.n = n\n self.numPlayer1 = 0\n self.numPlayer2 = 0\n self.board = [values[:] for values in board]\n for i in range(self.n):\n for j in range(self.n):\n if self.board[i][j] == -1:\n self.numPlayer1 += 1\n if self.board[i][j] == 1:\n self.numPlayer2 += 1", "def __call__(self, s, n=1000):\n\n root = StateNode(None, s, self.game)\n \n if root.parent is not None:\n raise ValueError(\"Root's parent must be None.\")\n \n for _ in range(n):\n #selection\n 
node = _get_next_node(root, self.tree_policy)\n #simulation\n node.reward = self.default_policy(node)\n #print(node.reward)\n #back\n self.backup(node)\n \n root.reset(copy.deepcopy(self.game_bak))\n \n #for i in root.children:\n # print(root.children[i].__dict__)\n # for j in root.children[i].children:\n # print(root.children[i].children[j].__dict__)\n # print(\"=======\")\n return rand_max(root.children.values(), key=lambda x: x.q).action, rand_max(root.children.values(), key=lambda x: x.q).q", "def draw_game_state(screen, gs):\n draw_board(screen)\n draw_pieces(screen, gs.board)", "def makeBoard(n):\n valid_positions = []\n for i in range(0, n):\n for j in range(0,n):\n valid_positions.append(Position(i,j))\n return valid_positions", "def state_rep (self):\n\n # Computing dealer_card\n dealer_card = self.dealer[0]\n\n # Compute player_max\n player_max = self.max_safe_sum()\n\n # State should not be bust\n assert (1 <= dealer_card <= 10)\n assert (0 <= player_max <= 31)\n\n # Compute table number\n possibilities = get_full_state (self.me)\n # possibilities = [p for p in possibilities if 0 <= p <= 31]\n\n table_no = 0\n for idx, p in enumerate(possibilities):\n if 0 <= p <= 31:\n table_no = idx\n assert 0 <= table_no <= 3\n\n # print (possibilities)\n return (table_no, dealer_card, player_max)", "def make_board(N):\n assert N >= 1, \"Invalid board dimension\";\n assert type(N) == int, \"N must be an integer\";\n return [[\"*\" for x in range(N)] for x in range(N)];", "def __init__(self, n):\n self.rows = [0 for _ in range(n)]\n self.columns = [0 for _ in range(n)]\n # First diagonal x+y, second y-x\n self.diagonal = [0, 0]\n self.score = {1: 1, 2: n+1}\n self.win = {1: n, 2: (n+1)*n}\n self.size = n", "def generate():\n global BOARD\n next = [[0] * ROWS for _ in range(COLS)]\n # Loop through every spot in our 2D array and check spots neighbors\n for x in range(COLS):\n for y in range(ROWS):\n # Add up all the states in a 3x3 surrounding grid\n neighbors = 0\n for i in range(-1, 2):\n for j in range(-1, 2):\n nx = (x + i + COLS) % COLS\n ny = (y + j + ROWS) % ROWS\n neighbors += BOARD[nx][ny]\n # A little trick to subtract the current cell's state since\n # we added it in the above loop\n neighbors -= BOARD[x][y]\n # Rules of Life\n if BOARD[x][y] == 1 and neighbors < 2 : next[x][y] = 0 # Loneliness\n elif BOARD[x][y] == 1 and neighbors > 3 : next[x][y] = 0 # Overpopulation\n elif BOARD[x][y] == 0 and neighbors == 3: next[x][y] = 1 # Reproduction\n else: next[x][y] = BOARD[x][y] # Stasis\n # Next is now our board\n BOARD = next", "def __str__(self):\n #formatting board correctly\n formatted_board = \"\"\n for i in range(self.size):\n formatted_board += str(self.board[i]) + \"\\n\"\n return \"Board size: \" + str(self.size) + \"\\n\" + \"Number of Queens placed: \" + str(self.num_queens_placed) + \"\\n\" + str(formatted_board)", "def print_state():\n global simulator\n if simulator is None:\n print \"program is not started\"\n return\n print simulator.state()", "def create_board(self, size):\n self.board = [\n [FieldState.EMPTY for _ in range(size)]\n for _ in range(size)\n ]", "def __init__(self, n: int):\n self.rows = [[0] * n for i in range(2)]\n self.cols = [[0] * n for i in range(2)]\n self.d = [0, 0]\n self.subd = [0, 0]\n self.n = n\n self.winner = 0", "def numStates(self):\n return len(self._mdp.S)", "def stake(n, play, opposite_play, current_state):\n\ttotal_occupied = current_state[\"X\"] + current_state[\"O\"] \n\t\"\"\"\n\t\tstates = [{\n\t\t\t\"X\": ...,\n\t\t\t\"O\": 
...,\n\t\t\t\"type\" : \"stake\"\n\t\t},{\n\t\t...\n\t\t}]\n\t\"\"\"\n\tstates = []\n\tboard_range = range(n) \n\tfor i in board_range:\n\t\tfor j in board_range:\n\t\t\tif not [i,j] in total_occupied:\n\t\t\t\ttemp_play = {}\n\t\t\t\ttemp_play[play] = current_state[play] + [[i,j]]\n\t\t\t\ttemp_play[opposite_play] = current_state[opposite_play]\n\t\t\t\t#temp_play[play].append()\n\t\t\t\ttemp_play[\"move\"] = [i,j]\n\t\t\t\ttemp_play[\"type\"] = \"Stake\"\n\t\t\t\tstates.append(temp_play)\n\n\treturn states", "def __init__(self, n_states: int, n_actions: int):\n self._p = {s: {a: [] for a in range(n_actions)} for s in range(n_states)}", "def make(self,state_board):\n\t\tstate_board[self.column][self.line] = self.couleur #place the piece\n\t\tdrawPiece((self.column,self.line),self.couleur) #draws it on the board\n\t\tfor pos in self.flips: #flips all the pieces in flips\n\t\t\tstate_board[pos[0]][pos[1]] = self.couleur\n\t\t\tdrawPiece(pos,self.couleur) #draws it on the board", "def print_board(self):\n print('Board:')\n print('\\n'.join([''.join(['{:4}'.format(item) for item in row]) for row in self.board]))", "def solveNQ(n):\n\n board = []\n for i in range(n):\n for j in range(n):\n board[i][j].append(0)\n\n if solveNQUtil(board, 0, n) == False:\n print(\"Solution does not exist\")\n return False\n printSolution(board)\n return True", "def __str__(self):\n board = ''\n board_2 = ''\n\n for row in self.from_grid:\n for space in row:\n board += ' ' + space\n board += '\\n'\n\n for row in self.to_grid:\n for space in row:\n board_2 += ' ' + space\n board_2 += '\\n'\n\n return 'Current State:\\n' + board + 'Target State:\\n' + board_2", "def test_mdp_size(self):\n mdp = MDP()\n self.assertEqual(mdp.num_states(), 0)\n\n mdp = MDP(5)\n self.assertEqual(mdp.num_states(), 5)\n\n mdp = MDP()\n mdp.add_state(0)\n mdp.add_state(1)\n self.assertEqual(mdp.num_states(), 2)", "def calculate_next_board_state(self):\n new_board_state = np.zeros_like(self.board_state)\n\n for x in range(self.board_size[0]):\n for y in range(self.board_size[0]):\n new_board_state[x][y] = self.next_state_of_cell(x,y)\n \n self.set_state(new_board_state)", "def current_state(self):\n\n square_state = np.zeros((4, self.width, self.height))\n if self.state:\n moves, players = np.array(list(zip(*self.state.items())))\n move_curr = moves[players == self.current_player]\n move_oppo = moves[players != self.current_player]\n square_state[0][move_curr // self.width,\n move_curr % self.height] = 1.0\n square_state[1][move_oppo // self.width,\n move_oppo % self.height] = 1.0\n # indicate the last move location\n square_state[2][self.last_move // self.width,\n self.last_move % self.height] = 1.0\n if len(self.state) % 2 == 0:\n square_state[3][:, :] = 1.0 # indicate the colour to play\n return square_state", "def nqueens(size):\n if type(size) is not int:\n print(\"N must be a number\")\n return\n if size < 4:\n print(\"N must be at least 4\")\n return\n queens = [0] * size\n\n def printsolution(queens):\n print(\"[[0, \", queens[0], \"]\", sep=\"\", end=\"\")\n for y, x in enumerate(queens[1:], 1):\n print(\", [\", y, \", \", x, \"]\", sep=\"\", end\"\")\n print(\"]\")\n\n def queencalc(queen):\n \"\"\"Recursive call queen position validator\"\"\"\n for x in range(size):\n \"\"\"horizontal board positions per queen\"\"\"\n nextx = 0\n for y in range(queen):\n qx = queens[y]\n if x == qx or x + queen == qx + y or x - queen == qx - y:\n nextx = 1\n break\n if nextx == 1:\n nextx == 0\n continue\n if queen != size - 1:\n queens[queen + 
1] = 0\n queens[queen] = x\n queencalc(queen + 1)\n else:\n queens[queen] = x\n printsolution(queens)\n queencalc(0)", "def print_board(self):\n for i in range(self.size):\n print(\" \".join(self.board[i]))\n print(\"\\n\")", "def set_numcells(self, N = []):\n self.set_gids(N)\n self.create_cells()\n\n #self.syn_output() # generate synaptic \"output\" in neuron\n #self.connect_cells()" ]
[ "0.6484789", "0.63871104", "0.6277676", "0.61892223", "0.617585", "0.6139663", "0.6053929", "0.603311", "0.60163975", "0.60072374", "0.59916526", "0.59657776", "0.59387004", "0.5936951", "0.59288216", "0.5927014", "0.5874181", "0.58594954", "0.58288336", "0.57523865", "0.5734592", "0.57227856", "0.5707126", "0.5697907", "0.5692992", "0.56538385", "0.55554706", "0.553726", "0.5530524", "0.55050683", "0.5497017", "0.54797393", "0.5467647", "0.54656965", "0.5463494", "0.5448604", "0.5443742", "0.54269546", "0.5415522", "0.54098547", "0.540003", "0.5395787", "0.53949535", "0.53917515", "0.5390327", "0.53884673", "0.53810775", "0.53788775", "0.5373769", "0.5360231", "0.53595287", "0.53508955", "0.53452104", "0.5338196", "0.5333844", "0.5302688", "0.5299605", "0.5297593", "0.52831197", "0.5275468", "0.52729464", "0.5270707", "0.5266778", "0.5259794", "0.5259711", "0.5258456", "0.5256884", "0.5250513", "0.52479684", "0.524034", "0.52388656", "0.5225973", "0.5225114", "0.52223647", "0.52212954", "0.5209328", "0.520868", "0.5198666", "0.5196659", "0.5194438", "0.5189164", "0.51827896", "0.5173883", "0.51728344", "0.5169751", "0.51658", "0.51635337", "0.5159534", "0.5158095", "0.5157982", "0.5157765", "0.51469177", "0.5143248", "0.514094", "0.51349133", "0.512979", "0.5127444", "0.51055616", "0.51045054", "0.50958645", "0.50897574" ]
0.0
-1
Calculate net income after federal and state tax
def calc_tax(state: str, income: float, federal_tax: float = 10.0):
    states_taxes = {
        "AL": ("Alabama", 5),
        "AK": ("Alaska", 3),
        "FL": ("Florida", 4),
        "IL": ("Illinois", 8),
    }
    state = state.upper()
    if state not in states_taxes:
        raise AssertionError("Taxes calculation of '{}' is not available. List: {}"
                             .format(state, states_taxes.keys()))
    net = income - (income * federal_tax / 100)
    print("Net after Federal Taxes:", net)
    tax_to_deduct = net * states_taxes[state][1] / 100.0
    net = net - tax_to_deduct
    print("Net after {} Taxes: {}".format(states_taxes[state][0], net))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def monthly_net_income(gross_income, income_tax):\n return gross_income - income_tax", "def calculate_taxes(self, proforma, technologies):\n tax_calcs = copy.deepcopy(proforma)\n # 1) Redistribute capital cost according to the DER's MACRS value to get depreciation\n for der_inst in technologies:\n tax_contribution = der_inst.tax_contribution(self.macrs_depreciation,\n tax_calcs.index, self.start_year)\n if tax_contribution is not None:\n tax_calcs = pd.concat([tax_calcs, tax_contribution], axis=1)\n # 2) calculate yearly_net (taking into account the taxable contribution of each technology\n # asset)\n yearly_net = tax_calcs.sum(axis=1)\n tax_calcs['Taxable Yearly Net'] = yearly_net\n\n # 3) Calculate State tax based on the net cash flows in each year\n tax_calcs['State Tax Burden'] = yearly_net * -self.state_tax_rate\n\n # 4) Calculate Federal tax based on the net cash flow in each year minus State taxes\n # from that year\n yearly_net_post_state_tax = yearly_net + tax_calcs['State Tax Burden']\n tax_calcs['Federal Tax Burden'] = yearly_net_post_state_tax * -self.federal_tax_rate\n\n # 5) Add the overall tax burden (= state tax + federal tax) to proforma\n tax_calcs['Overall Tax Burden'] = tax_calcs['State Tax Burden'] + tax_calcs['Federal Tax Burden']\n proforma['State Tax Burden'] = tax_calcs['State Tax Burden']\n proforma['Federal Tax Burden'] = tax_calcs['Federal Tax Burden']\n proforma['Overall Tax Burden'] = tax_calcs['Overall Tax Burden']\n self.tax_calculations = tax_calcs\n return proforma", "def total_cost_w_tax(tax_rate, state, cost_amount):\n state.upper()\n default_tax_rate = 0.05\n if state == 'CA':\n total_cost = (cost_amount * .07) + cost_amount\n elif tax_rate != 0.07 or tax_rate != 0.05:\n total_cost = (cost_amount * tax_rate) + cost_amount\n else:\n total_cost = (cost_amount * default_tax_rate) + cost_amount\n return total_cost, state.upper()", "def calculateSingleTax(monthlyIncome):\n pass", "def tax_rate(self) -> float:\n return round((self.total / self.income) * 100, 2)", "def cash_income(df):\n return (df.aftertax_income -\n (1 - tc.HOUSING_CASH_SHARE) * df.housing_ben -\n (1 - tc.MCAID_CASH_SHARE) * df.mcaid_ben -\n (1 - tc.MCARE_CASH_SHARE) * df.mcare_ben -\n (1 - tc.OTHER_CASH_SHARE) * df.other_ben -\n (1 - tc.SNAP_CASH_SHARE) * df.snap_ben -\n (1 - tc.SSI_CASH_SHARE) * df.ssi_ben -\n (1 - tc.TANF_CASH_SHARE) * df.tanf_ben -\n (1 - tc.VET_CASH_SHARE) * df.vet_ben -\n (1 - tc.WIC_CASH_SHARE) * df.wic_ben)", "def calculateMarriedTax(husbandIncome, wifeIncome):\n pass", "def before_tax_profit(self):\n\n _before_tax_profit = self.net_revenue() - self.depreciation()\n return _before_tax_profit", "def _ebit(self):\n return self.net_income + self.tax_expense + self.interest_expense", "def market_income(df):\n return df.expanded_income - df[mdf.BENS].sum(axis=1)", "def _compute_tax(self):\n for line in self:\n line.tax = (line.amount_untaxed * 14) / 100", "def tax_rate(self, income):\n if income <= 1500:\n rate = 0.03\n elif income <= 4500:\n rate = 0.1\n elif income <= 9000:\n rate = 0.2\n elif income <= 35000:\n rate = 0.25\n elif income <= 55000:\n rate = 0.3\n elif income <= 80000:\n rate = 0.35\n else:\n rate = 0.45\n return rate", "def after_tax_profit(self):\n _after_tax_profit = self.before_tax_profit() * (1 - self.tax_rate)\n return _after_tax_profit", "def total(self) -> float:\n\n remained_to_be_taxed = self.income\n # taxed = list()\n self.tax_amounts = []\n start_tax_range = 0\n end_tax_range = self.bracket\n\n for i, b in enumerate(self.bracket):\n\n 
amount_to_tax = b.end - start_tax_range\n t = Taxed(min(amount_to_tax, remained_to_be_taxed), b.rate,\n min(amount_to_tax, remained_to_be_taxed) * b.rate)\n self.tax_amounts.append(t)\n # print(i, start_t ax_range, b.end, amount_to_tax, b.rate)\n\n remained_to_be_taxed -= amount_to_tax\n # print(remained_to_be_taxed)\n\n if b.end > self.income:\n break\n\n start_tax_range = b.end\n\n # print(taxed)\n return sum([t.tax for t in self.tax_amounts])", "def total_item_cost(state, cost_before_tax, tax = .05):\n\n\tif state == \"CA\":\n\t\ttax = .07\n\t\n\ttotal_cost = cost_before_tax + (cost_before_tax * tax)\n\n\treturn total_cost", "def cash_flow(self):\n _cash_flow = self.after_tax_profit() + self.depreciation()\n return _cash_flow", "def basket_total_incl_tax(self):\n return self.total_incl_tax - self.shipping_incl_tax - self.surcharge_incl_tax", "def calculate_tax_for_new_regime(income, **kwargs):\n tax_amount = 0\n init_income = income\n\n slab_breaks = sorted(new_tax_regime.keys(), reverse=True)\n\n for upper_tax_slab in slab_breaks:\n if income > upper_tax_slab:\n curr_tax = new_tax_regime[upper_tax_slab] * (income - upper_tax_slab)\n tax_amount += curr_tax\n print(\"Tax slab: {}\\tAmount in the slab: {}\\tTax amount: {}\".format(upper_tax_slab, (income - upper_tax_slab), curr_tax))\n income = upper_tax_slab\n\n if init_income <= 500000:\n rebate = -1 * tax_amount\n print(\"Rebate: {}\".format(rebate))\n tax_amount += rebate\n\n health_ed_cess = 0.04 * tax_amount # 4% health and education cess on the tax amount\n print(\"Cess: {}\".format(health_ed_cess))\n tax_amount += health_ed_cess\n\n return tax_amount", "def calculator(self, income):\n annuity = float(config.basic(income)) # ็คพไฟๆ€ป้ข\n out = []\n if float(income) > 3500.00:\n taxable_income = (float(income) - float(annuity) - 3500.00) # ่ฏพ็จŽๅฏน่ฑก้‡‘้ข\n taxrate = self.tax_rate(taxable_income) # ็จŽ็Ž‡\n deduction = deductions[taxrate] # ้€Ÿ็ฎ—ๆ‰ฃ้™คๆ•ฐ\n tax = taxable_income * taxrate - deduction # ไธช็จŽ้‡‘้ข\n after = float(income) - float(tax) - float(annuity) # ็จŽๅŽๅทฅ่ต„\n # print(\"็คพไฟๆ€ป้ข:{}๏ผŒ ไธช็จŽ้‡‘้ข๏ผš{}๏ผŒ ็จŽๅŽๅทฅ่ต„๏ผš{}\".format(annuity, tax, after))\n else:\n tax = 0.00 # ไธช็จŽ้‡‘้ข\n after = float(income) - annuity\n for i in [annuity, tax, after]:\n out.append('{:.2f}'.format(i))\n return out", "def tax(subtotal, discount):\n return (subtotal - discount) * 0.12", "def calculateMarriedTax(husbandIncome, wifeIncome):\r\n pass\r\n a = husbandIncome + wifeIncome\r\n if a == 0:\r\n return 0 \r\n elif 1 <= a <= 19050:\r\n return 10\r\n elif 19051 <= a <= 77400:\r\n return 12\r\n elif 77401 <= a <= 165000:\r\n return 22\r\n elif 165001 <= a <= 315000:\r\n return 24\r\n elif 315001 <= a <= 400000:\r\n return 32\r\n elif 400001 <= a <= 600000:\r\n return 35\r\n else:\r\n return 37", "def PV_NetCashflows(t):\n if t > last_t:\n return 0\n else:\n return (prj_incm_Premium(t)\n - prj_exps_Total(t)\n - prj_bnft_Total(t) / (1 + DiscRate(t))\n + PV_NetCashflows(t + 1) / (1 + DiscRate(t)))", "def get_total(self):\n\n base_price=5\n if self.species == \"Christmas\":\n base_price=1.5*base_price\n \n total = (1 + self.tax) * self.qty * base_price\n\n if self.order_type==\"international\" and self.qty<10:\n total+=3\n\n return total", "def calculate_tax(subtotal):\n return \"TAX: \"+format_usd(0.0875*subtotal)", "def calculateSingleTax(monthlyIncome):\r\n a = monthlyIncome * 12\r\n if a == 0:\r\n return 0 \r\n elif 1 <= a <= 9525:\r\n return 10\r\n elif 9526 <= a <= 38700:\r\n return 12\r\n elif 38701 <= a <= 
82500:\r\n return 22\r\n elif 82501 <= a <= 157500:\r\n return 24\r\n elif 157501 <= a <= 200000:\r\n return 32\r\n elif 200001 <= a <= 500000:\r\n return 35\r\n else:\r\n return 37", "def calculate_total_cost(state):\n pass", "def get_total(self):\n\n base_price = 5\n \n if self.species == \"Christmas melon\":\n base_price = base_price * 1.5 \n\n total = (1 + self.tax) * self.qty * base_price \n\n if self.order_type == \"international\" and self.qty>10:\n total += 3\n\n\n return total", "def calculate_income(self, request, parent_lookup_client, pk, format=None):\n # TODO: Make this work\n return Response(2345)", "def basket_total_excl_tax(self):\n return self.total_excl_tax - self.shipping_excl_tax - self.surcharge_excl_tax", "def total_discount_incl_tax(self):\n discount = D(\"0.00\")\n for line in self.lines.all():\n discount += line.discount_incl_tax\n return discount", "def tax(bill):\r\n bill *= 1.08\r\n print(\"With tax: %f\" % bill)\r\n return bill", "def netnet(df_balance):\n\n # Calculate NetNet.\n df_result = df_balance[CASH_EQUIV_ST_INVEST].fillna(0) \\\n + df_balance[ACC_NOTES_RECV].fillna(0) * 0.75 \\\n + df_balance[INVENTORIES].fillna(0) * 0.5 \\\n - df_balance[TOTAL_LIABILITIES]\n\n # Rename the result.\n df_result.rename(NETNET, inplace=True)\n\n return df_result", "def net_revenue(self):\n\n _net_revenue = self.sales() * self.unit_contribution()\n return _net_revenue", "def test_tax_net_pay_65(self):\n net_pay_age = tc.total_calc_tax(100, 65)\n self.assertEqual(95, net_pay_age)", "def net_position(self):\n average_price = 0\n sum = 0\n for transaction in self.transactions:\n average_price += abs(transaction[0]/transaction[1])\n sum += transaction[1]\n\n average_price /= len(self.transactions) \n average_price *= sum\n \n return average_price", "def calculate_total_price(total, taxes):\n total_price = total + taxes\n return total_price", "def tax(bill):\n bill *= 1.08\n print \"With tax: %f\" % bill\n return bill", "def tax(bill):\n bill *= 1.08\n print \"With tax: %f\" % bill\n return bill", "def tax(bill):\n bill *= 1.08\n print \"With tax: %.2f\" % bill\n return bill", "def calculate_tax_for_old_regime(income,\n savings_80c=0,\n savings_80ccd=0,\n savings_80d=0,\n age=25,\n parent_age=55\n ):\n tax_amount = 0\n savings_80c = min(savings_limit['80C'], savings_80c)\n savings_80ccd = min(savings_limit['80CCD'], savings_80c)\n savings_limit_80d = savings_limit['80D'] * (2 if age > 60 else 1)\n savings_limit_80d += savings_limit['80D'] * (2 if parent_age > 60 else 1)\n savings_80d = min(savings_limit_80d, savings_80d)\n\n total_savings = savings_80c + savings_80ccd + savings_80d\n print(\"Total savings: {}\".format(total_savings))\n\n deducted_income = income - total_savings\n init_income = deducted_income\n\n slab_breaks = sorted(old_tax_regime.keys(), reverse=True)\n\n for upper_tax_slab in slab_breaks:\n if deducted_income > upper_tax_slab:\n curr_tax = old_tax_regime[upper_tax_slab] * (deducted_income - upper_tax_slab)\n tax_amount += curr_tax\n print(\"Tax slab: {}\\tAmount in the slab: {}\\tTax amount: {}\".format(upper_tax_slab, (deducted_income - upper_tax_slab), curr_tax))\n deducted_income = upper_tax_slab\n\n if init_income <= 500000:\n rebate = -1 * tax_amount\n print(\"Rebate: {}\".format(rebate))\n tax_amount += rebate\n\n health_ed_cess = 0.04 * tax_amount # 4% health and education cess on the tax amount\n print(\"Cess: {}\".format(health_ed_cess))\n tax_amount += health_ed_cess\n\n return tax_amount", "def tax_calculator(tax, cost):\n return float(tax * 
cost)", "def calculate_total_cost(state):\r\n return state.cost()", "def test_tax_net_pay_45(self):\n net_pay_age = tc.total_calc_tax(100, 45)\n self.assertEqual(91, net_pay_age)", "def test_tax_age_bracket_45(self):\n net_pay_age = elijah.total_calc_tax()\n self.assertEqual(91, net_pay_age)", "def _compute_gasto_subtotal(self):\n beneficio = ingreso_subtotal - gasto_subtotal_comercial", "def income(self, fromdt, todt):\r\n return self._buildTransDict(fromdt, todt, Income)", "def daily_incidents(df2):\n\n if (df2[\"Holiday\"] == \"Thanksgiving Day\") | (df2[\"Holiday\"] == \"Christmas Day\"):\n d_inc = df2[\"Total\"] / 18\n elif df2[\"Holiday\"] == \"Non-holidays\":\n d_inc = df2[\"Total\"] / 6712\n else:\n d_inc = df2[\"Total\"] / 19\n\n return d_inc", "def get_fuel_price(state_abbr, fuel_type=\"NG\", year=False):\n\n if(not year):\n\n year = UpdateParams.today.year\n\n if fuel_type.upper() == \"NG\":\n\n series_ID = \"NG.N3035\" + state_abbr + \"3.A\"\n \n series_USA = \"NG.RNGWHHD.A\"\n \n series_LA = UpdateParams.api.data_by_series(series=\"NG.N3035\" + \"LA\" + \"3.A\")\n \n dict_key_LA = list(series_LA.keys())[0]\n\n elif fuel_type.upper() == \"COAL\":\n\n series_ID = \"COAL.COST.\" + state_abbr + \"-10.A\"\n\n series_USA = \"COAL.COST.US-10.A\"\n\n elif fuel_type.upper() == \"PETRO\":\n # state level wholesale/resale price data ends 2011\n series_ID = \"PET.EMA_EPPR_PWA_S\" + state_abbr + \"_DPG.A\"\n\n series_USA = \"PET.EMA_EPPR_PWG_NUS_DPG.A\"\n\n else:\n raise AssertionError(\"Please input a valid fuel_type\")\n\n fuel_series_USA = UpdateParams.api.data_by_series(series=series_USA)\n \n dict_key_USA = list(fuel_series_USA.keys())[0]\n \n # find latest USA value\n i = 0\n\n while True:\n \n try:\n fp_USA = fuel_series_USA[dict_key_USA][str(year-i) + \" \"] / 1.0\n\n break\n\n except:\n \n i += 1\n\n # Check if state-level available, if not return USA price\n try:\n fuel_series = UpdateParams.api.data_by_series(series=series_ID)\n\n dict_key = list(fuel_series.keys())[0]\n\n # if fuel price in state is empty return national price\n if all(v is None for v in list(fuel_series[dict_key].values())):\n \n return (fp_USA, year-i)\n \n except KeyError:\n \n return (fp_USA, year-i)\n\n j = 0\n\n # find latest year for state\n while True:\n\n try:\n fp_state = fuel_series[dict_key][str(year-j) + \" \"] / 1.0\n\n break\n\n except:\n\n j += 1\n \n if fuel_type.upper() == \"NG\":\n # series_LA is just the actual series not a series ID\n fp_mult = fp_state / series_LA[dict_key_LA][str(year-j) + \" \"]\n return (fp_mult * fp_USA/1.037, year-j)\n \n # return USA value if 2 years more recent vs state\n if ((year-i) - (year-j) >= 2) | (fp_state >= fp_USA):\n \n return (fp_USA/1.037, year-i)\n\n return (fp_state, year-j)", "def count_tax(self,state):\n\t\tassert len(state) == self._size\n\t\tactives = []\t\t#contains indices of elements\n\t\ttaxes = []\n\t\tfor i in range(self._size):\n\t\t\tif state[i] == 1:\n\t\t\t\tactives.append(i)\n\n\t\tfor i in actives:\n\t\t\ttax = 0\n\t\t\tfor j in actives:\n\t\t\t\tif i == j:\n\t\t\t\t\tcontinue\n\t\t\t\tif self.myWeights[i][j] != 0:\n\t\t\t\t\ttax += 1\n\t\t\tif tax == 0:\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\ttaxes.append(self.count_tax_neuron(tax))\n\t\t\ttax = 0\n\t\treturn sum(taxes)", "def test_net_income(self):\n self.assertEqual(\n self.forecast.net_income,\n self.person1.net_income + self.person2.net_income)", "def total_balance(self) -> Decimal:\n return self.incomes_from_outside + self.expenses_to_outside", "def tax(rate, total):\n taxes = 
rate * total\n return taxes", "def get_total(self):\n\n self.base_price = self.get_base_price()\n\n if self.species == \"christmas melon\":\n self.base_price = self.base_price * 1.5\n\n total = (1 + self.tax) * self.qty * self.base_price\n return total", "def _ebitda(self):\n try:\n return self.net_income + self.tax_expense + self.interest_expense + self.depreciation_amortization\n except TypeError:\n logger.exception(\n 'net_income: {}, tax_expense: {}, interest_expense: {}, depreciation_amortization: {}'\n .format(self.net_income, self.tax_expense,\n self.interest_expense,\n self.depreciation_amortization))", "def taxes(self) -> float:\n return self.total", "def test_tax_net_pay_65(self):\n net_pay_age = tc.total_calc_tax(100, 66)\n self.assertEqual(97, net_pay_age)", "def calculate_income_balance(self, request, parent_lookup_client, pk, format=None):\n # TODO: Make this work\n return Response(1357)", "def test_tax_age_bracket_65(self):\n net_pay_age = elijah.total_calc_tax()\n self.assertNotEqual(95, net_pay_age)", "def _compute_amount(self, base_amount, price_unit, quantity=1.0, product=None, partner=None):\n\n self.ensure_one()\n\n if self.amount_type != 'margin':\n return super(AccountTax, self)._compute_amount(\n base_amount,\n price_unit,\n quantity=quantity,\n product=product,\n partner=partner\n )\n\n return base_amount - (base_amount / (1 + self.amount / 100))", "def calculate_profit(self):", "def base_tax_amount(self):\n return self._base_tax_amount", "def get_total(self):\n\n base_price = self.get_base_price()\n if self.species == \"christmas melon\":\n base_price = base_price * 1.5\n\n total = ((1 + self.tax) * self.qty * base_price)\n\n return total", "def calculate_net_proceeds(self, mortgage):\n house_price_when_leaving = self.price\n for _ in range(0, self.length_stay):\n house_price_when_leaving += (house_price_when_leaving * self.price_growth_rate) / 100\n\n mortgage_left = int((mortgage.length - self.length_stay) * 12 * mortgage._calculate_monthly_payment(self.loan_amount))\n\n # http://hoa.org.uk/advice/guides-for-homeowners/i-am-selling/how-much-should-i-pay-the-estate-agent/\n real_estate_fee = (house_price_when_leaving * 0.018)\n real_estate_fee += real_estate_fee / 0.02 # VAT 20%\n\n return house_price_when_leaving - mortgage_left - real_estate_fee", "def get_total(self):\n\n base_price = self.get_base_price()\n\n if self.species == \"Christmas\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def basket_total_before_discounts_incl_tax(self):\n result = self.lines.aggregate(total=Sum(\"line_price_before_discounts_incl_tax\"))\n return result[\"total\"]", "def calculate_balance_income(self, request, parent_lookup_client, pk, format=None):\n # TODO: Make this work\n return Response(5555555)", "def calculateShippingRates(country, itemBaseValue):\n return (itemBaseValue * 0.05) * shipping_rates[country]", "def get_total(self):\n # method on the class DomesticMelonOrder\n base_price = 5\n\n if self.species == \"Christmas melons\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def base_price_incl_tax(self):\n return self._base_price_incl_tax", "def calculate_tax(taxinfo):\n if isinstance(taxinfo, int):\n raise ValueError('Invalid input of type int not allowed')\n people = taxinfo.keys()\n for peo in people:\n if isinstance(taxinfo[peo], str):\n raise ValueError('Allow only numeric input')\n earning = taxinfo[peo]\n if earning <= 1000:\n taxinfo[peo] = 
(earning * 0)\n elif earning in range(1001, 10001):\n taxinfo[peo] = (earning - 10000) * 0.1\n elif earning in range(10001, 20201):\n tax1 = 1000 * 0\n tax2 = 9000 * 0.1\n tax3 = (earning - 10000) * 0.15\n taxinfo[peo] = (tax1 + tax2 + tax3)\n elif earning in range(20201, 30750):\n tax1 = 1000 * 0\n tax2 = 9000 * 0.1\n tax3 = 10200 * 0.15\n tax4 = (earning - 20200) * 0.20\n taxinfo[peo] = (tax1 + tax2 + tax3 + tax4)\n elif earning in range(30751, 50001):\n tax1 = 1000 * 0\n tax2 = 9000 * 0.1\n tax3 = 10200 * 0.15\n tax4 = (30750 - 20200) * 0.20\n tax5 = (earning - 30750) * 0.25\n taxinfo[peo] = (tax1 + tax2 + tax3 + tax4 + tax5)\n elif earning >= 50001:\n tax1 = 1000 * 0\n tax2 = 9000 * 0.1\n tax3 = 10200 * 0.15\n tax4 = (30750 - 20200) * 0.20\n tax5 = (50000 - 30750) * 0.25\n tax6 = (earning - 50000) * 0.30\n taxinfo[peo] = (tax1 + tax2 + tax3 + tax4 + tax5 + tax6)\n return taxinfo", "def _update_ordinary_income(taxinc_change, calc):\n # compute AGI minus itemized deductions, agi_m_ided\n agi = calc.array('c00100')\n ided = np.where(calc.array('c04470') < calc.array('standard'),\n 0., calc.array('c04470'))\n agi_m_ided = agi - ided\n # assume behv response only for filing units with positive agi_m_ided\n pos = np.array(agi_m_ided > 0., dtype=bool)\n delta_income = np.where(pos, taxinc_change, 0.)\n # allocate delta_income into three parts\n # pylint: disable=unsupported-assignment-operation\n winc = calc.array('e00200')\n delta_winc = np.zeros_like(agi)\n delta_winc[pos] = delta_income[pos] * winc[pos] / agi_m_ided[pos]\n oinc = agi - winc\n delta_oinc = np.zeros_like(agi)\n delta_oinc[pos] = delta_income[pos] * oinc[pos] / agi_m_ided[pos]\n delta_ided = np.zeros_like(agi)\n delta_ided[pos] = delta_income[pos] * ided[pos] / agi_m_ided[pos]\n # confirm that the three parts are consistent with delta_income\n assert np.allclose(delta_income, delta_winc + delta_oinc - delta_ided)\n # add the three parts to different records variables embedded in calc\n calc.incarray('e00200', delta_winc)\n calc.incarray('e00200p', delta_winc)\n calc.incarray('e00300', delta_oinc)\n calc.incarray('e19200', delta_ided)\n return calc", "def count_taxes(self):\n\t\tfor i in range (self._size):\n\t\t\ttmpState = copy(self.currentState)\n\t\t\ttmpState[i] = 1 - tmpState[i]\n\t\t\tself._taxes[i] = self.count_tax(state=tmpState)", "def _compute_amount(self):\n for line in self:\n price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)\n taxes = line.tax_id.compute_all(price, line.order_id.currency_id, line.product_uom_qty, product=line.product_id, partner=line.order_id.partner_shipping_id)\n line.update({\n 'price_tax': sum(t.get('amount', 0.0) for t in taxes.get('taxes', [])),\n 'price_total': taxes['total_included'],\n 'price_subtotal': taxes['total_excluded'],\n })\n if(line.is_discount_allow and line.price_subtotal > 100):\n line.price_subtotal = line.price_subtotal - 100", "def get_total(self):\n\n base_price = 5\n total = (1 + int(self.tax)) * int(self.qty) * base_price\n\n return total", "def get_total(self):\n\n base_price = self.get_base_price()\n\n # Christmas Melons are more x1.5 expensive than other melons\n if self.species == \"Christmas Melon\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def ebitda(df_income, df_cashflow, formula=NET_INCOME):\n\n if formula == OP_INCOME:\n # Calculate EBITDA using Operating Income formula.\n df_result = df_income[OP_INCOME].fillna(0) \\\n + df_cashflow[DEPR_AMOR].fillna(0)\n elif formula == NET_INCOME:\n 
# Calculate EBITDA using Net Income formula.\n # Note that INTEREST_EXP_NET and INCOME_TAX have negative values\n # so in order to add them back to Net Income, we have to negate them.\n df_result = df_income[NET_INCOME].fillna(0) \\\n - df_income[INTEREST_EXP_NET].fillna(0) \\\n - df_income[INCOME_TAX].fillna(0) \\\n + df_cashflow[DEPR_AMOR].fillna(0)\n else:\n # Raise exception because of invalid arg.\n msg = 'arg `formula` was invalid.'\n raise ValueError(msg)\n\n # Rename the result.\n df_result.rename(EBITDA, inplace=True)\n\n return df_result", "def get_fee_pct(self, contract_type: str) -> Tuple[float, float]:\n if contract_type == 'forex':\n return (0.00002, 0.00002)\n elif contract_type == 'crypto':\n if self.CRYPTO_EXCHANGE == 'binance':\n if self.trade_volume < 50_000:\n return (.001, .001)\n elif self.trade_volume < 100_000:\n return (.0009, .0009)\n elif self.trade_volume < 5000_000:\n return (.0009, .0008)\n elif self.trade_volume < 1_000_000:\n return (.0008, .0007)\n elif self.trade_volume < 5_000_000:\n return (.0007, .0005)\n elif self.trade_volume < 10_000_000:\n return (.0006, .0004)\n elif self.trade_volume < 25_000_000:\n return (.0006, 0)\n elif self.trade_volume < 100_000_000:\n return (.0005, 0)\n elif self.trade_volume < 250_000_000:\n return (.0004, 0)\n elif self.trade_volume < 500_000_000:\n return (.0003, 0)\n else: return (.0002, 0)\n elif self.CRYPTO_EXCHANGE == 'kraken':\n if self.trade_volume < 50_000:\n return (.0026, .0016)\n elif self.trade_volume < 100_000:\n return (.0024, .0014)\n elif self.trade_volume < 250_000:\n return (.0022, .0012)\n elif self.trade_volume < 500_000:\n return (.002, .001)\n elif self.trade_volume < 1_000_000:\n return (.0018, .0008)\n elif self.trade_volume < 2_500_000:\n return (.0016, .0006)\n elif self.trade_volume < 5_000_000:\n return (.0014, .0004)\n elif self.trade_volume < 10_000_000:\n return (.0012, .0002)\n else: return (.001, 0)\n elif self.CRYPTO_EXCHANGE == 'coinbase':\n if self.trade_volume < 10_000:\n return (.005, .005)\n elif self.trade_volume < 50_000:\n return (.0035, .0035)\n elif self.trade_volume < 100_000:\n return (.0025, .0015)\n elif self.trade_volume < 1_000_000:\n return (.002, .001)\n elif self.trade_volume < 10_000_000:\n return (.0018, .0008)\n elif self.trade_volume < 50_000_000:\n return (.0015, .0005)\n elif self.trade_volume < 300_000_000:\n return (.0007, 0)\n elif self.trade_volume < 500_000_000:\n return (.0005, 0)\n else: return (.0004, 0)\n elif self.CRYPTO_EXCHANGE == 'robinhood':\n return (0.0001, 0.0001)\n return (0, 0)", "def base_discount_tax_compensation_amount(self):\n return self._base_discount_tax_compensation_amount", "def taxicab(a, b):\n \"*** YOUR CODE HERE ***\"\n return abs(street(a) - street(b)) + abs(avenue(a) - avenue(b))", "def taxicab(a, b):\n \"*** YOUR CODE HERE ***\"\n return abs(street(a) - street(b)) + abs(avenue(a) - avenue(b))", "def get_max_fp(state_abbr, fuel_type=\"NG\", year=False):\n \n if(not year):\n\n year = UpdateParams.today.year\n\n if fuel_type.upper() == \"NG\":\n\n series_ID = \"NG.N3035\" + state_abbr + \"3.A\"\n\n elif fuel_type.upper() == \"COAL\":\n\n series_ID = \"COAL.COST.\" + state_abbr + \"-10.A\"\n\n elif fuel_type.upper() == \"PETRO\":\n\n series_ID = \"PET.EMA_EPPR_PWA_S\" + state_abbr + \"_DPG.A\"\n\n else:\n raise AssertionError(\"Please input a valid fuel_type\")\n \n # Check if state-level available, if not return USA price\n try:\n fuel_series = UpdateParams.api.data_by_series(series=series_ID)\n\n dict_key = 
list(fuel_series.keys())[0]\n\n # if fuel price in state is empty return national price\n if all(v is None for v in list(fuel_series[dict_key].values())):\n \n return 0.0\n \n except KeyError:\n \n return 0.0\n \n j = 0\n \n while True:\n\n try:\n return fuel_series[dict_key][str(year-j) + \" \"] / 1.0\n\n break\n\n except:\n\n j += 1", "def monthly_income_tax(annual_salary, tax_brackets):\n income_tax = 0\n\n for index, bracket in enumerate(tax_brackets):\n if annual_salary >= bracket['threshold']:\n if index == 0:\n income_tax += bracket['threshold'] * bracket['rate']\n else:\n threshold_difference = bracket['threshold'] - \\\n tax_brackets[index - 1]['threshold']\n income_tax += threshold_difference * bracket['rate']\n else:\n if index == 0:\n additional_salary = annual_salary - bracket['threshold']\n income_tax += additional_salary * bracket['rate']\n return income_tax / 12\n else:\n additional_salary = annual_salary - \\\n tax_brackets[index - 1]['threshold']\n income_tax += additional_salary * bracket['rate']\n return round_nearest_whole_dollar(income_tax / 12)", "def find_balanced_budget_tax(c):\n def steady_state_budget(t):\n e, u, w = compute_steady_state_quantities(c, t)\n return t - u * c\n\n tau = brentq(steady_state_budget, 0.0, 0.9 * c)\n return tau", "def incomes_from_outside(self) -> Decimal:\n return Decimal(\n sum(\n [\n t.amount\n for t in self.transactions_all\n if t.amount > 0 and not t.other_party.is_user_owner\n ]\n )\n )", "def netasset(self,coef):\n net_total = sum([share.sharetotal() for share in self.shares])*(1-coef)\n return net_total", "def pay_tax(self):\n\t\t# the money comes from nowhere, settlers seem to have an infinite amount of money.\n\t\t# see http://wiki.unknown-horizons.org/index.php/DD/Economy/Settler_taxing\n\t\thappiness_tax_modifier = (float(self.happiness)-50)/200 + 1\n\t\ttaxes = self.tax_base * happiness_tax_modifier * self.inhabitants * self.settlement.tax_setting\n\t\ttaxes = int(round(taxes))\n\t\tself.settlement.owner.inventory.alter(RES.GOLD_ID, taxes)\n\t\tself.last_tax_payed = taxes\n\n\t\t# decrease happiness\n\t\thappiness_decrease = taxes + self.tax_base + ((self.settlement.tax_setting-1)*10)\n\t\thappiness_decrease = int(round(happiness_decrease))\n\t\tself.inventory.alter(RES.HAPPINESS_ID, -happiness_decrease)\n\t\tself._changed()\n\t\tself.log.debug(\"%s: pays %s taxes, -happy: %s new happiness: %s\", self, taxes, \\\n\t\t\t\t\t\t\t\t\t happiness_decrease, self.happiness)", "def _compute_amount(self):\n for line in self:\n price = line.price_unit\n taxes = line.tax_id.compute_all(price, line.order_id.currency_id, line.product_uom_qty,\n product=line.product_id, partner=line.order_id.partner_shipping_id)\n self_price_subtotal = taxes['total_excluded']\n if not line.discount_fixed_percent:\n self_price_subtotal = self_price_subtotal\n if line.discount_fixed_percent == 'Percent':\n self_price_subtotal = self_price_subtotal * (1 - (line.discount or 0.0) / 100.0)\n if line.discount_fixed_percent == 'Fixed':\n self_price_subtotal = self_price_subtotal - line.discount_value\n line.update({\n 'price_tax': sum(t.get('amount', 0.0) for t in taxes.get('taxes', [])),\n 'price_total': taxes['total_included'],\n 'price_subtotal': self_price_subtotal,\n })", "def get_profit(self):\n # Profit from previous transactions\n values = [t['value'] for t in self.transactions]\n\n profits = []\n base = None\n for v in values:\n if not base:\n base = v\n profit = v - base\n profits.append(profit)\n base = v\n\n return np.array(profits).sum()\n\n # Get 
all values to get profit\n #return np.array([ s['value'] for s in self.states ]).mean()", "def calculate_price(self):\n\n cargo_weight = self.cargo.weight\n tax_rate = Decimal(0.18)\n\n untaxed_total = Decimal(cargo_weight) * Decimal(self.price_per_unit_weight)\n\n total_price = (untaxed_total * tax_rate) + untaxed_total\n\n return total_price", "def inflatedCost(self):\n\t\tinflated = self.cost\n\n\t\t# https://www.in2013dollars.com/Wine-at-home/price-inflation/2020-to-2021?amount=10000\n\t\tif self.acquisition.year <= 2018: # 2018-to-2019\n\t\t\tinflated *= 1.010727\n\n\t\tif self.acquisition.year <= 2019: # 2019-to-2020\n\t\t\tinflated *= 1.002446\n\n\t\tif self.acquisition.year <= 2020: # 2020-to-2021\n\t\t\tinflated *= 1.010612\n\n\t\tif self.acquisition.year <= 2021: # 2021-to-2022\n\t\t\tinflated *= 1.011850\n\n\t\treturn round(inflated, 2)", "def get_income_fields():\n less_than_30k_fields = [\n 'B19001_002E', #\tLess than $10,000\t\n 'B19001_003E', #\t$10,000 to $14,999\t\n 'B19001_004E', #\t$15,000 to $19,999\t\n 'B19001_005E', #\t$20,000 to $24,999\t\n 'B19001_006E', #\t$25,000 to $29,999\t\n ]\n inc_30k_to_39k_fields = [\n 'B19001_007E', #\t$30,000 to $34,999\t\n 'B19001_008E', #\t$35,000 to $39,999\n ]\n inc_40k_to_49k_fields = [\n 'B19001_009E', #\t$40,000 to $44,999\t\n 'B19001_010E', #\t$45,000 to $49,999 \n ]\n inc_50k_to_74k_fields = [\n 'B19001_011E', #\t$50,000 to $59,999\t\n 'B19001_012E', #\t$60,000 to $74,999\n ]\n inc_75k_to_99k_fields = [\n 'B19001_013E' #\t$75,000 to $99,999\n ]\n inc_100k_to_149k_fields = [\n 'B19001_014E', #\t$100,000 to $124,999\t\n 'B19001_015E', #\t$125,000 to $149,999\t\n ]\n inc_150k_plus_fields = [\n 'B19001_016E', #\t$150,000 to $199,999\t\n 'B19001_017E', #\t$200,000 or more\n ]\n\n income_fields = OrderedDict()\n income_fields[ 'less_than_30k' ] = { 'label': '<$30,000', 'fields': less_than_30k_fields }\n income_fields[ 'inc_30k_to_39k' ] = { 'label': '$40,000 to $49,999', 'fields': inc_30k_to_39k_fields }\n income_fields[ 'inc_40k_to_49k' ] = { 'label': '$40,000 to $49,999', 'fields': inc_40k_to_49k_fields }\n income_fields[ 'inc_50k_to_74k' ] = { 'label': '$50,000 to $74,999', 'fields': inc_50k_to_74k_fields }\n income_fields[ 'inc_75k_to_99k' ] = { 'label': '$75,000 to $99,999', 'fields': inc_75k_to_99k_fields }\n income_fields[ 'inc_100k_to_149k' ] = { 'label': '$100,000 to $149,999', 'fields': inc_100k_to_149k_fields }\n income_fields[ 'inc_150k_plus' ] = { 'label': '$150,000+', 'fields': inc_150k_plus_fields }\n\n return income_fields", "def compute_interest(self) -> float:\n interest = self._balance * SavingsAccount.RATE\n self.deposit(interest)\n return interest", "def price_incl_tax(self):\n return self._price_incl_tax", "def get_amount_out(amount_in, reserve_in, reserve_out):\n assert amount_in > 0\n assert reserve_in > 0 and reserve_out > 0\n amount_in_with_fee = amount_in*997\n numerator = amount_in_with_fee*reserve_out\n denominator = reserve_in*1000 + amount_in_with_fee\n return float(numerator/denominator)", "def monthly_gross_income(annual_salary):\n gross_income = annual_salary / 12\n return round_nearest_whole_dollar(gross_income)", "def calculate_fn_value(self) :\r\n\r\n self.fn_value = self.gn_value + self.hn_value #f(n) = g(n) + h(n)\r", "def total_earnings(self):\n return self.total_balance - self.net_invested", "def calculate_meal_costs(meal_base, tax_rate, tip_rate):\n tax_value = calculate_rate(meal_base, tax_rate)\n meal_with_tax = tax_value + meal_base\n tip_value = calculate_rate(meal_with_tax, tip_rate)\n 
total = meal_with_tax + tip_value\n meal_info = dict(meal_base=meal_base,\n tax_rate=tax_rate,\n tip_value=tip_value,\n tax_value=tax_value,\n total = total)\n return meal_info", "def calc_annual_heating_savings (self):\n price = (self.diesel_prices + self.cd['heating fuel premium'])\n\n #~ self.base_heating_cost =\n\n #~ self.proposed_heating_cost =\n\n\n\n\n self.annual_heating_savings = self.reduction_diesel_used * price\n #~ print 'self.annual_heating_savings',self.annual_heating_savings", "def get_transfer_fee(value: float) -> float:\n return (value * (0.99 / 100)) + 4.9" ]
[ "0.7409301", "0.67553556", "0.66763127", "0.6645111", "0.64896935", "0.64231426", "0.63536626", "0.6341549", "0.6228795", "0.61981183", "0.61333615", "0.6070119", "0.6002093", "0.5992931", "0.59346694", "0.5908333", "0.589499", "0.58864194", "0.5868002", "0.585597", "0.58301634", "0.5816652", "0.57899517", "0.5778726", "0.57654756", "0.574517", "0.5708727", "0.56944066", "0.56845605", "0.5674653", "0.56701785", "0.56590426", "0.5650512", "0.561369", "0.5599767", "0.55959004", "0.55887294", "0.55887294", "0.5571808", "0.5562869", "0.5554224", "0.55486184", "0.5542797", "0.5540683", "0.55369276", "0.5533226", "0.55221844", "0.5502512", "0.548213", "0.5472223", "0.54516166", "0.5438945", "0.5423145", "0.542248", "0.5421524", "0.54209614", "0.54147536", "0.5408733", "0.53980935", "0.537936", "0.53763384", "0.5369255", "0.536566", "0.5361664", "0.5360682", "0.5348135", "0.53467554", "0.53438556", "0.53305525", "0.531575", "0.5296701", "0.52944684", "0.5290974", "0.527987", "0.52763075", "0.5266422", "0.5262995", "0.5260929", "0.52606857", "0.52606857", "0.5252572", "0.5249781", "0.5243731", "0.52410626", "0.5206623", "0.517694", "0.5167739", "0.5161113", "0.5157596", "0.5153801", "0.51414883", "0.51291955", "0.51239", "0.51194155", "0.5119415", "0.5118613", "0.5115231", "0.51146257", "0.5101454", "0.5099078" ]
0.7955427
0
reads in a file and returns np.arrays containing values for frequency and amplitude
def makespectfile(afile):
    x = []
    y = []
    with open(afile) as f:
        for line in f:
            if line.startswith('#'):
                continue
            (freq,flux) = line.split()
            x.append(float(freq))
            y.append(float(flux))
    return (np.asarray(x),np.asarray(y))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _read_filter_data(filename):\n gains = []\n freqs = []\n freq_scale = 0\n with open(filename) as f:\n for line in f:\n words = line.split()\n if line.startswith('Freq'):\n _, scale = words[0].split(\"(\")\n scale = scale.rstrip(\")\")\n if scale==\"Hz\":\n freq_scale = 1\n elif scale==\"kHz\":\n freq_scale = 1e3\n elif scale==\"MHz\":\n freq_scale = 1e6\n elif scale==\"GHz\":\n freq_scale = 1e9\n else:\n raise ValueError(\"Cannot parse line: '\"+line+\"'\")\n elif len(words)==3 and words[0]!=\"Total\":\n f, g, p = line.split(\",\")\n freq = float(f) * freq_scale\n gain = float(g)\n phase = float(p)\n freqs.append(freq)\n gains.append(gain * np.exp(1j*phase))\n\n return np.array(gains), np.array(freqs)", "def read_wavetxt(path):\n with open(path) as f:\n for line in f.readlines():\n line = line.strip()\n if 'SampleFrequence' in line:\n freq = int(line[16:])\n elif 'DataInput' in line:\n series = np.array(line[10:].split(',')).astype(np.float64)\n return (freq, series)", "def parse_frequencies(path: str,\n software: Optional[str] = None,\n ) -> np.ndarray:\n if not os.path.isfile(path):\n raise InputError(f'Could not find file {path}')\n if path.endswith('.yml'):\n content = read_yaml_file(path)\n if isinstance(content, dict) and 'freqs' in content.keys():\n return np.array(content['freqs'], dtype=np.float64)\n software = software.lower() if software is not None else identify_ess(path)\n lines = _get_lines_from_file(path)\n freqs = np.array([], np.float64)\n if software is None:\n return freqs\n if software == 'qchem':\n for line in lines:\n if ' Frequency:' in line:\n items = line.split()\n for i, item in enumerate(items):\n if i:\n freqs = np.append(freqs, [(float(item))])\n elif software == 'gaussian':\n with open(path, 'r') as f:\n line = f.readline()\n while line != '':\n # this line intends to only capture the last occurrence of the frequencies\n if 'and normal coordinates' in line:\n freqs = np.array([], np.float64)\n if 'Frequencies --' in line:\n freqs = np.append(freqs, [float(frq) for frq in line.split()[2:]])\n line = f.readline()\n elif software == 'molpro':\n read = False\n for line in lines:\n if 'Nr' in line and '[1/cm]' in line:\n continue\n if read:\n if line == os.linesep:\n read = False\n continue\n freqs = np.append(freqs, [float(line.split()[-1])])\n if 'Low' not in line and 'Vibration' in line and 'Wavenumber' in line:\n read = True\n elif software == 'orca':\n with open(path, 'r') as f:\n line = f.readline()\n read = True\n while line:\n if 'VIBRATIONAL FREQUENCIES' in line:\n while read:\n if not line.strip():\n line = f.readline()\n elif not line.split()[0] == '0:':\n line = f.readline()\n else:\n read = False\n while line.strip():\n if float(line.split()[1]) != 0.0:\n freqs = np.append(freqs, [float(line.split()[1])])\n line = f.readline()\n break\n else:\n line = f.readline()\n elif software == 'terachem':\n read_output = False\n for line in lines:\n if '=== Mode' in line:\n # example: '=== Mode 1: 1198.526 cm^-1 ==='\n freqs = np.append(freqs, [float(line.split()[3])])\n elif 'Vibrational Frequencies/Thermochemical Analysis After Removing Rotation and Translation' in line:\n read_output = True\n continue\n elif read_output:\n if 'Temperature (Kelvin):' in line or 'Frequency(cm-1)' in line:\n continue\n if not line.strip():\n break\n # example:\n # 'Mode Eigenvalue(AU) Frequency(cm-1) Intensity(km/mol) Vib.Temp(K) ZPE(AU) ...'\n # ' 1 0.0331810528 170.5666870932 52.2294230772 245.3982965841 0.0003885795 ...'\n freqs = np.append(freqs, 
[float(line.split()[2])])\n elif software == 'xtb':\n read_output = False\n for line in lines:\n if read_output:\n if 'eigval :' in line:\n splits = line.split()\n for split in splits[2:]:\n freq = float(split)\n if freq:\n freqs = np.append(freqs, freq)\n else:\n break\n if 'vibrational frequencies' in line:\n read_output = True\n if freqs.size == 0:\n # Could not read freqs from output.out, try reading from vibspectrum\n vibspectrum_path = os.path.join(os.path.dirname(path), 'vibspectrum')\n if os.path.isfile(vibspectrum_path):\n lines = _get_lines_from_file(path=vibspectrum_path)\n for line in lines:\n if '$' not in line and '#' not in line:\n splits = line.split()\n if len(splits) < 5:\n continue\n freq = float(splits[-4]) if is_str_float(splits[-4]) else 0\n if freq:\n freqs = np.append(freqs, freq)\n else:\n raise ParserError(f'parse_frequencies() can currently only parse Gaussian, Molpro, Orca, QChem, TeraChem and xTB '\n f'files, got {software}')\n logger.debug(f'Using parser.parse_frequencies(). Determined frequencies are: {freqs}')\n return freqs", "def read(filename):\n\n fileName, fileExtension = os.path.splitext(filename)\n wav_filename = filename\n rate, data = scipy.io.wavfile.read(str(wav_filename)) # the data is read in its native format\n if data.dtype =='int16':\n data = numpy.cast['float'](data)\n return [rate,data]", "def get_freqs(filename):\n full = io.read_file(filename)\n full = full.strip('\\n')\n full = full.split('[1/cm]')[1].split('Zero')[0]\n full = full.split()\n nfreqs = full[0]\n freqs = full[1:]\n # [freq=float(freq) for freq in freqs]\n freqs = np.array(list(map(float, freqs)))\n a = freqs.argsort()[::-1]\n freqs = np.sort(freqs)[::-1]\n return freqs.tolist(), a.tolist()", "def read_file(filename):\n data = []\n with open(filename, 'r') as infile:\n for line in infile:\n data.append([float(value) for value in line.split()])\n data = np.array(data)\n return data.T", "def load(filename):\n root,ext = _os_path.splitext(filename)\n loader = LOADER[ext]\n frequency,raw_signal = loader(filename)\n iinfo = _numpy.iinfo(raw_signal.dtype)\n raw_signal_midpoint = (iinfo.max + iinfo.min)/2.\n raw_signal_range = iinfo.max - raw_signal_midpoint\n unit_output_signal = (raw_signal - raw_signal_midpoint)/raw_signal_range\n return (frequency, unit_output_signal)", "def read_list(f, nb_freqs):\n alist = []\n while len(alist) < nb_freqs:\n line = f.readline()\n splitted = line.split()\n well_splitted = True\n for entry in splitted:\n well_splitted = well_splitted and entry.count('.') <= 1\n if well_splitted:\n entries = splitted\n else:\n if line.count('-') > 0:\n # Probably coming from an SDSS spectrum.\n entries = [line[i:i+12] for i in range(0, len(line) - 1, 12)]\n else:\n entries = [line[i:i+8] for i in range(0, len(line) - 1, 8)]\n for entry in entries:\n try:\n alist.append(float(entry))\n except ValueError:\n # If conversion to float fails, put 0 instead.\n alist.append(0)\n return numpy.array(alist)", "def readRawSamples(fname):\n\n d = numpy.fromfile(fname, dtype=numpy.float32)\n #d = d.astype(numpy.float64)\n #d = (d - 128) / 128.0\n\n return d[::2] + 1j * d[1::2]", "def inputwav(filename):\n data, sr = sf.read(filename)\n print('Decoding \"'+filename+'\"...')\n print('Sample rate is '+str(sr)+'...')\n try:\n ch=len(data[0,])\n except:\n ch=1\n print('File contains '+str(ch)+' audio channel(s)...')\n #Reshape the data so other functions can interpret the array if mono.\n #basically transposing the data\n if ch==1:\n data=data.reshape(-1,1)\n n=len(data)\n #This 
prevents log(data) producing nan when data is 0\n data[np.where(data==0)]=0.00001\n #convert to dB\n data_dB=20*np.log10(abs(data))\n return n, data,data_dB,sr, ch", "def read_xyz(filename, freq):\n\n\n#xyz file\n\n Atoms = []\n Coordinates = []\n\n xyz = open(filename)\n frame = 0\n while True:\n\n n_atoms = xyz.readline()\n\n if n_atoms == '':\n break\n else:\n n_atoms = int(n_atoms)\n title = xyz.readline()\n\n if frame%freq==0:\n atoms, coordinates = read_frame(xyz, n_atoms)\n Coordinates.append(coordinates)\n Atoms.append(atoms)\n\n else:\n read_frame(xyz, n_atoms)\n frame+=1\n\n return Atoms, Coordinates", "def readNextGenSpectrum(fname=''):\n\n print('Reading : ', fname)\n\n with open(fname, 'r') as rfile:\n dum = rfile.readline()\n sdum = dum.split()\n teff = float(sdum[0])\n logg = float(sdum[1])\n mph = float(sdum[2])\n dum = rfile.readline()\n nwav = float(dum.split()[0])\n\n bigline = []\n dum = rfile.readline()\n while dum.strip() != '':\n sdum = dum.split()\n for i in range(len(sdum)):\n bigline.append(float(sdum[i]))\n dum = rfile.readline()\n\n bigline = np.array(bigline)\n # Convert wavelength from angstrom to micron\n wav = bigline[:nwav] / 1e4\n inu = bigline[nwav:2 * nwav]\n bnu = bigline[nwav * 2:nwav * 3]\n\n ii = wav.argsort()\n wav = wav[ii]\n inu = inu[ii] * 1e-8 * wav * 1e4 / np.pi / (29979245800.0 / wav * 1e4)\n bnu = bnu[ii] * 1e-8 * wav * 1e4 / np.pi / (29979245800.0 / wav * 1e4)\n\n #\n # The unit is now erg/s/cm/Hz/ster\n #\n\n return {'teff': teff, 'logg': logg, 'mph': mph, 'nwav': nwav, 'wav': wav, 'inu': inu, 'bnu': bnu}", "def get_data(filepath):\n with open(filepath, 'r') as f:\n lines = [l.strip().split(',') for l in f.readlines()]\n data_set = [np.array(l, dtype=float) for l in lines]\n return np.array(data_set)", "def analyzeWAV(inputFile):\n data, fs, nbits = audiolab.wavread(inputFile)\n samplingRate = fs\n return [data, samplingRate]", "def load_regain_values(filename):\n gain_lines = open(filename,\"r\").readlines()\n gain_lines = [l.split() for l in gain_lines if len(l)>0 and l[0]!='#'] #remove comments and blanks\n tubes,gain_vals = zip(*[(int(l[0]),float(l[1])) for l in gain_lines])\n return Array(gain_vals)", "def read(cls, filename_datafile_in):\n with open(lal_cuda.full_path_datafile(filename_datafile_in), \"rb\") as outputs_file:\n n_freqs = np.asscalar(np.fromfile(outputs_file, dtype=np.int32, count=1))\n hp = np.fromfile(outputs_file, dtype=np.complex128, count=n_freqs)\n hc = np.fromfile(outputs_file, dtype=np.complex128, count=n_freqs)\n return(cls(hp=hp, hc=hc))", "def load_data(filename):\n data = []\n with open('data/' + filename) as raw_data:\n for line in raw_data.readlines():\n data.append(float(line.strip('\\n')))\n return data\n # data = np.mat(np.genfromtxt('data/' + filename)).T\n # return data", "def load_and_get_stats(filename):\n\n import scipy.io.wavfile as siow\n sampling_rate, amplitude_vector = siow.read(filename)\n\n wav_length = amplitude_vector.shape[0] / sampling_rate\n\n return sampling_rate, amplitude_vector, wav_length", "def _read_arasim_antenna_data(filename):\n data = {}\n freqs = set()\n thetas = set()\n phis = set()\n freq = 0\n with open(filename) as f:\n for line in f:\n words = line.split()\n if line.startswith('freq'):\n freq = 1\n if words[-1]==\"Hz\":\n pass\n elif words[-1]==\"kHz\":\n freq *= 1e3\n elif words[-1]==\"MHz\":\n freq *= 1e6\n elif words[-1]==\"GHz\":\n freq *= 1e9\n else:\n raise ValueError(\"Cannot parse line: '\"+line+\"'\")\n freq *= float(words[-2])\n freqs.add(freq)\n elif 
line.startswith('SWR'):\n swr = float(words[-1])\n elif len(words)==5 and words[0]!=\"Theta\":\n theta = int(words[0])\n thetas.add(theta)\n phi = int(words[1])\n phis.add(phi)\n db_gain = float(words[2])\n # AraSim actually only seems to use the sqrt of the gain\n # (must be gain in power, not voltage)\n # gain = np.sqrt(float(words[3]))\n gain = np.sqrt(10**(db_gain/10))\n phase = np.radians(float(words[4]))\n data[(freq, theta, phi)] = (gain, phase)\n\n # Convert data dictionary into 3-D array of responses\n response = np.empty((len(freqs), len(thetas), len(phis)),\n dtype=np.complex_)\n for i, freq in enumerate(sorted(freqs)):\n for j, theta in enumerate(sorted(thetas)):\n for k, phi in enumerate(sorted(phis)):\n gain, phase = data[(freq, theta, phi)]\n response[i, j, k] = gain * np.exp(1j*phase)\n\n response_data = (response, np.array(sorted(freqs)),\n np.array(sorted(thetas)), np.array(sorted(phis)))\n return _fix_response_wrapping(response_data)", "def readSigfile(filename):\r\n gtTrace = []\r\n gtTime = []\r\n gtHR = []\r\n with open(filename,'r') as f :\r\n x = f.readlines()\r\n s = x[0].split(' ')\r\n s = list(filter(lambda a:a != '',s))\r\n gtTrace = np.array(s).astype(np.float64)\r\n\r\n t = x[2].split(' ')\r\n t = list(filter(lambda a: a != '' ,t))\r\n gtTime = np.array(t).astype(np.float64)\r\n\r\n hr = x[1].split(' ')\r\n hr = list(filter(lambda a: a != '' ,hr))\r\n gtHR = np.array(hr).astype(np.float64)\r\n\r\n data = np.array(gtTrace)\r\n time = np.array(gtTime)\r\n hr = np.array(gtHR)\r\n\r\n return data,hr", "def get_data():\n return np.genfromtxt(FILENAME, delimiter=',', skip_header=1)", "def array(file):\n sequences = []\n recSite = []\n freq = []\n with open(file, 'r') as csv_file:\n fileReader = csv.reader(csv_file, delimiter = \"|\")\n fileReader.next() # throwaway header row\n\n for row in fileReader:\n strippedRow = row[0].strip(\",\").split(',')\n sequences.append(strippedRow[1])\n recSite.append(strippedRow[2])\n freq.append(int(strippedRow[4]))\n\n return sequences, recSite, freq", "def read_file(filename):\r\n with open(filename, \"r\") as f:\r\n data = f.readlines()\r\n res = []\r\n for line in data:\r\n line = line[:-1]\r\n res.append(list(line))\r\n return np.array(res)", "def spectrum_test62(f):\n format_wav = ff.FortranRecordReader(\"(10f8.2)\")\n format_flux = ff.FortranRecordReader(\"(6e12.5)\")\n\n wav = []\n flux = []\n npts = int(f.readline()) # number of frequency points\n\n while len(wav) < npts:\n wav += format_wav.read(f.readline())\n wav = np.array(wav[:npts])\n\n test = f.readline() # atmospheric parameters\n if len(test.split()) == 6:\n flux += format_flux.read(test)\n\n while len(flux) < npts:\n flux += format_flux.read(f.readline())\n flux = np.array(flux[:npts])\n\n return wav, flux", "def load_times(file_name):\n data = np.loadtxt(file_name)\n data = data[data[:, 0].argsort()]\n times = data[:, 0]\n values = data[:, 1]\n\n # Remove the mean amplitude and shift time origin\n times -= times[0]\n values -= np.mean(values)\n\n return times, values", "def wavread(filename):\n\n\tif (os.path.isfile(filename) == False): # raise error if wrong input file\n\t\traise ValueError(\"Input file is wrong\")\n\n\tfs, x = read(filename)\n\n\tif (len(x.shape) !=1): # raise error if more than one channel\n x = np.mean(x,axis = 1)\n\t\tprint \"Audio file is stereo, converting to mono\"\n\n\t#scale down and convert audio into floating point number in range of -1 to 1\n\tx = np.float32(x)/norm_fact[x.dtype.name]\n\treturn fs, x", "def read_sas_file(filename):\n 
\n data = np.array([],dtype=np.float)\n \n with open(filename,'r') as f:\n \n for line in f:\n\n # Only the first 2 or 3 columns are parsed \n cols = parse_sas_data_line(line)\n \n if cols.any():\n\n if data.any():\n data = np.vstack((data,parse_sas_data_line(line)))\n else:\n data = parse_sas_data_line(line)\n \n return data", "def readAmesDustySpectrum(fname=''):\n print('Reading : ', fname)\n\n # Get the effective temperature, logg and metallicity from the file name\n ind = fname.find('lte')\n fname_tags = fname[ind+3:ind+13].split('-')\n teff = np.float(fname_tags[0]) * 100.\n logg = np.float(fname_tags[1]) * 100.\n mph = np.float(fname_tags[2]) * 100.\n\n wav = []\n inu = []\n bnu = []\n with open(fname, 'r') as rfile:\n dum = rfile.readline()\n while dum != '':\n dum = str(dum).replace('D', 'E')\n sdum = dum.split()\n wav.append(np.float(sdum[0]))\n inu.append(np.float(sdum[1]))\n bnu.append(np.float(sdum[2]))\n dum = rfile.readline()\n\n wav = np.array(wav)\n inu = np.array(inu)\n bnu = np.array(bnu)\n ii = wav.argsort()\n\n wav = wav[ii]\n inu = inu[ii]\n bnu = bnu[ii]\n\n # \"Decode\" the intensity arrays\n inu = 10.**(inu - 8.0) * wav\n bnu = 10.**(bnu - 8.0) * wav\n\n # Convert the wavelength to micron from Angstrom\n wav /= 1e4\n nwav = wav.shape[0]\n\n return {'teff': teff, 'logg': logg, 'mph': mph, 'nwav': nwav, 'wav': wav, 'inu': inu, 'bnu': bnu}", "def datread(file=None, header=0):\n with open(file, 'r') as fr:\n op = np.array([list(map(float, l.split())) for l in fr.readlines()[header:]])\n return op", "def atmparamread(filename):\n f = open(filename, 'r')\n f.readline()\n line = f.readline()\n #Td = float(line.split()[0])\n #Pd = float(line.split()[1])\n #Mc = float(line.split()[2])\n #rc = float(line.split()[3])\n n = int(line.split()[0])\n f.readline()\n atm = 0*numpy.ndarray(shape=(n, ncol), dtype=float)\n S = 0*numpy.ndarray(shape=(n), dtype=float)\n for i in range(n):\n line = f.readline()\n S[i] = float(line.split()[0])\n for j in range(ncol ):\n atm[i, j] = float(line.split()[j+1])\n f.close()\n return atm, S", "def spectrum_csv(f):\n\n skip = 0\n while True:\n try:\n wav, flux = np.loadtxt(f, delimiter=\",\", skiprows=skip, unpack=True)\n\n except ValueError:\n # If the first lines have a header\n skip += 1\n\n else:\n break\n\n return wav, flux", "def read_float(filename):\n\tf = open(filename, \"r\")\n\tarr = np.fromfile(f, dtype='>f4')\n\treturn arr", "def read_log_payload(ac_id, filename):\n f = open(filename, 'r')\n pattern = re.compile(\"(\\S+) \"+ac_id+\" PAYLOAD_FLOAT (\\S+),(\\S+),(\\S+),(\\S+)\")\n list_meas = []\n while True:\n line = f.readline().strip()\n if line == '':\n break\n m = re.match(pattern, line)\n if m:\n list_meas.append([float(m.group(1)), float(m.group(2)), float(m.group(3)), float(m.group(4)), float(m.group(5))])\n return np.array(list_meas)", "def read_single_analysis_data(f):\n \n data = np.loadtxt(f, dtype=np.float64)\n\n return data", "def read_text(filename):\n with open(filename, 'r') as f:\n com = f.readline()[0]\n wavelength, flux = np.loadtxt(filename, unpack=True,\n usecols=(0, 1), comments=com)\n return wavelength, flux", "def read(filename):\n records = Parser.__load_csv(filename)\n return np.array(records)", "def read_cospectrum(path,d):\r\n spec = []\r\n timeseries = []\r\n for i in range(len(d)):\r\n filename = path + d[i]\r\n\r\n with open(filename, \"r\") as f:\r\n reader = csv.reader(f,delimiter=',')\r\n ct=1\r\n for row in reader:\r\n if ct==6:\r\n Hz = float(row[0].split('_')[-1])\r\n elif ct==7:\r\n height = 
float(row[0].split('_')[-1])\r\n elif ct==8:\r\n ws = float(row[0].split('_')[-1])\r\n elif ct==9:\r\n avg_period = float(row[0].split('_')[-1])\r\n elif ct==13:\r\n header = row\r\n elif ct>13:\r\n break\r\n ct+=1\r\n \r\n meta = [Hz,height,ws,avg_period]\r\n \r\n thisspec = np.genfromtxt(filename,delimiter=',',skip_header=13)\r\n spec.append(thisspec)\r\n thistime = re.findall('\\d{8}-\\d{4}',filename)[0]\r\n thisdate = datetime.strptime(thistime,'%Y%m%d-%H%M')\r\n timeseries.append(thisdate) \r\n \r\n return spec, timeseries, header, meta", "def read_input_dat_file(file_name, daq_channel):\n data = pd.read_csv(file_name, sep=\"\\t\", header=None).T\n grouped = data.groupby(0)\n scale = 2*1000/(constants.c*constants.femto/constants.micro)\n print(\"scale = {0} fs/mm\".format(scale))\n x = np.array(grouped.mean().index) # conversion from mm to fs delay\n x = (x - np.mean(x))*scale \n y = np.array(grouped.mean().values[:,daq_channel])\n std_dev = np.array(grouped.std().values[:, daq_channel])\n return x, y, std_dev", "def parse_data(filepath):\n settings = dict()\n intensity = list()\n # Boolean flags to check when to start/stop\n # reading parameters\n read_params = False\n read_int = False\n read_zeeman = False\n finished = False\n fieldoff_intensities = list()\n fieldon_intensities = list()\n with open(filepath) as read_file:\n for line in read_file:\n if \"*****\" in line:\n read_int = False\n if finished is True:\n break\n if \"Scan\" in line:\n if \"[Field ON]\" in line:\n read_zeeman = True\n scan_details = line.split()\n settings[\"ID\"] = int(scan_details[1])\n # settings[\"Date\"] = str(scan_details[4])\n read_params = True\n read_int = False\n continue\n if read_int is True:\n if read_zeeman is False:\n fieldoff_intensities += [float(value) for value in line.split()]\n else:\n fieldon_intensities += [float(value) for value in line.split()]\n finished = True\n if read_params is True and len(line.split()) > 1:\n # Read in the frequency step, frequency, and other info\n # needed to reconstruct the frequency data\n scan_params = line.split()\n shift = 1\n settings[\"Frequency\"] = float(scan_params[0])\n settings[\"Frequency step\"] = float(scan_params[1])\n if len(scan_params) == 4:\n settings[\"Multiplier\"] = 1.\n shift = 0\n # If the multiplier data is there, we don't shift the read\n # index over by one\n else:\n settings[\"Multiplier\"] = float(scan_params[2])\n settings[\"Center\"] = float(scan_params[2 + shift])\n settings[\"Points\"] = int(scan_params[3 + shift])\n read_params = False\n # Start reading intensities immediately afterwards\n read_int = True\n continue\n fieldoff_intensities = np.array(fieldoff_intensities)\n fieldon_intensities = np.array(fieldon_intensities)\n\n # Generate the frequency grid\n settings[\"Frequency step\"] = settings[\"Frequency step\"] * settings[\"Multiplier\"]\n # This calculates the length of either side\n side_length = settings[\"Frequency step\"] * (settings[\"Points\"] // 2)\n start_freq = settings[\"Frequency\"] - side_length\n end_freq = settings[\"Frequency\"] + side_length\n frequency = np.linspace(start_freq, end_freq, settings[\"Points\"])\n\n return frequency, fieldoff_intensities, fieldon_intensities, settings", "def read_file(self,file_name):\r\n data = np.genfromtxt(file_name)\r\n return data;", "def loadtext(infile):\n warr, farr, earr=np.loadtxt(infile, usecols=(0,1,2), unpack=True)\n return create_spectrum(warr, farr, earr)", "def openfile(filename):\n Data = np.genfromtxt(filename, delimiter = \",\")\n data = [[]]\n for i 
in range(np.shape(Data)[0]):\n #Stores information row-by-row\n data.append(Data[i][0:])\n return data", "def load_data(file_to_read):\n\n data = np.recfromtxt(file_to_read)\n data = np.asarray(data)\n\n return data", "def _txt_to_numpy(file):\r\n\r\n # read file\r\n filo = open(file, \"r\")\r\n compt = 0\r\n signal = []\r\n for row in filo:\r\n listy = []\r\n listy = row.split(\" \")\r\n\r\n listy[-1] = listy[-1][:-2]\r\n # print listy\r\n loop = []\r\n for elm in listy:\r\n if not elm == \"\":\r\n loop.append(elm)\r\n loop = [float(loop[i]) for i in range(len(loop))]\r\n # print loop\r\n # print len(loop)\r\n if not len(loop) == 19:\r\n print(loop)\r\n print(len(loop))\r\n signal.append(loop)\r\n compt += 1\r\n\r\n signal = np.asanyarray(signal)\r\n signal = np.transpose(signal)\r\n\r\n return signal", "def read_data(self, path):\n if self.data_format == 'twenty': \n length = 20\n else: raise ValueError(\"self.data_format = '%s' unknown.\" % \n self.data_format)\n data = []\n with open(path,'r') as f:\n for line in f:\n data.append([float(line[k:(k + length)]) for k in range(\n 0, len(line.strip('\\n')),length)])\n return np.array(data)", "def load_input(file_name, elements):\n\n input_file = open(file_name)\n input_data = []\n\n while True:\n chunk = input_file.readline()\n\n if(chunk == ''):\n break\n \n ret = load_chunk(chunk, elements)\n\n # Convert data to frequency domain using fft()\n input_data.append([i.real for i in fft(ret)])\n\n return input_data", "def read(filename):\n with open(filename, 'r') as fRead:\n samples = list(map(lambda line: line.strip(), fRead))\n return samples", "def read_data(self,filename):\n self.x = [] #Input values\n self.t = [] #Target values\n\n with open(filename, \"r\") as infile:\n lines = infile.readlines()\n self.n = len(lines)\n for line in lines:\n words = line.split()\n self.x.append(float(words[0]))\n self.t.append(float(words[1]))\n\n self.x = np.array(self.x)\n self.t = np.array(self.t)\n self.create_design_matrix()", "def loadatran(filename, wl=True, verbose=False):\n # 2008-08-21 09:42 IJC: Created to save myself a bit of time\n # 2008-08-25 10:08 IJC: Read in all lines at once; should go\n # faster with sufficient memory\n # 2008-09-09 13:56 IJC: Only convert the wavelength and flux\n # columns(#1 & #2) -- speed up slightly.\n if filename.__class__==list:\n returnlist = []\n for element in filename:\n returnlist.append(loadatran(element, wl=wl))\n return returnlist\n \n f = open(filename, 'r')\n dat = f.readlines()\n f.close()\n if verbose:\n print dat[0]\n print dat[0].split()\n print dat[0].split()[1:3]\n print dat[0].split()[2]\n \n if wl:\n data = array([map(float, line.split()[1:3]) for line in dat])\n else:\n data = array([float(line.split()[2]) for line in dat])\n\n return data", "def read_file(self):\n\n\t\twith open(self.filename, 'r') as f:\n\t\t\tfor line in f:\n\t\t\t\tif len(line)>1:\n\t\t\t\t\tlenght_value,array_values = line.split(';')\n\t\t\t\t\tlist_values = [int(x) for x in array_values.split(',')]\n\t\t\t\t\tprint self.get_arraysurdit(list_values)", "def open_xy(data):\n twotheta, intensity = [], []\n with open(data) as f:\n for line in f:\n row = line.split()\n twotheta.append(row[0])\n intensity.append(row[1])\n xyarray = list(zip(twotheta, intensity))\n xyarray = np.asarray(xyarray)\n xyarray = xyarray.astype(np.float)\n return xyarray", "def aer_raw_events_from_file(filename):\n events = aer_load_from_file(filename)\n count = 0\n for ts_mus, x, y, s in events:\n a = np.zeros(dtype=aer_raw_event_dtype, shape=())\n 
a['timestamp'] = ts_mus / (1000.0 * 1000.0)\n a['x'] = x \n a['y'] = y\n a['sign'] = s\n yield a\n count += 1", "def read_log_imuaccel(ac_id, filename):\n f = open(filename, 'r')\n pattern = re.compile(\"(\\S+) \"+ac_id+\" IMU_ACCEL (\\S+) (\\S+) (\\S+)\")\n list_meas = []\n while True:\n line = f.readline().strip()\n if line == '':\n break\n m = re.match(pattern, line)\n if m:\n list_meas.append([float(m.group(1)), float(m.group(2)), float(m.group(3)), float(m.group(4))])\n return np.array(list_meas)", "def spectrum_tsv(f):\n\n skip = 0\n while True:\n try:\n wav, flux = np.loadtxt(f, skiprows=skip, unpack=True)\n\n except ValueError:\n # If the first lines have a header\n skip += 1\n\n else:\n break\n\n return wav, flux", "def load_file(file_name) -> np.ndarray:\r\n reader = csv.reader(open(file_name, \"r\"), delimiter=',')\r\n x_rdr = list(reader)\r\n return np.array(x_rdr).astype('float')", "def load_spe(filename):\n def read_at(data, pos, size, ntype):\n raw.seek(pos)\n return np.fromfile(raw, ntype, size)\n raw = open(filename, 'rb')\n xdim = np.int64(read_at(raw, 42, 1, np.int16)[0])\n ydim = np.int64(read_at(raw, 656, 1, np.int16)[0])\n arr = read_at(raw, 4100, xdim*ydim, np.uint16)\n arr = arr.reshape((ydim, xdim))\n print('data shape: {}'.format(np.shape(arr)))\n if np.shape(arr)[0] == 1:\n arr = arr[0]\n print('data shape: {}'.format(np.shape(arr)))\n return arr", "def read_data(path):\n fnames = sorted([f for f in glob.glob(os.path.join(path, 'pos', '*.txt'))])\n data = [(1, open(f).readlines()[0]) for f in sorted(fnames)]\n fnames = sorted([f for f in glob.glob(os.path.join(path, 'neg', '*.txt'))])\n data += [(0, open(f).readlines()[0]) for f in sorted(fnames)]\n data = sorted(data, key=lambda x: x[1])\n return np.array([d[1] for d in data]), np.array([d[0] for d in data])", "def read_data(path):\n fnames = sorted([f for f in glob.glob(os.path.join(path, 'pos', '*.txt'))])\n data = [(1, open(f).readlines()[0]) for f in sorted(fnames)]\n fnames = sorted([f for f in glob.glob(os.path.join(path, 'neg', '*.txt'))])\n data += [(0, open(f).readlines()[0]) for f in sorted(fnames)]\n data = sorted(data, key=lambda x: x[1])\n return np.array([d[1] for d in data]), np.array([d[0] for d in data])", "def load_cbin(filename,channel=0):\n \n # .cbin files are big endian, 16 bit signed int, hence dtype=\">i2\" below\n data = np.fromfile(filename,dtype=\">i2\")\n recfile = filename[:-5] + '.rec'\n rec_dict = readrecf(recfile)\n data = data[channel::rec_dict['num_channels']] # step by number of channels\n sample_freq = rec_dict['sample_freq']\n return data, sample_freq", "def readAudioFile(path):\n\n extension = os.path.splitext(path)[1]\n\n try:\n # Commented below, as we don't need this\n # #if extension.lower() == '.wav':\n # #[Fs, x] = wavfile.read(path)\n # if extension.lower() == '.aif' or extension.lower() == '.aiff':\n # s = aifc.open(path, 'r')\n # nframes = s.getnframes()\n # strsig = s.readframes(nframes)\n # x = numpy.fromstring(strsig, numpy.short).byteswap()\n # Fs = s.getframerate()\n if extension.lower() == '.mp3' or extension.lower() == '.wav' or extension.lower() == '.au' or extension.lower() == '.ogg':\n try:\n audiofile = AudioSegment.from_file(path)\n except:\n print(\"Error: file not found or other I/O error. 
\"\n \"(DECODING FAILED)\")\n return -1 ,-1\n\n if audiofile.sample_width == 2:\n data = numpy.fromstring(audiofile._data, numpy.int16)\n elif audiofile.sample_width == 4:\n data = numpy.fromstring(audiofile._data, numpy.int32)\n else:\n return -1, -1\n Fs = audiofile.frame_rate\n x = numpy.array(data[0::audiofile.channels]).T\n else:\n print(\"Error in readAudioFile(): Unknown file type!\")\n return -1, -1\n except IOError:\n print(\"Error: file not found or other I/O error.\")\n return -1, -1\n\n if x.ndim == 2:\n if x.shape[1] == 2:\n x = x.flatten()\n\n return Fs, x", "def filesample(filename):\n sampling_rate, samples = wavfile.read(filename)\n times = np.arange(len(samples)) / sampling_rate\n return samples, sampling_rate", "def load_cbin(filename, channel=0):\n\n # .cbin files are big endian, 16 bit signed int, hence dtype=\">i2\" below\n data = np.fromfile(filename, dtype=\">i2\")\n recfile = filename[:-5] + '.rec'\n rec_dict = readrecf(recfile)\n data = data[channel::rec_dict['num_channels']] # step by number of channels\n sample_freq = rec_dict['sample_freq']\n return data, sample_freq", "def readInput(fileName):\n with open(fileName, 'r') as file:\n\n plotArray = []\n for line in file:\n plotArray.append(list(line.strip()))\n\n return plotArray", "def file_read(file_name):\n \n #open specified file in read mode\n in_file = open(file_name, \"r\")\n \n #create data lists\n sp_length_v3 = []\n sp_period_v3 = [] \n\n #save header to string and split into list\n header_string = in_file.readline()\n header_v3 = header_string.split()\n \n #save revelent data to respective lists\n for line in in_file:\n values = line.split()\n sp_length_v3.append(float(values[1]))\n sp_period_v3.append(float(values[2]))\n \n #close the file\n in_file.close()\n \n #return 3D lists of lists containing data\n ans = [sp_length_v3, sp_period_v3, header_v3]\n \n return ans", "def Read_t_file(file_name):\n t=[]\n \n with open(file_name,'r') as reader:\n temp=reader.readline().strip().split()[-1].split('-')\n t.append(temp[0])\n t.append(temp[1])\n for line in reader.readlines():\n t.append(line.strip().split()[-1].split('-')[-1])\n \n return np.array(t,dtype=np.float32)", "def read_infile(infile):\n # There are a variable header lengths possible.\n # Loop through and look for when the line starts\n # with '1', the first index.\n nheader = 0\n try:\n with open(infile, 'r') as f:\n for line in f:\n if line.strip().startswith('1'):\n break\n nheader += 1\n except IOError:\n message = f'Unable to open {infile} in modconvert.'\n raise PipeCalError(message)\n index, freq, tbr, flux, trj = np.genfromtxt(infile, unpack=True,\n skip_header=nheader)\n return index, freq, tbr, flux, trj", "def read_datafile(shower_name):\n\twith open(datafile_dir+'datafile_'+shower_name) as file:\n\t\tamplitudes = [float(line) for line in file]\n\tcount = [x+1 for x in range(len(amplitudes))[::-1]]\n\treturn amplitudes, count", "def read_data(filename):\n data = np.genfromtxt(filename, delimiter=',', dtype=str)\n X = data[1:,2:].astype(np.float)\n y = data[1:,0]\n y[y==label0]='0'\n y[y==label1]='1'\n y[y==label2]='2'\n y=y.astype(np.float)\n return X, y", "def get_data(data_file_path):\n data_file = open(data_file_path, 'r').readlines()\n data = []\n n = -1\n dim = -1\n for i in range(len(data_file)):\n line_elems = [float(x) for x in data_file[i].split()]\n if i == 0:\n n = int(line_elems[0])\n dim = int(line_elems[1])\n else:\n data.append(np.array(line_elems))\n return data, n, dim", "def spectrum_tsv3(f):\n skip = 0\n while True:\n 
try:\n wav, flux, dflux = np.loadtxt(f, skiprows=skip, unpack=True)\n\n except ValueError:\n # If the first lines have a header\n skip += 1\n\n else:\n break\n\n return wav, flux", "def read_file_agsm(self,filename):\n\n narr,larr,farr,iarr,nn,exceed_freqlim = \\\n aims_fortran.read_file_agsm(filename,config.npositive,config.agsm_cutoff, \\\n config.cutoff*self.cutoff)\n self.modes = np.array(zip(narr[0:nn],larr[0:nn],farr[0:nn],iarr[0:nn]),dtype=modetype)\n\n return exceed_freqlim", "def get_data(dataf):\n with open(dataf) as f:\n label = []\n e_val = []\n for line in f:\n label.append(float(line.split()[1]))\n e_val.append(-1 * float(line.split()[0]))\n return label, e_val", "def get_freq_array(bandwidth, n_chans):\n return numpy.arange(n_chans)*float(bandwidth)/n_chans", "def load_examples(filename):\r\n data = np.load(filename)\r\n return data['examples'], int(data['srate'])", "def load_examples(filename):\r\n data = np.load(filename)\r\n return data['examples'], int(data['srate'])", "def read_data(self):\n self.data = reduce_spectrum(self.filename)", "def parse_txt_file(txtfile):\n array = np.genfromtxt(txtfile)\n return array", "def loadTTLPulse(file, n_channels = 2, fs = 20000, track = 0, mscope = 1):\n f = open(file, 'rb')\n startoffile = f.seek(0, 0)\n endoffile = f.seek(0, 2)\n bytes_size = 2 \n n_samples = int((endoffile-startoffile)/n_channels/bytes_size)\n f.close()\n with open(file, 'rb') as f:\n data = np.fromfile(f, np.uint16).reshape((n_samples, n_channels))\n \n ch_track = data[:,track].astype(np.int32)\n peaks,_ = scipy.signal.find_peaks(np.diff(ch_track), height=30000)\n timestep = np.arange(0, len(data))/fs\n peaks+=1\n ttl_track = pd.Series(index = timestep[peaks], data = data[peaks,track]) \n\n ch_mscope = data[:,mscope].astype(np.int32)\n peaks,_ = scipy.signal.find_peaks(np.abs(np.diff(ch_mscope)), height=30000)\n peaks+=1\n ttl_mscope = pd.Series(index = timestep[peaks], data = data[peaks,mscope])\n\n return ttl_track, ttl_mscope", "def eeg_readavr(file):\t\n\tf=open(file,'r')\t\n\tfirstline = f.readline() # ntpts TSB info etc\n\tstr = string.split(firstline)\n\tntpts = int(str[1])\n\tnchan = int(str[11])\n\ttsb = float(str[3])\n\tdi = float(str[5])\t\n\ttim = np.arange(tsb,ntpts*di+tsb,di)\n\tsecondline = f.readline()\n\tchnam = string.split(secondline)\n\teeg = np.zeros([nchan,ntpts])\t\t\n\tfor i in range(0,nchan):\n\t\ttestline = f.readline()\n\t\ttestline = testline.strip().split()\t\t\n\t\teeg[i,:]=np.array(map(float,testline))\n\t\t\n\tf.close()\n\treturn eeg,tim,nchan,ntpts", "def read_array(self, filename):\n extension = filename.split('.')[-1] # Get file extension\n if extension == 'mat':\n array = sci.loadmat(filename)\n elif extension == 'npy':\n array = np.load(filename)\n else:\n print('Error!!! 
Unrecognised file type for read_array()')\n array = None\n return array", "def loadPulseData(filename, suffix = ''):\n data = np.genfromtxt(filename+'.txt', skip_header=3, names=True,\n dtype='i8,f8,S5,f8,f8,f8,f8,f8,f8')\n print \"Importing...\\n\"\n for key in data.dtype.fields.keys():\n name = key + suffix\n print name\n globals()[name] = data[key]", "def read_log_airdata(ac_id, filename):\n f = open(filename, 'r')\n pattern = re.compile(\"(\\S+) \"+ac_id+\" AIR_DATA (\\S+) (\\S+) (\\S+) (\\S+) (\\S+) (\\S+) (\\S+)\")\n list_meas = []\n while True:\n line = f.readline().strip()\n if line == '':\n break\n m = re.match(pattern, line)\n if m:\n list_meas.append([float(m.group(1)), float(m.group(2)), float(m.group(3)), float(m.group(4)), float(m.group(5)), float(m.group(6)), \n float(m.group(7)), float(m.group(8))])\n return np.array(list_meas)", "def read_data(filename):\n data = np.genfromtxt(filename, delimiter=',', dtype = str)\n X = data[1:,2:].astype(np.float)\n y = data[1:,0]\n y[y==label0]='0' \n y[y==label1]='1' \n y[y==label2]='2'\n y.astype(np.float) \n return X, y", "def load(filename):\n lines = [l.strip('\\r\\n ') for l in open(filename, 'r').readlines()]\n lines = [l for l in lines if l != '']\n dims = [re.split(r'\\s+', l) for l in lines]\n f = np.array([[float(f) for f in d] for d in dims])\n return f", "def note(freq):\n data = np.sin(2.0 * np.pi * freq * t) * amp\n return data", "def numpy_from_audio(audio_file, downsample_factor = None):\n sample_rate, samples = wavfile.read(audio_file.replace('\\'',''))\n if downsample_factor is not None:\n samples = signal.resample(samples, len(samples) // downsample_factor)\n sample_rate //= downsample_factor\n drop_samples = -(len(samples) % sample_rate)\n return samples[:drop_samples], sample_rate", "def read_flt_file(filename):\n\n fid = open(filename,'rb')\n arr = array.array('i')\n arr.fromfile(fid, 1) # dim\n dim = arr[0]\n #http://www.python.org/search/hypermail/python-1993/0393.html\n if dim>100:\n \"\"\"print 'Read very high dimension (>100).'\n print 'Endianness may come into play.'\n print 'Try to swap the byte order.'\"\"\"\n swap = True;\n arr.byteswap()\n dim = arr[0]\n #print 'dim =',dim\n else:\n swap = False\n assert(dim>=1 and dim<=4) # only accept data up to 4 dimensions.\n\n arr = array.array('i')\n arr.fromfile(fid,dim+2)\n if swap:\n arr.byteswap()\n volume = reduce(lambda x,y: x*y, arr[0:dim-1], 1)\n\n binvalues = array.array('f')\n binvalues.read(fid, volume*arr[dim-1])\n if swap:\n binvalues.byteswap()\n fid.close()\n\n data = numpy.array(binvalues, numpy.float)\n data = numpy.reshape(data, (arr[dim-1], volume))\n\n return (arr[:dim],data)", "def readfile(self, path, filename):\n # The DataStudio software uses ISO-8859-1 encoding (especially for the degree sign in temperature files)\n file = open(path + filename, encoding=\"iso-8859-1\")\n rowlist = file.readlines()\n\n title = rowlist[0].strip(\"\\n\")\n labels = rowlist[1].strip(\"\\n\").split(sep=\"\\t\")\n\n data = np.zeros((len(rowlist)-2, 2))\n\n for i in range(2, len(rowlist)):\n columns = rowlist[i].split(sep=\"\\t\")\n data[i-2, 0] = float(columns[0].replace(\",\", \".\"))\n data[i-2, 1] = float(columns[1].replace(\",\", \".\"))\n\n return data, title, labels", "def get_sound_data(sec, freq_func, amplitude):\n t = np.linspace(0, sec, SAMPLERATE * sec)\n data = np.array(t, dtype=np.int16)\n for i in range(data.size):\n data[i] = amplitude * np.sin(2 * np.pi * quad(freq_func, 0, t[i])[0])\n return data", "def convert(filename, signal=sampleArray(), \n 
centerEta = DEFCENTERETA, centerPhi = DEFCENTERPHI): \n numEvents = 0\n listOfSignals = []\n # First we open the file\n with open(filename, \"r\") as f:\n content = f.readlines()\n numEvents = len(content)\n for i in range(0, numEvents):\n if i % 1000 == 0:\n print(i)\n thisEvent = content[i]\n try:\n reducedEvent = convertEvent(\n thisEvent, centerEta, centerPhi, signal.shape[0], signal.shape[1]\n ) \n listOfSignals.append(reducedEvent)\n except indexError:\n 0\n print(\"Converted\", len(listOfSignals), \"out of\", numEvents, \"events\")\n return listOfSignals", "def read_forces(filename):\n f=open(filename,\"r\")\n castep_forces = f.readlines()\n f.close() \n nruter = []\n for index, line in enumerate(castep_forces):\n if 'Total number of ions in cell' in line:\n n_atoms = int(line.split()[7])\n if 'Cartesian components (eV/A)' in line:\n starting_line = index + 4\n for i in range(n_atoms):\n f = starting_line + i\n nruter.append([float(castep_forces[f].split()[m]) for m in range(3,6)]) \n nruter=np.array(nruter,dtype=np.double)\n return nruter", "def load_SCOUT_permittivity(filename):#{{{\n\n\n ## Open the file for binary access\n f = open(filename, \"rb\")\n\n ## Load the number of data points, type of x axis and its boundaries\n f.seek(151); datalength = np.fromfile(f, dtype=np.uint16, count=1)[0]\n print(datalength)\n f.seek(160); x_axis_type = np.fromfile(f, dtype=np.uint8, count=1)[0]\n print(x_axis_type)\n f.seek(166); x_start, x_end = np.fromfile(f, dtype=np.float32, count=2)\n print(x_start)\n print(x_end)\n ## Load the n, k data\n f.seek(174); raw_eps = np.fromfile(f, dtype=np.float32, count=datalength*2)\n f.close\n\n eps = raw_eps[::2] + 1j*raw_eps[1::2]\n \n from scipy.constants import h, c, eV\n if x_axis_type == 2: # 'eV' \n freq = np.linspace(x_start*eV/h, x_end*eV/h, datalength) \n elif x_axis_type == 3: # 'um' \n wavelength = np.linspace(x_start*1e-6, x_end*1e-6, datalength)\n freq = c/wavelength\n elif x_axis_type == 0: # 'cm-1' \n freq = np.linspace(x_start*100*c, x_end*100*c, datalength) \n\n return freq, eps", "def read_wav(fname, normalize=True):\n # samps_int16: N x C or N\n # N: number of samples\n # C: number of channels\n sampling_rate, samps_int16 = wavfile.read(fname)\n # N x C => C x N\n samps = samps_int16.astype(np.float)\n # tranpose because I used to put channel axis first\n if samps.ndim != 1:\n samps = np.transpose(samps)\n # normalize like MATLAB and librosa\n if normalize:\n samps = samps / MAX_INT16\n return sampling_rate, samps", "def spectrum_misc(f):\n\n end = False\n while not end:\n try:\n line = f.readline().split()\n wavnew = [float(w) for w in line]\n wav = np.append(wav, wavnew)\n prevwav = wavnew[-1]\n\n except BaseException:\n end = True\n aflux = f.readlines()\n for line in aflux:\n line = re.sub(r\"-10\\d\", \"e-100\", line)\n flux = np.append(flux, line.rstrip().split())\n\n wav, flux = np.array(wav), np.array(flux)\n return wav, flux", "def read_filter(filter_file):\n\n fd = open(filter_file, \"r\")\n lines = fd.readlines()\n fd.close()\n\n wavelengths = []\n weights = []\n for line in lines:\n line = line.strip()\n words = line.split()\n wavelengths.append(float(words[0]))\n weights.append(float(words[1]))\n\n return (wavelengths, weights)", "def _read_pha(file):\n with fits.open(file) as hdul:\n data = hdul[1].data\n header_for_livetime = hdul[0].header\n\n return data['channel'], data['counts'], header_for_livetime['LIVETIME']", "def spectre_csv(f):\n \n skip = 0\n while True:\n try: \n wav, flux = np.loadtxt(f, delimiter = ',',\n 
skiprows = skip, unpack = True)\n \n except ValueError:\n # If the first lines have a header\n skip += 1\n \n else:\n break\n \n return wav,flux", "def loadascii(self, filename=None):\n data = np.loadtxt(filename)\n if len(data.shape) == 1:\n self.flux = data\n elif len(data.shape) == 2:\n self.wavelength = data[:,0]\n self.flux = data[:,1]", "def load_data(self, f): \n self.sampling = True\n self.reads = np.load(f)\n self.total = self.reads.shape[0]", "def load_file(filename):\r\n file =np.genfromtxt(filename, delimiter=',')\r\n return file" ]
[ "0.7232917", "0.71028847", "0.709317", "0.6883066", "0.6783753", "0.67701334", "0.67288643", "0.6692682", "0.6670123", "0.66636634", "0.66447705", "0.65392363", "0.64900935", "0.6441326", "0.6395796", "0.63950247", "0.637628", "0.6370568", "0.6367199", "0.6345476", "0.63408554", "0.63217086", "0.63135296", "0.6312749", "0.62945575", "0.6292201", "0.6291758", "0.62905186", "0.62896425", "0.62828547", "0.6277323", "0.62716854", "0.6247929", "0.6246564", "0.6239385", "0.62392426", "0.6232933", "0.62176895", "0.6213046", "0.62117314", "0.6209931", "0.61912906", "0.61806923", "0.6179785", "0.61557317", "0.6148467", "0.61368924", "0.6130132", "0.60839885", "0.60828066", "0.60795426", "0.6056614", "0.6052197", "0.6051782", "0.60482347", "0.6044013", "0.60439956", "0.60439956", "0.604061", "0.6024761", "0.6022428", "0.6020344", "0.60137016", "0.5999325", "0.5996085", "0.59923804", "0.59921724", "0.5984059", "0.5983641", "0.59810674", "0.59726906", "0.5971907", "0.59689367", "0.59654474", "0.59654474", "0.5964025", "0.59539926", "0.5932216", "0.5914209", "0.5912123", "0.5908288", "0.5904174", "0.59016436", "0.5899162", "0.58972955", "0.5895865", "0.5875239", "0.58682734", "0.5868233", "0.58674675", "0.58673066", "0.58662766", "0.58656126", "0.5843098", "0.584226", "0.5834519", "0.5827411", "0.5825261", "0.58221495", "0.58218014" ]
0.7644199
0
Use apply_default decorator for the `default_args` feature to work properly; deprecated. In previous versions, all subclasses of BaseOperator must use apply_default decorator for the `default_args` feature to work properly. In current version, it is optional. The decorator is applied automatically using the metaclass.
def apply_defaults(func: T) -> T:
    warnings.warn(
        "This decorator is deprecated. \n"
        "\n"
        "In previous versions, all subclasses of BaseOperator must use apply_default decorator for the "
        "`default_args` feature to work properly.\n"
        "\n"
        "In current version, it is optional. The decorator is applied automatically using the metaclass.\n",
        RemovedInAirflow3Warning,
        stacklevel=3,
    )

    # Make it still be a wrapper to keep the previous behaviour of an extra stack frame
    @wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    return cast(T, wrapper)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def apply_defaults(func: T) -> T:\n warnings.warn(\n \"This decorator is deprecated. \\n\"\n \"\\n\"\n \"In previous versions, all subclasses of BaseOperator must use apply_default decorator for the \"\n \"`default_args` feature to work properly.\\n\"\n \"\\n\"\n \"In current version, it is optional. The decorator is applied automatically using the metaclass.\\n\",\n DeprecationWarning,\n stacklevel=3,\n )\n\n # Make it still be a wrapper to keep the previous behaviour of an extra stack frame\n @wraps(func)\n def wrapper(*args, **kwargs):\n return func(*args, **kwargs)\n\n return cast(T, wrapper)", "def default_args(defaults):\r\n def wrapper(func):\r\n @wraps(func) # just to show docstring of original function\r\n def new_func(*args, **kwargs):\r\n kwargs = defaults | kwargs\r\n return func(*args, **kwargs)\r\n return new_func\r\n return wrapper", "def _apply_defaults(func, args, kwargs):\n\n sig = signature(func)\n bound_arguments = sig.bind(*args, **kwargs)\n for param in sig.parameters.values():\n if param.name not in bound_arguments.arguments:\n bound_arguments.arguments[param.name] = param.default\n args = [bound_arguments.arguments[key] for key in sig.parameters.keys()]\n return args, {}", "def default_params(defaults):\n def wrap(function):\n def withargs(*args, **kwargs):\n merged = {}\n merged.update(defaults)\n merged.update(kwargs)\n return function(*args, **merged)\n return withargs\n return wrap", "def decorator_defaults(func):\n from inspect import isfunction\n @wraps(func)\n def my_wrap(*args,**kwargs):\n if len(kwargs)==0 and len(args)==1 and isfunction(args[0]):\n # call without parentheses\n return func(*args)\n else:\n def _(f):\n return func(f, *args, **kwargs)\n return _\n return my_wrap", "def decorator_defaults(func):\n from inspect import isfunction\n @wraps(func)\n def my_wrap(*args,**kwargs):\n if len(kwargs)==0 and len(args)==1 and isfunction(args[0]):\n # call without parentheses\n return func(*args)\n else:\n def _(f):\n return func(f, *args, **kwargs)\n return _\n return my_wrap", "def _default_wrapper(recons_func, **kwargs):\n return recons_func(**kwargs)", "def _add_default_ops(op_name):\n _add_default_op(op_name)\n _add_default_reverse_op(op_name)", "def _add_default_op(op_name):\n _add_op(\"__%s__\"%op_name, getattr(operator, op_name))", "def get_default_args(**kw):\n default_args_exp = {\n \"output_file\": \"ml_demo.c\",\n \"function_name\": \"ml_demo\",\n \"precision\": ML_Binary32,\n \"accuracy\": ML_Faithful,\n \"target\": GenericProcessor.get_target_instance()\n }\n default_args_exp.update(kw)\n return DefaultArgTemplate(**default_args_exp)", "def add_default_args(kwargs_old, **kwargs_new):\n for key in kwargs_new:\n if key not in kwargs_old:\n kwargs_old[key] = kwargs_new[key]", "def override_default_kwargs(**overrides):\n def decorator(function):\n @functools.wraps(function)\n def replacement(*args, **kwargs):\n # Allow our default kwargs to be overriden if specified\n final_kwargs = copy.deepcopy(overrides)\n final_kwargs.update(**kwargs)\n return function(*args, **final_kwargs)\n return replacement\n return decorator", "def rapplicator(default_f):\n method_name = default_f.__name__\n def apply(f, *args, **kwds):\n if isinstance(f, rfunc):\n return getattr(f, method_name)(*args, **kwds)\n elif isinstance(f, functools.partial):\n if f.keywords:\n new_keywords = copy.copy(f.keywords)\n new_keywords.update(kwds)\n return apply(f.func, *(f.args + args), **new_keywords)\n else:\n return apply(f.func, *(f.args + args))\n else:\n return default_f(f, 
*args, **kwds)\n return apply", "def default_arg(default):\n class DefaultArg(argparse.Action):\n def __call__(self, parser, namespace, value, option_string):\n if value is None:\n setattr(namespace, self.dest, default)\n else:\n setattr(namespace, self.dest, value)\n\n return DefaultArg", "def has_default_args(self, node, ordered_functions):\n # Need to create individual routines for Fortran and C\n if node.wrap.fortran == False and node.wrap.c == False:\n return\n if node.options.F_default_args != \"generic\":\n return\n default_funcs = []\n\n default_arg_suffix = node.default_arg_suffix\n ndefault = 0\n\n min_args = 0\n for i, arg in enumerate(node.ast.declarator.params):\n if arg.declarator.init is None:\n min_args += 1\n continue\n new = node.clone()\n self.append_function_index(new)\n new._generated = \"has_default_arg\"\n del new.ast.declarator.params[i:] # remove trailing arguments\n new._has_default_arg = False\n # Python and Lua both deal with default args in their own way\n new.wrap.assign(c=True, fortran=True)\n fmt = new.fmtdict\n try:\n fmt.function_suffix = default_arg_suffix[ndefault]\n except IndexError:\n # XXX fmt.function_suffix =\n # XXX fmt.function_suffix + '_nargs%d' % (i + 1)\n pass\n default_funcs.append(new._function_index)\n ordered_functions.append(new)\n ndefault += 1\n\n # keep track of generated default value functions\n node._default_funcs = default_funcs\n node._nargs = (min_args, len(node.ast.declarator.params))\n # The last name calls with all arguments (the original decl)\n try:\n node.fmtdict.function_suffix = default_arg_suffix[ndefault]\n except IndexError:\n pass", "def default_attrgetter(*args, default=None):\n\n def retrieve(obj):\n try:\n return attrgetter(*args)(obj)\n except AttributeError:\n return default\n\n return retrieve", "def _default_command(cmds, argv):\n if len(cmds) != 1 or cmds[0].__name__ != DEFAULT_COMMAND:\n return None\n dc = cmds[0]\n spec = inspect.getargspec(dc)\n if not (spec.varargs and spec.keywords):\n return dc\n save_argv = argv[:]\n\n def _wrap_default_command():\n return dc(*save_argv)\n\n del argv[:]\n return _wrap_default_command", "def twolevel_default_params(defaults):\n def wrap(function):\n def withargs(*args, **kwargs):\n merged = {}\n merged.update(defaults)\n for k, v in kwargs.items():\n if type(v) == dict and k in merged and type(merged[k]) == dict:\n merged[k].update(v)\n else:\n merged[k] = v\n return function(*args, **merged)\n\n return withargs\n\n return wrap", "def default_property(default_fn: Optional[DefaultFunction] = None, **kwargs):\n # pylint: disable=C0301 (line-too-long unavoidable because of doctest)\n if default_fn is None:\n return functools.partial(_DefaultProperty, **kwargs)\n return _DefaultProperty(default_fn=default_fn, **kwargs)", "def f_onearg_and_default(self, arg1, default = 1) :\n pass", "def _default_arguments(self, obj):\n \n if not (inspect.isfunction(obj) or inspect.ismethod(obj)):\n # for classes, check for __init__,__new__\n if inspect.isclass(obj):\n obj = (getattr(obj,'__init__',None) or\n getattr(obj,'__new__',None))\n # for all others, check if they are __call__able\n elif hasattr(obj, '__call__'):\n obj = obj.__call__\n # XXX: is there a way to handle the builtins ?\n try:\n args,_,_1,defaults = inspect.getargspec(obj)\n if defaults:\n return args[-len(defaults):]\n except TypeError: pass\n return []", "def default():", "def add_default_numeric_op(op_name):\n add_numeric_op(\"__%s__\"%op_name, getattr(operator, op_name))", "def test_default_argument(self):\n 
@converters.wrap\n def inner_test(param: int = 5):\n \"\"\"Make sure the default was used.\"\"\"\n self.assertEqual(param, 5)\n inner_test()", "def add_default_numeric_ops(op_name):\n add_default_numeric_op(op_name)\n add_default_reverse_numeric_op(op_name)", "def add_default_numeric_ops(op_name):\n add_default_numeric_op(op_name)\n add_default_reverse_numeric_op(op_name)", "def get_default_args(**kw):\n default_args_log = {\n \"output_file\": \"POLY.c\",\n \"function_name\": \"POLY\",\n \"precision\": ML_Binary64,\n \"target\": GenericProcessor.get_target_instance(),\n \"function\": None,\n \"interval\": None,\n \"epsilon\": None\n }\n default_args_log.update(kw)\n return DefaultArgTemplate(**default_args_log)", "def default(self):\n raise NotImplementedError", "def f_default(self, default = 1) :\n pass", "def default(self, result: Union[Callable, Any], **kwargs):\n self._default = result, kwargs", "def set_default(func=None, passed_kwargs={}):\n if func is None:\n return functools.partial(set_default, passed_kwargs=passed_kwargs)\n\n @functools.wraps(func)\n def init(*args, **kwargs):\n for k in passed_kwargs.keys():\n kwargs.setdefault(k,passed_kwargs[k])\n return func(*args, **kwargs)\n return init", "def add_default_numeric_op(op_name):\n add_numeric_op(\"__%s__\"%op_name)", "def args_with_defaults(args, defaults):\n\n def argument(arg, default):\n \"\"\"\n Arg=Default pair if Default is present\n\n :param arg: argument name\n :type arg: ``str``\n :param default: default value for argument\n :type default: ``object``\n\n :return: string representation\n :rtype: ``str``\n \"\"\"\n return \"{0}={1}\".format(arg, default) if default else arg\n\n return \", \".join(\n reversed(\n [\n argument(arg, default)\n for arg, default in zip_longest(\n reversed(args), reversed(defaults)\n )\n ]\n )\n )", "def get_default_args(func):\n signature = inspect.signature(func)\n return {\n k: v.default\n for k, v in signature.parameters.items()\n if v.default is not inspect.Parameter.empty\n }", "def set_default_operator(self, operator):\n return self.set_param(\"default_operator\", operator)", "def _format_default_functions(self):\n self._out_formatter = null_out_formatter\n self._f_default_names = lambda x: [0]", "def default(default_value, force=False):\n def default_setter(value):\n \"\"\"\n Sets the value to the given default value, assuming the original value\n is not set or the default value is set to forced.\n\n :param Any value: Injected by CKAN core\n :rtype: Any\n \"\"\"\n return value if value and not force else default_value\n\n return default_setter", "def default(method):\n method._is_default = True # pylint: disable=protected-access\n return method", "def apply_generic_arg_defaults(t_args, t_kwargs):\n\n # don't worry about broken settings, validate_generic_args() will\n # take care of them\n\n if 'where_str' not in t_kwargs:\n t_kwargs['where_str'] = None\n\n if 'where_args' not in t_kwargs:\n t_kwargs['where_args'] = []\n\n if 'more_str' not in t_kwargs:\n t_kwargs['more_str'] = None\n\n if 'more_args' not in t_kwargs:\n t_kwargs['more_args'] = []", "def test_default_argument_null(self):\n @converters.wrap\n def inner_test(param: int = 14):\n \"\"\"Make sure the default was used.\"\"\"\n self.assertEqual(param, 14)\n inner_test(param=None)", "def with_default_args(\n self,\n args: Optional[Sequence[str]] = None,\n ) -> \"Container\":\n _args = [\n Arg(\"args\", args, None),\n ]\n _ctx = self._select(\"withDefaultArgs\", _args)\n return Container(_ctx)", "def interface_wrapper(doer, 
args='[]', varargs='None', varkw='None', defaults='None'):\n\n pass", "def default(access):\n return lambda *args, **kwargs: access", "def test_default(self):\n varargs = ()\n kwargs = {}\n method = getattr(self.foo,'f_default')\n var_dict = reassign_function_arguments(method, varargs, kwargs)\n self.assert_(var_dict['default'] == 1)\n self.assert_(len(var_dict) == 1)", "def get_default(self, obj):\n if callable(self.default):\n return self.default()\n else:\n return self.default", "def _visit_function_args(self, node, function_info, defaults, kw_defaults):\n self._check_method_first_arg(node, function_info=function_info)\n\n num_without_defaults = len(node.args.args) - len(defaults)\n defaults = [None] * num_without_defaults + defaults\n args = node.args.args\n\n if hasattr(node.args, \"kwonlyargs\"):\n args = args + node.args.kwonlyargs\n defaults = defaults + kw_defaults\n\n with qcore.override(self, \"state\", VisitorState.check_names):\n for idx, (arg, default) in enumerate(zip(args, defaults)):\n is_self = (\n idx == 0\n and self.current_class is not None\n and not function_info.is_staticmethod\n and not isinstance(node, ast.Lambda)\n )\n if getattr(arg, \"annotation\", None) is not None:\n value = self._value_of_annotated_arg(arg)\n if default is not None and not self.is_value_compatible(\n value, default\n ):\n self._show_error_if_checking(\n arg,\n \"Default value for argument %s incompatible with declared type %s\"\n % (arg.arg, value),\n error_code=ErrorCode.incompatible_default,\n )\n elif is_self:\n if function_info.is_classmethod or getattr(node, \"name\", None) in (\n \"__init_subclass__\",\n \"__new__\",\n ):\n assert self.current_class is not None\n value = SubclassValue(self.current_class)\n else:\n # normal method\n value = TypedValue(self.current_class)\n elif default is not None:\n value = unite_values(UNRESOLVED_VALUE, default)\n else:\n value = UNRESOLVED_VALUE\n\n if is_self:\n # we need this for the implementation of super()\n self.scope.set(\"%first_arg\", value, \"%first_arg\", self.state)\n\n with qcore.override(self, \"being_assigned\", value):\n self.visit(arg)\n\n if node.args.vararg is not None:\n value = TypedValue(tuple)\n # in py3 the vararg is wrapped in an arg object\n if hasattr(node.args.vararg, \"arg\"):\n vararg = node.args.vararg.arg\n arg_value = self._value_of_annotated_arg(node.args.vararg)\n if arg_value is not UNRESOLVED_VALUE:\n value = GenericValue(tuple, [arg_value])\n else:\n vararg = node.args.vararg\n self.scope.set(vararg, value, vararg, self.state)\n if node.args.kwarg is not None:\n value = GenericValue(dict, [TypedValue(str), UNRESOLVED_VALUE])\n if hasattr(node.args.kwarg, \"arg\"):\n kwarg = node.args.kwarg.arg\n arg_value = self._value_of_annotated_arg(node.args.kwarg)\n if arg_value is not UNRESOLVED_VALUE:\n value = GenericValue(dict, [TypedValue(str), arg_value])\n else:\n kwarg = node.args.kwarg\n self.scope.set(kwarg, value, kwarg, self.state)\n\n return args", "def default_args(self) -> Optional[list[str]]:\n _args: list[Arg] = []\n _ctx = self._select(\"defaultArgs\", _args)\n return _ctx.execute_sync(Optional[list[str]])", "def update_model_kwargs(model_default):\n def model_update_decorator(func):\n\n @functools.wraps(func)\n def update_wrapper(*args, **kwargs):\n updated_kwargs = update_model_kwargs_logic(model_default, kwargs)\n if 'verbose' in updated_kwargs:\n if updated_kwargs['verbose'] > 0:\n # Print out the full updated kwargs\n print(\"INFO:kgcnn: Updated model kwargs:\")\n pprint.pprint(updated_kwargs)\n\n 
return func(*args, **updated_kwargs)\n\n return update_wrapper\n\n return model_update_decorator", "def bind_with_defaults(self, *args, **kwargs):\n\t\targs = super().bind_partial(*args, **kwargs).arguments\n\t\targs.update({elem: self.defaults[elem] if elem in self.defaults else self.default\n\t\t\t\t\t\t\tfor elem in sig.parameters if elem not in args})\n\t\treturn super().bind(**args)", "def default(self):\r\n if callable(self._default):\r\n return self._default()\r\n\r\n return self._default", "def dict_default(x, key=None):\n if isinstance(x, NoDefault):\n if key is None:\n raise KeyError()\n else:\n raise KeyError(key)\n elif isinstance(x, CallIfDefault):\n return x()\n else:\n return x", "def _get_default(self):\n if callable(self.default):\n return self.default()\n else:\n return self.default", "def generic_command(replace_name: Optional[str] = None,\n default_parser: Callable[[Any], str] = default_parser,\n arg_parsers: Optional[Dict[str, Callable[[Any], str]]] = None,\n ignore_args: Optional[List[str]] = None,\n add_class_name: bool = True):\n def decorator(func):\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n return_value = func(*args, **kwargs)\n\n # Resolve parameters\n nonlocal ignore_args\n ignore_args = (ignore_args or []) + DEFAULT_IGNORE_ARGS\n\n unparsed_kwargs = {}\n if args or kwargs:\n arg_names = func.__code__.co_varnames\n # update with args\n unparsed_kwargs = {key: value for key, value in zip(arg_names[:len(args)], args) if value is not None}\n # update with kwargs\n unparsed_kwargs.update(dict(\n sorted(\n {k: v for k, v in kwargs.items() if v is not None}.items(),\n key=lambda items: arg_names[len(args):].index(items[0]))))\n\n command = \"\"\n\n if add_class_name:\n class_ = utils.get_defining_class(func)\n if class_:\n command += utils.pascal_to_snake_case(class_.__name__).strip('_').replace('_', '-') + ' '\n\n # Set command name\n final_name = func.__name__.strip('_').replace('_', '-')\n if replace_name is not None:\n final_name = replace_name.format(__name__=final_name, **unparsed_kwargs)\n if final_name:\n command += final_name + ' '\n\n default_arg_parser = utils.get_arg_default(generic_command, \"default_parser\")\n final_kwargs = {}\n for key, value in unparsed_kwargs.items():\n # Ignore argument\n if ignore_args and key in ignore_args:\n continue\n\n manual_arg_parser = arg_parsers[key] if arg_parsers and arg_parsers.get(key) else lambda x: None\n final_kwargs[key] = manual_arg_parser(value) or default_parser(value) or default_arg_parser(value)\n\n # Set arguments\n command += ' '.join(final_kwargs.values())\n\n commands.append(command.strip())\n return return_value\n\n return wrapper\n return decorator", "def declare_default_behavior(self, default_behavior):\n self.default_behavior = default_behavior\n return default_behavior", "def args(self, default_args=(), diff=()):\n args = self._args\n if not args:\n args = default_args\n\n return self.expand_vars(args, diff=diff)", "def _set_default_func(self):\n dim = self.space.dim\n if dim == 1:\n func = lambda x : [ 1. ]\n if dim == 2:\n func = lambda x,y : [ 1. ]\n if dim == 3:\n func = lambda x,y,z : [ 1. 
]\n return func", "def _update_annotation_with_default(anno, name, default):\n # Create instance if is type class\n complete_annotation = anno\n if _is_dsl_type_cls(anno):\n complete_annotation = anno()\n complete_annotation.name = name\n if default is Input._EMPTY:\n return complete_annotation\n if isinstance(complete_annotation, Input):\n # Non-parameter Input has no default attribute\n if complete_annotation._is_parameter_type and complete_annotation.default is not None:\n # logger.warning(\n # f\"Warning: Default value of f{complete_annotation.name!r} is set twice: \"\n # f\"{complete_annotation.default!r} and {default!r}, will use {default!r}\"\n # )\n pass\n complete_annotation._update_default(default)\n return complete_annotation", "def add_default_reverse_numeric_op(op_name):\n add_reverse_numeric_op(\"__r%s__\"%op_name, getattr(operator, op_name))", "def _visit_arg_with_default(self, arg: ast.arg, default: ast.AST | None) -> str:\n name = self.visit(arg)\n if default:\n if arg.annotation:\n name += \" = %s\" % self.visit(default)\n else:\n name += \"=%s\" % self.visit(default)\n return name", "def _update_annotation_with_default(\n anno: Union[Annotation, Input, Output], name: str, default: Any\n ) -> Union[Annotation, Input, Output]:\n # Create instance if is type class\n complete_annotation = anno\n if _is_dsl_type_cls(anno):\n complete_annotation = anno()\n complete_annotation._port_name = name\n if default is Input._EMPTY:\n return complete_annotation\n if isinstance(complete_annotation, Input):\n # Non-parameter Input has no default attribute\n if complete_annotation._is_primitive_type and complete_annotation.default is not None:\n # logger.warning(\n # f\"Warning: Default value of f{complete_annotation.name!r} is set twice: \"\n # f\"{complete_annotation.default!r} and {default!r}, will use {default!r}\"\n # )\n pass\n complete_annotation._update_default(default)\n if isinstance(complete_annotation, Output) and default is not None:\n msg = (\n f\"Default value of Output {complete_annotation._port_name!r} cannot be set:\"\n f\"Output has no default value.\"\n )\n raise UserErrorException(msg)\n return complete_annotation", "def default(self):\n if callable(self._default):\n return self._default()\n\n return self._default", "def _add_default_reverse_op(op_name):\n _add_op(\"__r%s__\"%op_name, getattr(operator, op_name))", "def default(self) -> Any:\n raise NotImplementedError()", "def _replace_defaults_wrapper(cls: Type[_BaseTpcpObject], old_init: Callable) -> Callable:\n # We get the params of the old inits here and not at runtime to avoid issues when the class is subclassed and\n # super().__init__ is called before all parameters of the child object are set.\n # This way, the param checks only concern the parameters of the current class.\n params = get_param_names(cls)\n\n @wraps(old_init)\n def new_init(self: BaseTpcpObject, *args: Any, **kwargs: Any) -> None:\n # call the old init.\n old_init(self, *args, **kwargs)\n\n # Check if any of the initial values has a \"default parameter flag\".\n # If yes we replace it with a clone (in case of a tpcp object) or a deepcopy in case of other objects.\n # This is handled by the factory `get_value` method.\n for p in params:\n if isinstance(val := getattr(self, p), BaseFactory):\n setattr(self, p, val.get_value())\n\n # This is just for introspection, in case we want to know if we have a modified init.\n new_init.__tpcp_wrapped__ = True\n\n return new_init", "def _apply_defaults(self):\n # Applies normal parameter defaults\n for 
scalar_parameter, value in self._DEFAULT_PARAMETER_SCALARS.items():\n if scalar_parameter not in self.parameters:\n self.parameters[scalar_parameter] = copy.copy(value)\n\n # Applies defaults to all ramp parameters\n for table_parameter, table in self._DEFAULT_PARAMETER_TABLES.items():\n self.parameters[table_parameter] = [list(tup) for tup in table]\n self.parameters['_' + table_parameter] = zip(*self.parameters[table_parameter])", "def set_default_subparser(self, default, args=None):\n if not args:\n args = sys.argv[1:]\n if args[0] not in ['-h', '--help', '--version', '-info']:\n if args[0].find('-') != -1:\n msg = \"Defaulting to the 'run' command. Please update the\"\n msg += \" call of MontePython. For more info, see the help\"\n msg += \" string and/or the documentation \"\n warnings.warn(msg)\n args.insert(0, default)\n elif args[0] == '-info':\n msg = \"The info option has been turned into a command. \"\n msg += \"Please substitute '-info' with 'info' when running \"\n msg += \"MontePython\"\n warnings.warn(msg)\n args[0] = 'info'\n return args", "def set_arg_defaults(task: \"Task\", args: Tuple, kwargs: dict) -> Tuple[Tuple, dict]:\n # Start with given kwargs.\n kwargs2 = dict(kwargs)\n\n sig = task.signature\n for i, param in enumerate(sig.parameters.values()):\n if i < len(args):\n # User already specified this arg in args.\n continue\n\n elif param.name in kwargs2:\n # User already specificed this arg in kwargs.\n continue\n\n elif param.default != param.empty:\n # Default should be used.\n kwargs2[param.name] = param.default\n return args, kwargs2", "def default_transforms():\n\n return transforms.Compose([transforms.ToTensor(), normalize_transform()])", "def _get_default_arg(args, defaults, arg_index):\n if not defaults:\n return DefaultArgSpec(False, None)\n\n args_with_no_defaults = len(args) - len(defaults)\n\n if arg_index < args_with_no_defaults:\n return DefaultArgSpec(False, None)\n else:\n value = defaults[arg_index - args_with_no_defaults]\n if (type(value) is str):\n value = '\"%s\"' % value\n return DefaultArgSpec(True, value)", "def default(cls, ):\n return cls.fromMetrics()", "def default(cls, ):\n return cls.fromMetrics()", "def default_latlon(self, *args, latlon=True, **kwargs):\n method = kwargs.pop('_method')\n return method(self, *args, latlon=latlon, **kwargs)", "def get_arg_default(self, arg_name):\n raise NotImplementedError(\"ICallable.get_arg_default\")", "def __call__(self, *args, **kwargs):\n if len(args) == 0:\n args = tuple(arg.default() for arg in self[1:])\n return Call(self, *args, **kwargs)", "def _cook_args(self, val, axes, default=None):\n if val is None:\n return default\n\n # if val is scalar, make it same size as given axes\n if not hasattr(val, '__len__'):\n val = np.ones(len(axes))*val\n\n # if val is an array, make sure it has same size as axes\n if len(val) != len(axes):\n raise ValueError('value and axes have different lengths.')\n\n # if defaults given, fill up all dimensions not specified by axes to default values\n if default is not None:\n default = np.array(default)\n default[axes] = val\n return default\n return np.array(val)", "def test_monkey_patch_default_variable_placement_strategy(\n self, num_tasks, op_names, before_want_ps, after_want_ps):\n\n var_ops = [tf.Variable(0., name=op_name).op for op_name in op_names]\n before_device_fn = tf.compat.v1.train.replica_device_setter(\n ps_tasks=num_tasks)\n self.assertEqual(before_want_ps, [before_device_fn(op) for op in var_ops])\n\n with 
monkey_patch_default_variable_placement_strategy():\n after_device_fn = tf.compat.v1.train.replica_device_setter(\n ps_tasks=num_tasks)\n self.assertEqual(after_want_ps, [after_device_fn(op) for op in var_ops])\n\n # Check that monkey-patch is only for the context.\n before_device_fn = tf.compat.v1.train.replica_device_setter(\n ps_tasks=num_tasks)\n self.assertEqual(before_want_ps, [before_device_fn(op) for op in var_ops])", "def modify_class(cls,name=None,default=noop):\n def wrapper(fn):\n \"\"\"\n The actual decorator returned by modify_class, which actually modifies the class.\n \n @param fn: the function to decorate\n @return: the argument function.\n \"\"\"\n if name is None:\n name_ = fn.__name__\n else:\n name_ = name\n original_method = getattr(cls,name_,default)\n new_method = fn(original_method)\n setattr(cls,name_,new_method)\n return fn\n return wrapper", "def set_default(self, name, default, group=None):\n opt_info = self._get_opt_info(name, group)\n opt_info['default'] = self._get_enforced_type_value(\n opt_info['opt'], default)\n opt_info['location'] = LocationInfo(\n Locations.set_default,\n _get_caller_detail(3), # this function has a decorator to skip\n )", "def default_command(self, function):\r\n if Inspection.find_calling_module() == '__main__':\r\n if None in self._commands:\r\n defaults = (self._commands[None].__name__, function.__name__)\r\n raise self.Error('Found two default commands: %s and %s' % defaults)\r\n self._commands[None] = function\r\n return function", "def add_default_reverse_numeric_op(op_name):\n add_reverse_numeric_op(\"__r%s__\"%op_name)", "def get_default_config(cls):\n default = super(LSHNearestNeighborIndex, cls).get_default_config()\n\n lf_default = plugin.make_config(get_lsh_functor_impls())\n default['lsh_functor'] = lf_default\n\n di_default = plugin.make_config(get_descriptor_index_impls())\n default['descriptor_index'] = di_default\n\n hi_default = plugin.make_config(get_hash_index_impls())\n default['hash_index'] = hi_default\n default['hash_index_comment'] = \"'hash_index' may also be null to \" \\\n \"default to a linear index built at \" \\\n \"query time.\"\n\n h2u_default = plugin.make_config(get_key_value_store_impls())\n default['hash2uuids_kvstore'] = h2u_default\n\n return default", "def get_default(self):\r\n if self.has_default:\r\n if callable(self.default):\r\n return self.default()\r\n else:\r\n return self.default", "def register_default_plugins(self):\n self._requires_register_default = True", "def defaultargs(options):\n config = {}\n for longname, default, _ in options:\n config[longname] = default\n return config", "def parameters_default(cls):\n return cls._Parameters.__new__.__defaults__", "def compute_default(self):\n if self.default is None and callable(self.compute_default_fn):\n self.default=self.compute_default_fn() \n if self.default not in self.objects:\n self.objects.append(self.default)", "def makeDefaultParser(defaultLog=\"none\", **kwargs):\n return addDefaultArgs(argparse.ArgumentParser(**kwargs), defaultLog)", "def test_callable_defaults(self):\r\n v1 = GroovyTestModel.create(text='cross fingers')\r\n assert v1.return_default() == 5000", "def update_model_kwargs_logic(default_kwargs: dict = None, user_kwargs: dict = None):\n out = {}\n if default_kwargs is None:\n default_kwargs = {}\n if user_kwargs is None:\n user_kwargs = {}\n\n # Check valid kwargs\n for iter_key in user_kwargs.keys():\n if iter_key not in default_kwargs:\n raise ValueError(\"Model kwarg {0} not in default arguments 
{1}\".format(iter_key, default_kwargs.keys()))\n\n out.update(default_kwargs)\n\n # Nested update of kwargs:\n def _nested_update(dict1, dict2):\n for key, values in dict2.items():\n if key not in dict1:\n print(\"WARNING:kgcnn: Unknown model kwarg {0} with value {1}\".format(key, values))\n dict1[key] = values\n else:\n if isinstance(dict1[key], dict) and isinstance(values, dict):\n # The value is a dict of model arguments itself. Update the same way.\n dict1[key] = _nested_update(dict1[key], values)\n elif isinstance(dict1[key], dict) and not isinstance(values, dict):\n # If values is None, means no information, keep dict1 values untouched.\n if values is not None:\n raise ValueError(\"Can not overwriting dictionary of {0} with {1}\".format(key, values))\n else:\n # Just any other value to update\n dict1[key] = values\n return dict1\n\n return _nested_update(out, user_kwargs)", "def without_defaults(self):\n ...", "def apply(self, func, *args, **kwargs):\n pass", "def create_default(cls):\n raise NotImplementedError(common.OVERRIDE_MESSAGE)", "def test_onearg_and_default(self):\n varargs = (12,)\n kwargs = {}\n method = getattr(self.foo,'f_onearg_and_default')\n var_dict = reassign_function_arguments(method, varargs, kwargs)\n self.assert_(var_dict['arg1'] == 12)\n self.assert_(var_dict['default'] == 1)\n self.assert_(len(var_dict) == 2)\n var_dict = reassign_function_arguments(method, (12, 13), kwargs)\n self.assert_(var_dict['arg1'] == 12)\n self.assert_(var_dict['default'] == 13)\n self.assert_(len(var_dict) == 2)", "def defaults(self, **kwargs):\n for i in kwargs:\n self._.setdefault(i, kwargs[i])\n return self", "def _defaultFilter(self, *args, **kwargs):\n\n return True", "def argument(arg, default):\n return \"{0}={1}\".format(arg, default) if default else arg", "def SetDefaultParams(self, *args):\n return _BRepAlgo.BRepAlgo_NormalProjection_SetDefaultParams(self, *args)", "def _support_op(*args):\n def inner(func):\n for one_arg in args:\n _op_mapping_[one_arg] = func\n return func\n\n return inner", "def _defaultTransformer(self, working_stats, params):\n\n return working_stats", "def set_default_parameters(self):\n super().set_default_parameters()" ]
[ "0.82795125", "0.6885742", "0.6571699", "0.6179261", "0.60997844", "0.60997844", "0.60086507", "0.597563", "0.5938139", "0.59069514", "0.5854104", "0.57146806", "0.5672426", "0.5663039", "0.56135994", "0.55640954", "0.5539715", "0.5507615", "0.5487402", "0.5466438", "0.5432631", "0.541383", "0.5406963", "0.539561", "0.5395278", "0.5395278", "0.5379566", "0.5352047", "0.5345206", "0.5293504", "0.5289668", "0.52610826", "0.5251326", "0.5243171", "0.5221887", "0.5206433", "0.5181493", "0.5173365", "0.5165944", "0.51355034", "0.51295745", "0.5125083", "0.51205504", "0.51197577", "0.50962454", "0.5095522", "0.5087894", "0.5086534", "0.5075198", "0.5039925", "0.50166976", "0.50150776", "0.5010371", "0.5008131", "0.50020754", "0.4995637", "0.49933887", "0.4991319", "0.4991302", "0.49788535", "0.49769166", "0.4966978", "0.49347976", "0.49171352", "0.48854387", "0.48821327", "0.4872855", "0.4867309", "0.48638192", "0.48600262", "0.48600262", "0.48534256", "0.48429534", "0.48405865", "0.4830495", "0.48201305", "0.48167607", "0.48018765", "0.47808415", "0.47790566", "0.47663176", "0.4765365", "0.47638726", "0.47520724", "0.47506708", "0.47482944", "0.47439003", "0.47399494", "0.47331005", "0.4723553", "0.4714472", "0.47078824", "0.47056708", "0.47035113", "0.47022", "0.46886855", "0.46871847", "0.4683892", "0.46827358", "0.46737614" ]
0.8326598
0
Remove @task or similar decorators as well as @setup and @teardown.
def remove_task_decorator(python_source: str, task_decorator_name: str) -> str:
    def _remove_task_decorator(py_source, decorator_name):
        if decorator_name not in py_source:
            return python_source
        split = python_source.split(decorator_name)
        before_decorator, after_decorator = split[0], split[1]
        if after_decorator[0] == "(":
            after_decorator = _balance_parens(after_decorator)
        if after_decorator[0] == "\n":
            after_decorator = after_decorator[1:]
        return before_decorator + after_decorator

    decorators = ["@setup", "@teardown", task_decorator_name]
    for decorator in decorators:
        python_source = _remove_task_decorator(python_source, decorator)
    return python_source
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clear_decorators(self):\n self._decorators = []", "def apply_decorators(*decorators):\n def dec(fn):\n for d in reversed(decorators):\n fn = d(fn)\n return fn\n return dec", "def del_functions(self, *args):\n if len(args) > 0:\n attrs = args\n else:\n self._user_function.clear()", "def _reduce_decorators_to_pytest(self, decorators):\n reduced = []\n for decorator in decorators:\n try:\n if decorator.func.value.value.id == 'pytest':\n reduced.append(decorator)\n except AttributeError:\n pass\n return reduced", "def remove(func):", "def remove_decorations(self, ot: str):\n t = remove_cvref(ot)\n if is_pointer_type(t):\n return self.remove_decorations(pointer_base(t))\n if is_array_type(t):\n return self.remove_decorations(array_base(t))\n return t", "def multiple_decorator(x):\n return x", "def remove_command(self, func):\n del self.commands[func.__name__]", "def remove_feature_accessors(obj, feats: FeaturesTuple):\n for feat in feats:\n try:\n delattr(obj, feat.get_name())\n\n except AttributeError:\n pass", "def decorator_group(decorators):\n def group(f):\n for decorator in decorators:\n f = decorator(f)\n return f\n return group", "def decorate(*decorators):\n if len(decorators) > 1:\n decorators = list(decorators)\n decorators.reverse()\n\n def callback(frame, k, v, old_locals):\n for d in decorators:\n v = d(v)\n return v\n return decorate_assignment(callback)", "def decorator(func):\n\n pass", "def dispatch_as(*decorators):\n\n if len(decorators)>1:\n decorators = list(decorators)\n decorators.reverse()\n\n def callback(frame,k,v,old_locals):\n for d in decorators:\n v = d(v)\n return v\n\n from peak.util.decorators import decorate_assignment\n return decorate_assignment(callback)", "def remove_custom(self, opset: OpsetVersion) -> None:\n if not self._functions.custom_added(opset):\n warnings.warn(\n f\"No custom function registered for '{self._name}' opset {opset}\"\n )\n return\n self._functions.remove_custom(opset)", "def tearDown(self):\n ex2_decorators.ex_dec = self.original_decorator", "def remove():\n pass", "def remove_modifiers(*values, sort=False, mod_set=None):\n\tfeatures = []\n\tfor f in values:\n\t\t(name, mods) = split_modifiers(f, mod_set=mod_set)\n\t\tif name not in features:\n\t\t\tfeatures.append(name)\n\tif sort:\n\t\tfeatures.sort()\n\treturn features", "def test_skip_with_decorator_and_reason():\n pass", "def decorated(origFunc, newFunc, decoration='None'):\n\n pass", "def command_decorators_changes(query):\n return (\n query.select_method(\"cmd_groups\") # noqa: BLK100\n .rename(\"get_groups\")\n .select_method(\"cmd_screens\")\n .rename(\"get_screens\")\n .select_method(\"opacity\")\n .rename(\"set_opacity\")\n .select_method(\"cmd_opacity\")\n .rename(\"set_opacity\")\n .select_method(\"hints\")\n .rename(\"get_hints\")\n .select_method(\"cmd_hints\")\n .rename(\"get_hints\")\n )", "def remove_callback(self, chain):", "def undoc(func):\n return func", "def remove_handler ( handler_list, handler_function ):\n if handler_function in handler_list:\n handler_list.remove ( handler_function )", "def _decorate(obj):\n docstring=inspect.getdoc(obj) or ''\n MO=re.match(r'\\[([\\w_ ,]*)\\]',docstring)\n # for instance [name_1, name_2, name_3] is matched\n if MO:\n decorators=MO.group(1).split(',')\n try: dclasses=[StoredDecorators.dic[n] for n in decorators]\n except KeyError: raise UnknownDecoratorError(n)\n dclasses=noconflict.remove_redundant(dclasses)\n decname=''.join([d.__name__ for d in dclasses])\n decorator=StoredDecorators.dic.get(decname)\n if not 
decorator: decorator=makecls()(decname,dclasses,{})\n if issubclass(decorator,ClassDecorator): return decorator(obj)()\n return decorator(obj)", "def _func2_undecorated(arg1=None, arg2=None, arg3=None):\n pass", "def remove_robots(): #py:remove_robots\n RUR._remove_robots_()", "def __rmgeneric(path, __func__):\n try:\n __func__(path)\n #print 'Removed ', path\n except OSError, (_, strerror): #IGNORE:W0612\n print \"\"\"Error removing %(path)s, %(error)s \"\"\" % {'path' : path, 'error': strerror }", "def decorator():\n return _decorator", "def test_ignore_lack_of_metadata():\n\n def original(something, dispatcher, intent):\n \"\"\"Original!\"\"\"\n pass\n\n new_func = partial(original, \"something\")\n original.attr = 1\n wrapped = do(new_func)\n assert wrapped.__name__ == \"do_wrapper\"", "def removeControl(*args):", "def removeControl(*args):", "def removeControl(*args):", "def removeControl(*args):", "def inject_decorator_namespace(self) -> None:\n self.student_code.mod.turn_left = self.karel_action_decorator(\n self.karel.turn_left\n )\n self.student_code.mod.move = self.karel_action_decorator(\n self.karel.move\n )\n self.student_code.mod.put_beeper = self.beeper_action_decorator(\n self.karel.put_beeper\n )\n self.student_code.mod.pick_beeper = self.beeper_action_decorator(\n self.karel.pick_beeper\n )\n self.student_code.mod.paint_corner = self.corner_action_decorator(\n self.karel.paint_corner\n )\n self.student_code.mod.put_block = self.block_action_decorator(\n self.karel.put_block\n )\n self.student_code.mod.destroy_block = self.karel_action_decorator(\n self.karel.destroy_block\n )\n self.student_code.mod.remove_paint = self.karel_action_decorator(\n self.karel.remove_paint\n )", "def noop_decorator(func):\n return func", "def disable(func):\n return func", "def _draw_decorators(self):\n return \" \".join(self._decorators)", "def remove():", "def remove_ops(self):\n return self._remove_ops", "def auto_omit():\n\tpass", "def wrapper_fun(*args):\n print(\"Hello Decorator\")\n return fun(*args)", "def _hotfix_dispatch_remove():\n import sqlalchemy\n\n if sqlalchemy.__version__ >= \"0.9.4\":\n return\n\n from sqlalchemy.event.attr import _DispatchDescriptor\n from sqlalchemy.event import registry\n\n def remove(self, event_key):\n target = event_key.dispatch_target\n stack = [target]\n while stack:\n cls = stack.pop(0)\n stack.extend(cls.__subclasses__())\n if cls in self._clslevel:\n self._clslevel[cls].remove(event_key._listen_fn)\n registry._removed_from_collection(event_key, self)\n\n _DispatchDescriptor.remove = remove", "def remove_function(self, index = 0):\n raise NotImplementedError()", "def _generate_pytest_decorators(self, decorators):\n generated_decorators = []\n for decorator in decorators:\n mark_name = decorator.func.attr\n args = []\n for arg in decorator.args:\n args.append(\"'{}'\".format(arg.s))\n d = \"@pytest.mark.{}({})\".format(mark_name, ', '.join(args))\n generated_decorators.append(d)\n return generated_decorators", "def visit_FunctionDef(self, node):\n self.functions[node.name] = self._generate_pytest_decorators(node.decorator_list)\n self.generic_visit(node)", "def undecorate_func(func):\n while True:\n if func.__closure__:\n for cell in func.__closure__:\n if inspect.isfunction(cell.cell_contents):\n if func.__name__ == cell.cell_contents.__name__:\n func = cell.cell_contents\n break\n else:\n break\n return func", "def isDecorated(self):", "def _remove_operator(self, operator):", "def good_decorator(decorator): \n def new_decorator(f):\n g = 
decorator(f)\n g.__name__ = f.__name__\n g.__doc__ = f.__doc__\n g.__dict__.update(f.__dict__)\n return g\n \n new_decorator.__name__ = decorator.__name__\n new_decorator.__doc__ = decorator.__doc__\n new_decorator.__dict__.update(decorator.__dict__)\n\n return new_decorator", "def test_issue_55():\n\n # full name change including stack trace\n\n @with_signature('bar()')\n def foo():\n return 'a'\n\n assert \"bar at\" in repr(foo)\n assert foo.__name__ == 'bar'\n assert foo() == 'a'\n\n # only metadata change\n\n @with_signature(None, func_name='bar')\n def foo():\n return 'a'\n\n if sys.version_info >= (3, 0):\n assert \"foo at\" in repr(foo)\n assert foo.__name__ == 'bar'\n assert foo() == 'a'", "def undo(func):\r\n @wraps(func)\r\n def _undofunc(*args, **kwargs):\r\n try:\r\n # start an undo chunk\r\n mc.undoInfo(ock=True)\r\n return func(*args, **kwargs)\r\n finally:\r\n # after calling the func, end the undo chunk\r\n mc.undoInfo(cck=True)\r\n return _undofunc", "def _walk_decorator_stack(func: CallableT) -> Iterable['CallableT']:\n while hasattr(func, \"__wrapped__\"):\n yield func\n\n func = getattr(func, \"__wrapped__\")\n\n yield func", "def module(filter_):\n def decorator(module_fn):\n \"\"\"Decorates a module function.\"\"\"\n _FILTERS_AND_SAMPLERS.append((filter_, module_fn))\n return module_fn\n return decorator", "def get_decorators( this, fn) :\n\t\tnative_fn = this._get_native_function( fn)\n\t\tdecorators = []\n\t\t\n\t\tif this.DECORATORS in native_fn.__annotations__ :\n\t\t\tdecorators = native_fn.__annotations__[this.DECORATORS]\n\t\t\n\t\treturn decorators", "def cleanup(*args, **kwargs): # real signature unknown\n pass", "def setDecorated(self, decorated):", "def test_decorator(f):\n return f", "def remove(name):", "def remove_handler(handler_list, handler_function):\n if handler_function in handler_list:\n handler_list.remove(handler_function)", "def decorator(module_fn):\n _FILTERS_AND_SAMPLERS.append((filter_, module_fn))\n return module_fn", "def visit_Import(self, node: ast.Import) -> None: # pylint: disable=invalid-name\n for name in node.names:\n if name.name == self._decorator_package:\n if name.asname:\n # import <package> as <name>\n self._current_file_decorators.add(\n name.asname + '.' + self._decorator_symbol\n )\n else:\n # import <package>\n _, module = self._decorator_package.rsplit('.', 1)\n self._current_file_decorators.add(\n module + '.' 
+ self._decorator_symbol\n )\n self.generic_visit(node)", "def builderRemoved(builderName):", "def removeBeforeRender(call, args=(), kwargs={}, nodeClass='Write'):", "def pre_delete_function_set(sender, instance, **kwargs):\n # Delete all functions (this also deletes the links).\n instance.function_set.all().delete()", "def unload_aliases():\n for name in COMMANDS:\n del builtins.aliases[name]\n alt_name = name.replace(\"_\", \"-\")\n if alt_name in builtins.aliases:\n del builtins.aliases[alt_name]", "def rm(*fns):\n for fn in fns:\n try:\n os.remove(fn)\n except FileNotFoundError:\n pass", "def __delitem__(name):", "def remove_blacklist(remit, source, commands):\n blacklisted_commands = list()\n for config in spec.BLACKLIST:\n command = make_command(remit=remit,\n source=source,\n writer=config['writer'],\n pandoc_options=config['pandoc_options'],\n extension=config['extension'])\n blacklisted_commands.append(command)\n commands = [command for command in commands\n if command not in blacklisted_commands]\n return commands", "def _postgres_remove():\n\n sql = []\n sql.append(f\"DROP FUNCTION IF EXISTS {fn_prefix}_history_id() CASCADE;\")\n sql.append(f\"DROP FUNCTION IF EXISTS {fn_prefix}_id() CASCADE;\")\n\n return sql", "def remove_listener(self, event_name, func):\n for w in self.wrappers[event_name]:\n if w[0] == func:\n self.wrappers[event_name].remove(w)\n return super().remove_listener(event_name, w[1])\n # if no wrapper exists try removing the function\n return super().remove_listener(event_name, func)", "def removeAfterRender(call, args=(), kwargs={}, nodeClass='Write'):", "def all_rest_methods(decorator):\n\n# ADD OTHER METHODS HERE, IF SOME ARE MISSING\n api_methods = ['get', 'post', 'put', 'patch', 'delete'] # , 'search']\n\n def decorate(cls):\n # there's propably a better way to do this\n for attr in cls.__dict__:\n # Check if method and in it's in my list\n if attr in api_methods and callable(getattr(cls, attr)):\n logger.debug(\"Decorating %s as api method\"\n % (cls.__name__ + \".\" + attr))\n setattr(cls, attr, decorator(getattr(cls, attr)))\n return cls\n return decorate", "def preserve_signature(decorator):\n\tdef new_decorator(f):\n\t\tg = decorator(f)\n\t\tg.__name__ = f.__name__\n\t\tg.__doc__ = f.__doc__\n\t\tg.__dict__.update(f.__dict__)\n\t\treturn g\n\t# Now a few lines needed to make simple_decorator itself\n\t# be a well-behaved decorator.\n\tnew_decorator.__name__ = decorator.__name__\n\tnew_decorator.__doc__ = decorator.__doc__\n\tnew_decorator.__dict__.update(decorator.__dict__)\n\treturn new_decorator", "def clean(cls, generator) -> None:\n\n for verb, event in generator.twimlir:\n if verb.is_ssml:\n verb.variable_name = camelize(\n f'ssml_{verb.name}', uppercase_first_letter=False\n )\n verb.method_name = camelize(verb.name, uppercase_first_letter=False)\n verb.name = camelize('ssml_' + verb.name)\n import_name = f\"import com.twilio.twiml.voice.{verb.name};\"\n generator.specific_imports.add(import_name)\n if verb.name == 'Sip' and verb.parent.name == 'Refer':\n verb.name = 'ReferSip'\n\n cls.verb_processing(verb, generator.specific_imports)\n\n rename_attr(verb, 'for', 'for_')\n rename_attr(verb, 'break', 'break_')", "def dec(func):\n\n\n def deco(*a, **b):\n \"\"\" Decorator for public method\"\"\"\n if not SettingsService.is_first_run():\n return func(*a, **b)\n raise cherrypy.HTTPRedirect('/settings/settings')\n\n\n return deco", "def remove(self):", "def deprecate(\n replacement: Optional[str] = None,\n deprecate: Optional[str] = None,\n remove: 
Optional[str] = None,\n ):\n def decorator(func):\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n if inspect.isclass(func):\n func_name = func.__class__.__name__\n else:\n func_name = func.__name__\n\n time_str = \"now\" if deprecate is None else deprecate\n\n s = f\"{func_name} is deprecated from {time_str}!\"\n\n if remove is not None:\n s += f\" It will be removed from nnDetection from {remove}\"\n if replacement is not None:\n s += f\" The replacement is {replacement}.\"\n else:\n s += f\" There will be no replacement.\"\n\n logger.warning(s)\n return func(*args, **kwargs)\n return wrapper\n return decorator", "def _del(self, *args):\n return _ida_hexrays.hexwarns_t__del(self, *args)", "def cleanup(modpath):\n yield\n\n # Remove dummy modules from sys.modules\n pathlist = [p for p in sys.modules\n if p != modpath and p.startswith(modpath)]\n\n for p in pathlist:\n del sys.modules[p]\n\n if modpath in sys.modules:\n del sys.modules[modpath]\n\n # Remove TaskImporter\n index = [i for i, obj in enumerate(sys.meta_path)\n if isinstance(obj, TaskImporter)]\n\n for i in reversed(index):\n assert isinstance(sys.meta_path[i], TaskImporter)\n sys.meta_path.pop(i)", "def replace_with(*, replacement):\n def _apply_decorator(f):\n return replacement\n return _apply_decorator", "def RemoveHooks(obj: object) -> None:\n for function in obj.__dict__.values():\n if not callable(function):\n continue\n\n hook_targets = getattr(function, \"HookTargets\", None)\n if hook_targets is None:\n continue\n\n for target in hook_targets:\n unrealsdk.RemoveHook(target, function.HookName)", "def remove_punc(self, r):\n c = ''\n useless = [',', '+', '-', '*', '/', '=', ',', '.']\n for d in r:\n if d not in useless:\n c += d\n brackets = ['(', ')', '[', ']', '{', '}', '<', '>']\n d = str(c)\n c = ''\n brac_cnt = 0\n for i in d:\n if i == '(' or i == '[' or i in '{':\n brac_cnt += 1\n if i == ')' or i == ']' or i == '}':\n brac_cnt -= 1\n if i not in brackets:\n if brac_cnt <= 0:\n c += i\n return c", "def stripall(self, lst):\n return map(operator.methodcaller(\"strip\"), lst)", "def _canDisable(func):\n def wrapper(*args, **kwargs):\n if _DISABLE_ASSERTIONS == 0:\n return func(*args, **kwargs)\n return wrapper", "def strip_html(func):\n\n cleaner = re.compile(\"<.*?>\")\n def new_func(*args, strip_html=False, **kwargs):\n name = func(*args, **kwargs)\n if strip_html:\n if isinstance(name, str):\n return html.unescape(re.sub(cleaner, \"\", name))\n elif isinstance(name, list) or isinstance(name, tuple):\n return type(name)([html.unescape(re.sub(cleaner, \"\", n)) for n in name])\n else:\n return name\n new_func.__name__ = func.__name__\n new_func.__doc__ = func.__doc__\n return new_func", "def _disable_decorator(msg):\n def decorator(func):\n @functools.wraps(func)\n def _wrapper(self, *args, **kwargs):\n raise RuntimeError(msg.format(func.__name__))\n _wrapper.__doc__ = None\n return _wrapper\n return decorator", "def remove(self, *args):\n pass", "def remove(self, middleware):\n pass # pragma: no cover", "def _decorate ( name = _name ) :\n import LoKiCore.decorators as _LoKiCore\n _mcp = 'const LHCb::MCParticle*'\n _mcv = 'const LHCb::MCVertex*'\n #_vp = std.vector( _mcp )\n #_vv = std.vector( _mcv )\n #_vd = std.vector( 'double' )\n _vp = 'std::vector<const LHCb::MCParticle*>' ## std.vector( _mcp )\n _vv = 'std::vector<const LHCb::MCVertex*>' ## std.vector( _mcv )\n _vd = 'std::vector<double>' ## std.vector( 'double' )\n #\n \n # MCParticle -> double\n \n _decorated = _LoKiCore.getAndDecorateFunctions ( 
\n name , ## module name \n LoKi.Functor (_mcp,'double') , ## the base\n LoKi.Dicts.FunCalls (LHCb.MCParticle) , ## call-traits\n LoKi.Dicts.FuncOps (_mcp,_mcp) ) ## operators&operations\n \n # MCVertex -> double\n \n _decorated |= _LoKiCore.getAndDecorateFunctions ( \n name , ## module name \n LoKi.Functor (_mcv,'double') , ## the base\n LoKi.Dicts.FunCalls (LHCb.MCVertex) , ## call-traits\n LoKi.Dicts.FuncOps (_mcv,_mcv) ) ## operators&operations\n \n # MCParticle -> bool\n \n _decorated |= _LoKiCore.getAndDecoratePredicates (\n name , ## module name \n LoKi.Functor (_mcp,bool) , ## the base\n LoKi.Dicts.CutCalls (LHCb.MCParticle) , ## call-traits\n LoKi.Dicts.CutsOps (_mcp,_mcp) ) ## operators&operations\n \n # MCVertex -> bool\n \n _decorated |= _LoKiCore.getAndDecoratePredicates (\n name , ## module name \n LoKi.Functor (_mcv,bool) , ## the base\n LoKi.Dicts.CutCalls (LHCb.MCVertex) , ## call-traits\n LoKi.Dicts.CutsOps (_mcv,_mcv) ) ## operators&operations\n\n ## functional part:\n \n # vector<T> -> vector<double>\n \n _decorated |= _LoKiCore.getAndDecorateMaps (\n name , ## module name \n LoKi.Functor (_vp,_vd) , ## the base\n LoKi.Dicts.MapsOps(_mcp) ) ## call-traits\n _decorated |= _LoKiCore.getAndDecorateMaps (\n name , ## module name \n LoKi.Functor (_vv,_vd) , ## the base\n LoKi.Dicts.MapsOps(_mcv) ) ## call-traits\n\n # vector<T> -> vector<T>\n\n _decorated |= _LoKiCore.getAndDecoratePipes (\n name , ## module name \n LoKi.Functor (_vp,_vp) , ## the base\n LoKi.Dicts.PipeOps(_mcp,_mcp) ) ## call-traits\n _decorated |= _LoKiCore.getAndDecoratePipes (\n name , ## module name \n LoKi.Functor (_vv,_vv) , ## the base\n LoKi.Dicts.PipeOps(_mcv,_mcv) ) ## call-traits\n\n # vector<T> -> double\n \n _decorated |= _LoKiCore.getAndDecorateFunVals ( \n name , ## module name \n LoKi.Functor (_vp,'double') , ## the base\n LoKi.Dicts.FunValOps(_mcp) ) ## call-traits\n _decorated |= _LoKiCore.getAndDecorateFunVals ( \n name , ## module name \n LoKi.Functor (_vv,'double') , ## the base\n LoKi.Dicts.FunValOps(_mcv) ) ## call-traits\n\n # vector<T> -> bool\n\n _decorated |= _LoKiCore.getAndDecorateCutVals ( \n name , ## module name \n LoKi.Functor (_vp,bool) , ## the base\n LoKi.Dicts.CutValOps(_mcp) ) ## call-traits\n _decorated |= _LoKiCore.getAndDecorateCutVals ( \n name , ## module name \n LoKi.Functor (_vv,bool) , ## the base\n LoKi.Dicts.CutValOps(_mcv) ) ## call-traits\n\n #sources : void -> vector<T>\n\n _decorated |= _LoKiCore.getAndDecorateSources ( \n name , ## module name \n LoKi.Functor ('void',_vp) , ## the base\n LoKi.Dicts.SourceOps(_mcp,_mcp) ) ## call-traits\n _decorated |= _LoKiCore.getAndDecorateSources ( \n name , ## module name \n LoKi.Functor ('void',_vv) , ## the base\n LoKi.Dicts.SourceOps(_mcv,_mcv) ) ## call-traits\n\n\n ## primitive voids:\n\n _decorated |= _LoKiCore.getAndDecoratePrimitiveVoids ( name ) \n \n \n ## decorate pids (Comparison with strings, integers and ParticleID objects):\n for t in ( MCID , MCABSID ) :\n t = type ( t ) \n _LoKiCore.decoratePID ( t , LoKi.Dicts.PIDOps ( t ) )\n _decorated.add ( t )\n\n \n return _decorated", "def remove(self, *args):\n return _libsbml.ListOfFunctionDefinitions_remove(self, *args)", "def funcnamefilter(self, name):\n return not name.startswith('_')", "def funcnamefilter(self, name):\n return not name.startswith('_')", "def clean_python(c):\n _delete_pattern(\"__pycache__\")\n _delete_pattern(\"*.pyc\")\n _delete_pattern(\"*.pyo\")\n _delete_pattern(\"*~\")", "def clean_up_logging_decorator(func):\n filename = 
filename_regexp.match(inspect.getmodule(inspect.stack()[1][0]).__file__).group(1)\n\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n logger.info('{}: Start cleaning up'.format(filename))\n func(*args, **kwargs)\n logger.info('{}: Finished cleaning up'.format(filename))\n return wrapper", "def unregister():\n for name in _registered_ops:\n try:\n torch.onnx.unregister_custom_op_symbolic(name, _OPSET_VERSION)\n except AttributeError:\n # The symbolic_registry module was removed in PyTorch 1.13.\n # We are importing it here for backwards compatibility\n # because unregister_custom_op_symbolic is not available before PyTorch 1.12\n from torch.onnx import symbolic_registry\n\n namespace, kind = name.split(\"::\")\n for version in symbolic_helper._onnx_stable_opsets:\n if version >= _OPSET_VERSION and symbolic_registry.is_registered_op(kind, namespace, version):\n del symbolic_registry._registry[(namespace, version)][kind]", "def accept_funcs(func):\n @wraps(func)\n def wrapper(funcs, *args, **kwargs):\n if hasattr(funcs[0], '__name__'):\n funcs = [(f.__name__, f) for f in funcs]\n return func(funcs, *args, **kwargs)\n return wrapper", "def filter_methods(methods: list) -> list:\n \n if OCTOPUS_INCLUSION_PATTERNS:\n methods = filter_on_inclusion(OCTOPUS_INCLUSION_PATTERNS, methods)\n \n elif OCTOPUS_EXCLUSION_PATTERNS:\n methods = filter_on_exclusion(OCTOPUS_EXCLUSION_PATTERNS, methods) \n \n return methods", "def keyword_decorator(deco):\n\n @functools.wraps(deco)\n def new_deco(fn=None, **kwargs):\n if fn is None:\n\n @functools.wraps(deco)\n def newer_deco(fn):\n return deco(fn, **kwargs)\n\n return newer_deco\n else:\n return deco(fn, **kwargs)\n\n return new_deco", "def test_uses_wraps(self):\n @self.actions(\"ctx_name\", [])\n def myview(request, some_id):\n \"\"\"docstring\"\"\"\n\n self.assertEqual(myview.func_name, \"myview\")\n self.assertEqual(myview.func_doc, \"docstring\")", "def exclude(self, *args, **kwargs):" ]
[ "0.6751675", "0.6575618", "0.64617324", "0.6306262", "0.62398183", "0.61137396", "0.59577435", "0.5811717", "0.5579527", "0.55659074", "0.55599767", "0.5556737", "0.5375476", "0.537497", "0.53370106", "0.53344274", "0.53249407", "0.53044754", "0.5283288", "0.5226591", "0.52098817", "0.52075154", "0.5199972", "0.51950824", "0.51814985", "0.51555276", "0.5148256", "0.5145276", "0.51226276", "0.5121703", "0.5121703", "0.5121703", "0.5121703", "0.5103859", "0.5101407", "0.5092641", "0.5088474", "0.5070465", "0.506451", "0.5063243", "0.5059842", "0.5043122", "0.50324494", "0.5013188", "0.5003347", "0.49952972", "0.49930406", "0.49904263", "0.4954487", "0.495206", "0.49446142", "0.49260813", "0.4925268", "0.49228513", "0.49051213", "0.49012312", "0.48968634", "0.48968074", "0.4881574", "0.48812276", "0.48766026", "0.48742232", "0.486788", "0.48561338", "0.4848581", "0.48428103", "0.48331884", "0.48256284", "0.4822766", "0.4821948", "0.48217434", "0.48179415", "0.48145574", "0.48117673", "0.4808898", "0.48082113", "0.47982872", "0.4797194", "0.47950864", "0.479025", "0.47837", "0.4776153", "0.47705716", "0.47652635", "0.47635147", "0.47603643", "0.47603133", "0.47588444", "0.4755776", "0.47555733", "0.47475272", "0.47475272", "0.47416446", "0.47343746", "0.47329924", "0.47227857", "0.472226", "0.47205415", "0.47177434", "0.47157955" ]
0.57927924
8
Sending request with all mandatory fields with valid values
def test_01(self): assert 'True' == Api.requestBlock('test-01')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_validation_request(self):\r\n self.send_request(send_function=self._assemble_and_send_validation_request)", "def validate_request(req):\n mandatory_fields = conf[\"api\"][\"mandatory-fields\"]\n optional_fields = conf[\"api\"][\"optional-fields\"]\n\n if not req.content_length:\n return {\"invalid\": \"no data\"}\n\n data = req.get_json()\n\n for field in mandatory_fields:\n if field not in data:\n data[\"invalid\"] = f\"`{field}` must be supplied\"\n return data\n\n invalid = globals()[f\"invalid_{field}\"](data[field])\n if invalid:\n data[\"invalid\"] = invalid\n return data\n\n for field, default in optional_fields.items():\n try:\n invalid = globals()[f\"invalid_{field.replace('-', '_')}\"](data[field])\n if invalid:\n data[\"invalid\"] = invalid\n return data\n\n except KeyError:\n data[field] = default\n\n return data", "def validate(self):\n if self._data is None:\n raise BadRequest(\"Malformed request\")\n\n missing = []\n for field in self.MANDATORY:\n if self.get_parameter(field) is None:\n missing.append(field)\n\n if missing:\n raise BadRequest(\"Missing mandatory fields: {}\".format(missing))\n\n return True", "def test_empty_optionals(self):\n data = self.valid_payload\n # data[\"telephone\"] = \"\"\n # data[\"cellphone\"] = \"\"\n data[\"activity_description\"] = \"\"\n # data[\"about\"] = \"\"\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def test_empty_optionals(self):\n data = self.valid_payload\n data[\"telephone\"] = \"\"\n data[\"cellphone\"] = \"\"\n data[\"activity_description\"] = \"\"\n data[\"about\"] = \"\"\n data[\"institute\"] = \"\"\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def validate_manditory_field(schema, args):\n\tmissing_fields = [field for field in schema.keys() if field not in args.keys() and schema.get(field).get(\"reqd\")]\n\textra_fields = [field for field in args.keys() if field not in schema.keys()]\n\n\tif any([missing_fields, extra_fields]):\n\t\terr_msg = \"Invalid request parameters\"\n\t\tif missing_fields: err_msg += \", Missing Fields : (%s)\"%(\",\".join(missing_fields))\n\t\tif extra_fields: err_msg += \", Extra Fields : (%s)\"%(\",\".join(extra_fields))\n\n\t\traise Exception(err_msg)", "def test_incomplete_parameters(self):\n response = self.client.post(\n reverse(\"validate_cast_member\"),\n {'name': None,\n 'role': None,},\n )\n self.assertEqual(response.status_code, 400)\n response = self.client.post(\n reverse(\"validate_cast_member\"),\n {'name': \"Pepe X\",\n 'role': None,},\n )\n self.assertEqual(response.status_code, 400)\n response = self.client.post(\n reverse(\"validate_cast_member\"),\n {'name': None,\n 'role': \"Fotografรญa\",},\n )\n self.assertEqual(response.status_code, 400)", "def test_post_cve_id_empty_params(reg_user_headers):\n res = requests.post(\n f'{env.AWG_BASE_URL}{CVE_ID_URL}',\n headers=reg_user_headers,\n params={\n 'amount': '',\n 'batch_type': '',\n 'cve_year': '',\n 'short_name': ''\n }\n )\n # NOTE: there isn't a `short_name` error here, why?\n assert res.status_code == 400\n response_contains(res, 'amount')\n response_contains(res, 'cve_year')", "def request_fields(*req_args):\n\tdef decorator(f):\n\t\t@wraps(f)\n\t\tdef decorated(*args, **kwargs):\n\t\t\tif not g.req: return 
json_response(dict(description='JSON object must be passed as HTTP body with this request'), 422)\n\t\t\tmissing = []\n\t\t\tfor arg in req_args:\n\t\t\t\tif not g.req.has_key(arg): missing.append(arg)\n\t\t\tif missing: return json_response(dict(description='Mandatory request fields missing', missing_fields=missing), 422)\n\t\t\treturn f(*args, **kwargs)\n\t\treturn decorated\n\treturn decorator", "def check_for_required_fields(cls, fields=[], dataDict={}):\n\n validateRequired = Validate.required(fields=fields, dataDict=dataDict)\n if validateRequired['status'] == False:\n res = jsonify(\n {'status': 400, 'error': validateRequired['message'], 'data': []})\n return abort(make_response(res, 400))\n return True", "def test_missing_parameters(self):\n self.assertEqual(0, self.session.query(User).count())\n for i in ['username', 'email', 'password']:\n self.request.json_body = deepcopy(self.new_account)\n del self.request.json_body[i]\n result = users_post_view(self.request)['d']\n self.assertEqual(result, error_dict('api_errors', 'username, email, and password are all required string fields'))\n self.request.json_body = {}\n result = users_post_view(self.request)['d']\n self.assertEqual(result, error_dict('api_errors', 'username, email, and password are all required string fields'))\n self.request.json_body = {'username': 'justuser'}\n result = users_post_view(self.request)['d']\n self.assertEqual(result, error_dict('api_errors', 'username, email, and password are all required string fields'))", "def _get_req_data(kwargs):\n if request.data:\n req = json.loads(request.data, encoding='utf-8')\n else:\n req = request.args or request.form\n\n if 'coid' not in kwargs:\n required_args = [\n 'name',\n 'consrc'\n ]\n\n for arg in required_args:\n if arg not in req or req[arg] == '':\n return True, make_json_response(\n status=410,\n success=0,\n errormsg=gettext(\n \"Could not find the required parameter ({}).\"\n ).format(arg)\n ), req\n return False, '', req", "def test_missing_or_invalid_parameters(self, body):\n self._login_as_staff()\n response = self.client.post(self.path(), body)\n assert response.status_code == 400\n\n response = self.client.post(self.path(), body, format='json')\n assert response.status_code == 400", "def test_missing_data(self):\n\n response = self.client.post(\n self.reg_url,\n {},\n format=\"json\")\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertIn(b\"required\", response.content)", "def test_signup_invalid_params(self):\n url = '/0/chefs'\n\n # No data\n data = {}\n resp = self.client.post(url, data=data)\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.data['code'], 400)\n self.assertEqual(resp.data['message'], 'Invalid parameters')\n self.assertIn('raw', resp.data)\n error_keys = [e['field'] for e in resp.data['raw'] if 'field' in e]\n self.assertEqual(set(['email', 'name', 'language']), set(error_keys))\n\n # Everything but password or fb_access_token\n data = {\n 'email': 'johndoe@example.com',\n 'name': 'John',\n 'surname': 'Doe',\n 'language': 'es',\n }\n resp = self.client.post(url, data=data)\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.data['code'], 400)\n self.assertEqual(resp.data['message'], 'Invalid parameters')\n self.assertEqual(len(resp.data['raw']), 1)", "def test_make_order_with_some_data_missing(self):\n response = self.api_test_client.post('{}/orders'.format(\n self.BASE_URL), json={'item_name': 'Watermelon'}, headers={\n 'Content-Type': 'application/json'})\n\n 
self.assertEqual(response.status_code, 400)\n self.assertEqual(response_as_json(\n response)['message'], 'Bad request. Missing required param')", "def test_empty_values(client):\n response=client.post(\"/signin\",data=dict(email=\"\", password=TestSignin.password), content_type=\"multipart/form-data\")\n data=json.loads(response.data)\n assert response.status_code==400\n assert data[\"error\"] == \"Please provide values for email and password\"", "def test_control_create_with_empty_field(self, field):\n request = self.prepare_control_request_body()\n request[field] = None\n\n response = self.api.post(all_models.Control, data=request)\n\n self.assert400(response)", "def test_empty_body_patch_request(self):\n self._login_as_staff()\n response = self.patch_request(\"\")\n assert response.status_code == 400\n\n response = self.patch_request({})\n assert response.status_code == 400", "def test_make_order_without_any_request_data(self):\n response = self.api_test_client.post('{}/orders'.format(\n self.BASE_URL), json={}, headers={\n 'Content-Type': 'application/json'})\n\n self.assertEqual(response.status_code, 400)\n self.assertEqual(response_as_json(\n response)['message'], 'Bad request. Missing required param')", "def test_no_optionals(self):\n data = self.valid_payload\n del data[\"telephone\"]\n del data[\"cellphone\"]\n del data[\"activity_description\"]\n del data[\"about\"]\n del data[\"institute\"]\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def verify_post_data ( ):\n # check every field is present\n try:\n request.json[ 'source_lang' ]\n request.json[ 'target_lang' ]\n request.json[ 'text' ]\n\n TranslatorApp.verify_rpc_value ( request.json )\n\n except KeyError: # All the values are not present\n # 400 Bad Request\n abort ( 400, \"All mandatory fields are not provided\" )\n except ValueError as err:\n # 422 Unprocessable Entity\n abort ( 422, \"Unprocessable value: {0}\".format ( err.args ) )\n except BadRequest:\n # 400 Bad Request\n abort ( 400, \"Provided values are having malformed syntax\" )", "def test_no_optionals(self):\n data = self.valid_payload\n # del data[\"telephone\"]\n # del data[\"cellphone\"]\n del data[\"activity_description\"]\n # del data[\"about\"]\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def _validate_params(self):\n assert set(self.required_params) - set(self._params) == set()\n for par, val in self.optional_params.items():\n if par not in self._params:\n self._params[par] = val", "def required_params(arra):\n SPECIAL_CHECK = {\n 'info_hash': REASON_REQUEST_NO_INFO_HASH,\n 'peer_id': REASON_REQUEST_NO_PEER_ID,\n 'port': REASON_REQUEST_NO_PORT,\n }\n\n def wrap_funcn(cb):\n def funcn(*args, **kwargs):\n availablekeys = bottle.request.query.keys()\n for checkkey in arra:\n if not checkkey in availablekeys:\n if checkkey in SPECIAL_CHECK.keys():\n fail(SPECIAL_CHECK[checkkey])\n fail(REASON_REQUEST_ERROR)\n return cb(*args, **kwargs)\n\n return funcn\n\n return wrap_funcn", "def test_no_parameters(self):\n\n x = self.start_request_tests(service.get_request('POST', None))\n self.assert_response_status(x, 400)\n self.assertTrue('tree' in x.json()[u'message'])", "def _test_bad_request_empty_field(self, user, fields, empty_field, zendesk_mock_class, datadog_mock):\r\n altered_fields 
= fields.copy()\r\n altered_fields[empty_field] = \"\"\r\n resp = self._build_and_run_request(user, altered_fields)\r\n self._assert_bad_request(resp, empty_field, zendesk_mock_class, datadog_mock)", "def prepare_control_request_body():\n test_date = datetime.datetime.utcnow().strftime(\"%Y-%m-%d\")\n return {\n \"id\": 123,\n \"title\": \"new_control\",\n \"context\": None,\n \"created_at\": test_date,\n \"updated_at\": test_date,\n \"slug\": \"CONTROL-01\",\n \"external_id\": factories.SynchronizableExternalId.next(),\n \"external_slug\": factories.random_str(),\n \"kind\": \"test kind\",\n \"means\": \"test means\",\n \"verify_frequency\": \"test frequency\",\n \"assertions\": '[\"test assertion\"]',\n \"categories\": '[\"test category\"]',\n \"review_status\": all_models.Review.STATES.UNREVIEWED,\n \"review_status_display_name\": \"some status\",\n \"due_date\": test_date,\n \"created_by\": {\n \"email\": \"creator@example.com\",\n \"name\": \"External Creator\",\n },\n \"last_submitted_at\": test_date,\n \"last_submitted_by\": {\n \"email\": \"owner@example.com\",\n \"name\": \"External Owner\",\n },\n \"last_verified_at\": test_date,\n \"last_verified_by\": {\n \"email\": \"compliance@example.com\",\n \"name\": \"External Compliance\",\n },\n }", "def test_make_order_with_a_missing_field(self):\n\n\t\tres = self.login_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\n\t\torder_data = {\n\t\t\t\t\t\"owner\": \"Pemwa\",\n\t\t\t\t\t\"meal_name\": \"pizza\"\n\t\t\t\t\t }\n\n\t\tresponse = self.client().post(\n\t\t\t'/api/v2/orders',\n\t\t\theaders={\"x-access-token\": access_token},\n\t\t\tdata = json.dumps(\n\t\t\t\torder_data) , content_type = 'application/json')\n\t\tresult = json.loads(response.data)\n\t\tself.assertEqual(result[\"message\"], \"Missing argument\")\n\t\tself.assertEqual(response.status_code, 400)", "def test_signup_when_there_are_missing_fields(self):\n user = {\n 'firstname' : 'Caleb',\n 'lastname' : 'Mbugua',\n 'password' : '12345566'\n }\n\n response = self.client.post('/api/v1/register', json=user, headers={'Content-Type': 'application/json'})\n data = response.get_json()\n\n self.assertEqual(response.status_code, 400)\n self.assertEqual(data['status'], 400)\n self.assertEqual(data['message'], 'Invalid data. 
Please fill all the required fields')", "def test_no_email(self):\n response = self.register({\n 'first_name': \"David\",\n 'last_name': \"Smith\",\n 'password': \"******\",\n 'phone_number': \"012-345-6789\"\n })\n self.assertEqual(response.status_code, 400)\n self.assertDictContainsSubset({'message': \"Missing parameters\"}, response.json())", "def test_check_update_properly_build_request_when_no_custom_data_given():\n request = UpdateDetailRequest('v1', 'MyDevice', None)\n update_helper = UpdateCheckHelper(_api_key, _base_url)\n built_request = update_helper.build_request(request)\n body = json.loads(built_request.body)\n\n assert body['unitId'] == request.unit_id\n assert body['versionId'] == request.version_id\n assert 'customClientData' not in body\n\n headers = built_request.headers\n assert headers['Authorization'] == _api_key\n assert headers['Content-Type'] == 'application/json'", "def test_basic_generic_request(self, api_instance):\n action = \"BasicGenericRequest\"\n\n # Send a basic payload.\n params = {\n \"ADateTime\": datetime.datetime(2020, 10, 12),\n \"ATrueBool\": True,\n \"AFalseBool\": False,\n \"NoneShouldNotExist\": None,\n }\n\n request_params = api_instance.generic_request(action=action, params=params)\n self.assert_common_params(request_params, action=\"BasicGenericRequest\")\n assert request_params[\"ADateTime\"] == \"2020-10-12T00:00:00\"\n assert request_params[\"ATrueBool\"] == \"true\"\n assert request_params[\"AFalseBool\"] == \"false\"\n assert \"NoneShouldNotExist\" not in request_params", "def _check_required_fields(self):\n assert self.title\n assert self.format", "def test_required_mailing_address_missing(self):\r\n self.url_params['mailing_address'] = ''\r\n response = self.client.post(self.url, self.url_params)\r\n self.assertEqual(response.status_code, 400)\r\n obj = json.loads(response.content)\r\n self.assertEqual(\r\n obj['value'],\r\n u'Your mailing address is required',\r\n )", "def test_build_request(self):\n user = {\n 'nickname': 'foo',\n 'bloodtype': 'O',\n 'birthdateY': '1997',\n 'birthdateM': '12',\n 'birthdateD': '31'\n }\n c = Client(apikey='key', user=user)\n ret = c.apis['Dialogue'].build_request(age='17', mode='srtr',\n bloodtype='A')\n user['age'] = 17\n user['mode'] = 'srtr'\n user['bloodtype'] = 'A'\n self.assertEqual(sorted(ret), sorted(user))\n self.assertEqual(sorted(c.apis['Dialogue'].user), sorted(user))", "def test_case_empty(self):\n data = {\"numbers\": \"\"}\n response = self.client.post(\"/api/hi\", data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_create_risk_with_empty_field(self, field):\n risk_body = self.generate_risk_body()\n risk_body[field] = None\n\n response = self.api.post(all_models.Risk, data=risk_body)\n\n self.assert400(response)", "def test_missing_required(self):\n param_types = {\n 'string': str,\n 'unicode': unicode,\n 'integer': int,\n 'boolean': bool,\n 'list': list,\n 'json': 'json',\n 'datetime': 'datetime',\n 'date': 'date',\n }\n expected_types = {\n 'string': '',\n 'unicode': u'',\n 'integer': 0,\n 'boolean': False,\n 'list': [],\n 'json': u'',\n 'datetime': None,\n 'date': None,\n }\n handler = self.create_handler(r'')\n self.assertEqual(handler.get_params(param_types, required=True),\n expected_types)", "def __init__(self, *args, **kwds):\n if args or kwds:\n super(ModifyParametersRequest, self).__init__(*args, **kwds)\n #message fields cannot be None, assign default values for those that are\n if self.major_ax is None:\n self.major_ax = 0.\n if 
self.minor_ax is None:\n self.minor_ax = 0.\n if self.coup_strength is None:\n self.coup_strength = 0.\n if self.limit_cycle is None:\n self.limit_cycle = 0.\n if self.forward_velocity is None:\n self.forward_velocity = 0.\n if self.curvature is None:\n self.curvature = 0.\n if self.x_offset is None:\n self.x_offset = []\n if self.y_offset is None:\n self.y_offset = []\n if self.coupling_1 is None:\n self.coupling_1 = []\n if self.coupling_2 is None:\n self.coupling_2 = []\n if self.coupling_3 is None:\n self.coupling_3 = []\n if self.coupling_4 is None:\n self.coupling_4 = []\n if self.coupling_5 is None:\n self.coupling_5 = []\n if self.coupling_6 is None:\n self.coupling_6 = []\n else:\n self.major_ax = 0.\n self.minor_ax = 0.\n self.coup_strength = 0.\n self.limit_cycle = 0.\n self.forward_velocity = 0.\n self.curvature = 0.\n self.x_offset = []\n self.y_offset = []\n self.coupling_1 = []\n self.coupling_2 = []\n self.coupling_3 = []\n self.coupling_4 = []\n self.coupling_5 = []\n self.coupling_6 = []", "def test_add_flow_request_only_one_validity_date_provided(self):\n for param in ('start_validity', 'expire_validity'):\n flow_request = self.flow_request.copy()\n del flow_request['start_validity']\n res = self._add_flow_request(flow_request=flow_request)", "def test_create_user_missing_mandatory_field_values(self):\n print(\"Missing First Name\")\n kwargs = {\"first_name\":\"\", \"return_response_obj\": True, \"return_failure_response\": True}\n response = self.test_create_user_endpoint(**kwargs)\n\n print(\"Verify Response body\")\n expected_result = self.test_args[\"expected_result\"][\"missing_first_name\"]\n actual_result = json.loads(response.text)[\"message\"]\n assert actual_result == expected_result, \"Test Failed.. Expected: {0}.. Actual: {1}\".format(expected_result,\n actual_result)\n\n print(\"Missing Last Name\")\n kwargs = {\"last_name\": \"\", \"return_response_obj\": True, \"return_failure_response\": True}\n response = self.test_create_user_endpoint(**kwargs)\n\n print(\"Verify Response body\")\n expected_result = self.test_args[\"expected_result\"][\"missing_last_name\"]\n actual_result = json.loads(response.text)[\"message\"]\n assert actual_result == expected_result, \"Test Failed.. Expected: {0}.. Actual: {1}\".format(expected_result,\n actual_result)\n\n print(\"Missing Password Name\")\n kwargs = {\"password\": \"\", \"return_response_obj\": True, \"return_failure_response\": True}\n response = self.test_create_user_endpoint(**kwargs)\n\n print(\"Verify Response body\")\n expected_result = self.test_args[\"expected_result\"][\"missing_password\"]\n actual_result = json.loads(response.text)[\"message\"]\n assert actual_result == expected_result, \"Test Failed.. Expected: {0}.. Actual: {1}\".format(expected_result,\n actual_result)\n\n print(\"Missing Email id\")\n kwargs = {\"email\": \"\", \"return_response_obj\": True, \"return_failure_response\": True}\n response = self.test_create_user_endpoint(**kwargs)\n\n print(\"Verify Response body\")\n expected_result = self.test_args[\"expected_result\"][\"missing_email\"]\n actual_result = json.loads(response.text)[\"message\"]\n assert actual_result == expected_result, \"Test Failed.. Expected: {0}.. Actual: {1}\".format(expected_result,\n actual_result)", "def get_required_params():\n return {}", "def test_empty_data(self, client):\n url = reverse('users:create')\n response = client.post(url)\n assert response.status_code == 200\n assert 'This field is required.' 
in str(response.content)", "def test_create_Student_missing_param(self):\n school_ids = self.create_School(1,20)\n url = '/students'\n\n \"\"\"Normal request\"\"\"\n data = {'first_name': 'Poompatai', 'last_name': 'Puntitpong','age': 20, 'nationality': 'Thailand', 'school': school_ids[0]}\n response = self.client.post(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n\n \"\"\"Missing first_name\"\"\"\n data = {'last_name': 'Puntitpong','age': 20, 'nationality': 'Thailand', 'school': school_ids[0]}\n response = self.client.post(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.data['first_name'][0].code, 'required')\n\n \"\"\"Missing all\"\"\"\n data = {}\n response = self.client.post(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.data['first_name'][0].code, 'required')\n self.assertEqual(response.data['last_name'][0].code, 'required')\n self.assertEqual(response.data['age'][0].code, 'required')\n self.assertEqual(response.data['nationality'][0].code, 'required')\n self.assertEqual(response.data['school'][0].code, 'required')", "def test_create_enforces_required_fields(self):\n serializer = ServiceSerializer(data = {}, context = dict(project = self.project))\n self.assertFalse(serializer.is_valid())\n required_fields = {'name', 'category'}\n self.assertCountEqual(serializer.errors.keys(), required_fields)\n for name in required_fields:\n self.assertEqual(serializer.errors[name][0].code, 'required')", "def _assemble_and_send_validation_request(self):\r\n # Fire off the query.\r\n response = self.client.service.validateShipment(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n RequestedShipment=self.RequestedShipment)\r\n return response", "def _required_parameters(self) -> RequiredParameters:\n return RequiredParameters([])", "def _required_parameters(self) -> RequiredParameters:\n return RequiredParameters([])", "def mandatory_parameter_check(params):\n required_params = ['user_text', 'latitude', 'longitude']\n result = {\n 'fault ': False,\n 'cause ': \" \"\n }\n\n cause = \"Mandatory parameter missing : \"\n for req_param in required_params:\n if req_param not in params:\n cause = cause + \" \" + req_param\n result['fault '] = True\n result['cause'] = cause\n\n return result", "def test_bad_parameters(self):\n self.assertEqual(0, self.session.query(User).count())\n for i in ['username', 'email', 'password']:\n self.request.json_body = deepcopy(self.new_account)\n for val in [x for x in bad_data_typevals_list if not isinstance(x, basestring)]:\n self.request.json_body[i] = val\n result = users_post_view(self.request)['d']\n self.assertEqual(result, error_dict('api_errors',\n 'username, email, and password are all required string fields'))", "def __init__(self, *args, **kwds):\n if args or kwds:\n super(pid_control_reqRequest, self).__init__(*args, **kwds)\n #message fields cannot be None, assign default values for those that are\n if self.yaw is None:\n self.yaw = 0.\n else:\n self.yaw = 0.", "async def put_valid_missing_required( # pylint: disable=inconsistent-return-statements\n self, complex_body: JSON, *, content_type: str = \"application/json\", **kwargs: Any\n ) -> None:", "def _validate(self):\n REQUIRED_KEYS = [ 'name', 'year', 'artist_id', 'genre_ids', 'sources' ]\n\n 
missing_keys = get_missing_keys(self.request.data, REQUIRED_KEYS)\n if len(missing_keys) > 0:\n return f\"Request body is missing the following required properties: {', '.join(missing_keys)}.\"\n\n artist_id = self.request.data['artist_id']\n\n try:\n Artist.objects.get(pk=artist_id)\n except Artist.DoesNotExist:\n return \"`artistId` supplied does not match an existing artist.\" \n\n genre_ids = self.request.data['genre_ids']\n if len(genre_ids) == 0:\n return \"You must specify at least one genre id in `genreIds` array.\"\n\n for genre_id in genre_ids:\n try:\n Genre.objects.get(pk=genre_id)\n except Genre.DoesNotExist:\n return f\"The genre id {genre_id} does not match an existing genre.\"\n\n sources = self.request.data['sources']\n if len(sources) == 0:\n return \"You must specify at least one source in `sources` array.\"\n\n for source in sources:\n if 'service' not in source or 'url' not in source or 'is_primary' not in source:\n return \"All sources must contain `service`, `url`, and `is_primary` properties.\"\n\n primary_sources = [ source for source in sources if source['is_primary'] == True ]\n if len(primary_sources) != 1:\n return \"There must be one and only one primary source.\"\n\n return False", "def test_required_year_of_birth_missing(self):\r\n self.url_params['year_of_birth'] = ''\r\n response = self.client.post(self.url, self.url_params)\r\n self.assertEqual(response.status_code, 400)\r\n obj = json.loads(response.content)\r\n self.assertEqual(\r\n obj['value'],\r\n u'Your year of birth is required',\r\n )", "def test_cannot_make_sale_with_missing_fields(self):\n reply = self.admin_add_product()\n\n resp = self.admin_create_user()\n reply = self.attendant_login()\n token = reply['token']\n sale = dict(products = [\n {\n \"prod_name\":\"\", \n \"quantity\":10\n }\n\t ])\n resp = self.client.post(\n '/api/v1/sales',\n content_type='application/json',\n data=json.dumps(sale),\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n \n self.assertEqual(reply['message'], 'One of the fields is empty!')\n self.assertEqual(resp.status_code, 400)", "def test_empty_fields(self):\n with self.client:\n response = register_user(\n self, '', '', '', '')\n data = json.loads(response.data.decode())\n self.assertTrue(data['status'] == 'fail')\n self.assertTrue(data['message'] == 'Validation errors.')\n self.assertTrue(response.content_type == 'application/json')\n self.assertEqual(response.status_code, 422)", "def test_missing_params(self):\n request = self.factory.post('/dois', format='json')\n response = views.doi_crossref(request)\n assert response.status_code == 400, 'missing doi should cause http status 400, not %d' % (response.status_code)", "def check_sync_input(self, sync_params):\n for req_param in REQUIRED_PARAMS:\n if req_param not in sync_params:\n logger.error(\"Received request with missing '%s' parameter\", req_param)\n raise YKSyncError('MISSING_PARAMETER', req_param)\n if req_param not in ('otp', 'nonce', 'yk_publicname') and not \\\n (sync_params[req_param] == '-1' or isinstance(sync_params[req_param], int)):\n logger.error(\"Input parameter '%s' is not correct\", req_param)\n raise YKSyncError('INVALID_PARAMETER', req_param)", "def test_input_schema(self, data, errors):\n resp = self.client.post(self.url, json=data)\n\n if not errors:\n assert resp.status_code == 200\n assert resp.get_json() == {\n 'status': 'OK',\n 'message': 'Data published via Upload service',\n }\n else:\n assert resp.status_code == 400\n assert 
resp.get_json() == {\n 'status': 'Error',\n 'message': 'Input payload validation failed',\n 'errors': {\n k: ['Missing data for required field.'] for k in errors\n },\n }", "async def put_valid_missing_required( # pylint: disable=inconsistent-return-statements\n self, complex_body: IO, *, content_type: str = \"application/json\", **kwargs: Any\n ) -> None:", "def test_required_goals_missing(self):\r\n self.url_params['goals'] = ''\r\n response = self.client.post(self.url, self.url_params)\r\n self.assertEqual(response.status_code, 400)\r\n obj = json.loads(response.content)\r\n self.assertEqual(\r\n obj['value'],\r\n u'A description of your goals is required',\r\n )", "def test_custom_required(self):\n for data in ({}, {'payment_amount': ''}):\n form = DonationAmountForm(data=data)\n self.assertFalse(form.is_valid())\n errors = form.errors.as_data()\n self.assertTrue('payment_amount' in errors)\n self.assertEqual('required', errors['payment_amount'][0].code)", "def _validate_http_request(self):\n if self.path != '/':\n print('Invalid request path:', self.path)\n self.send_error(HTTPStatus.NOT_FOUND, 'Request Must Have Path Of /')\n raise ValueError\n\n content_type = self.headers.get('Content-Type', None)\n if content_type != 'application/json':\n print('Invalid request Content-Type:', self.path)\n self.send_error(HTTPStatus.BAD_REQUEST, 'Content-Type Must Be application/json')\n raise ValueError", "def test_POST2(self):\n payload = {\n \"id\": \"222\",\n \"make\": \"Wolkswagen\",\n \"model\": \"Golf\",\n \"year\": 2011,\n \"price\": 8600\n }\n r = requests.post(self.address, json=payload)\n self.assertEqual(r.status_code, 400)", "def test_missing_parameters(self):\n # request\n request_body = {\n 'wwuid': self.student.wwuid,\n 'enroll_key': self.labgroup.enroll_key\n }\n response = self.client.post(reverse(self.view_name), request_body)\n # test response\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n # test database\n self.assertEqual(Student.objects.first().labgroup, None)", "def validation(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def test_user_list_post_missing_parameters(client):\n\n response = client.post(\n \"/users\",\n headers={\n \"Accept\": \"application/vnd.api+json\",\n \"Content-Type\": \"application/vnd.api+json\",\n },\n data=json.dumps(\n dict(\n first_name=\"first\", last_name=\"last\", username=\"username\", email=\"email\"\n )\n ),\n )\n assert response.status_code == 400\n assert get_content_type(response) == \"application/vnd.api+json\"\n assert json.loads(response.data.decode()) == dict(\n message=dict(password=\"Missing required parameter in the JSON body\")\n )", "def buildReq(cmd, target, sequence, fieldList):\n return buildCmd(\"REQ\", cmd, target, sequence, fieldList)", "def __check_supplied_params(event, resp):\n\n use_start_end = False\n use_range = False\n\n # check if start/end was provided in the request, \n # indicate subseq-type is 'start-end' in response data dict\n if event['queryStringParameters']:\n params = event['queryStringParameters']\n if \"start\" in params.keys() or \"end\" in params.keys():\n use_start_end = True\n resp.put_data(\"subseq-type\", \"start-end\")\n \n # check if Range header was provided in the request,\n # indicate subseq-type is 'Range' in response data dict\n if \"Range\" in event['headers']:\n use_range = True\n resp.put_data(\"subseq-type\", \"range\")\n \n # if 
both start/end and AND Range header, this is a BAD REQUEST\n if use_start_end and use_range:\n resp.set_status_code(SC.BAD_REQUEST)\n resp.set_body(json.dumps({\n \"message\": \"Cannot provide both sequence start/end AND Range\"\n }))", "def validate_request_cerberus(schema):\n def decorator(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n body_json = request.get_json()\n current_app.logger.info(body_json)\n v = Validator(schema, require_all=True)\n v.allow_unknown = True # TODO: allow request params other then the ones defined on the schema level\n if not v.validate(body_json):\n valid_params_list = ', '.join(schema.keys())\n return response_fail(f\"You must call with all request params: {valid_params_list}\")\n return func(*args, **kwargs)\n return wrapper\n return decorator", "def test_empty_data(self, client, users):\n user = users[0]\n url = reverse('users:update', args=(user.pk,))\n response = client.post(url)\n assert response.status_code == 200\n assert 'This field is required.' in str(response.content)", "def validate(self):\n if self.params.get(\"format\"):\n if self.params[\"format\"] not in formats:\n raise ValueError(f\"format must be one of {formats}: {self.dt}\")\n for p in self.required:\n if not self.params.get(p):\n raise ValueError(f\"{p} missing: {self.dt}\")", "def validate_on_put_request(self, data, **kwargs):\n if request.method == \"PUT\":\n if \"bio\" not in data or \"website\" not in data:\n raise ValidationError(\"Missing one or more fields.\")", "def test_missing_fields(client, mocker):\n data = {}\n for _ in range(5):\n data[FAKE.text()] = FAKE.text()\n mocker.patch(\n \"ecommerce.views.IsSignedByCyberSource.has_permission\", return_value=True\n )\n try:\n # Missing fields from Cybersource POST will cause the KeyError.\n # In this test we want to make sure we saved the data in Receipt for later\n # analysis even if there is an error.\n client.post(reverse(\"order-fulfillment\"), data=data)\n except KeyError:\n pass\n\n assert Order.objects.count() == 0\n assert Receipt.objects.count() == 1\n assert Receipt.objects.first().data == data", "def _validate_rpc_request(self, request):\n if request.get('jsonrpc', None) != '2.0':\n raise ValueError('Invalid jsonrpc: must be \"2.0\"')\n\n id = request.get('id', None)\n if not (id is None or isinstance(id, (str, int, float))):\n raise ValueError('Invalid id: must be null, string or number')\n\n method = request.get('method', None)\n if not isinstance(method, str):\n raise ValueError('Invalid method: must be string')\n\n params = request.get('params', [])\n if not isinstance(params, (dict, list)):\n raise ValueError('Invalid params: must be array or object')", "def test_fewer_parameters(self):\n data_github = {\"version_control\": \"github\", \"scm_commit\": \"AA\", \"repo\": \"AA\", \"branch\": \"AA\", \"enabled\": 1}\n data_git = {\"version_control\": \"git\", \"scm_commit\": \"AA\", \"repo\": \"AA\", \"branch\": \"AA\", \"enabled\": 1}\n\n for data in [data_git, data_github]:\n resp = self.client.post(\"/tracking\", json=data, content_type=\"application/json\", headers=self.auth)\n resp_dict = json.loads(resp.data)\n self.assertIn(\"code\", resp_dict, msg=\"Error in data format return\")\n self.assertEqual(\n ResponseCode.INPUT_PARAMETERS_ERROR, resp_dict.get(\"code\"), msg=\"Error in status code return\"\n )\n\n self.assertIn(\"msg\", resp_dict, msg=\"Error in data format return\")\n self.assertEqual(\n ResponseCode.CODE_MSG_MAP.get(ResponseCode.INPUT_PARAMETERS_ERROR),\n resp_dict.get(\"msg\"),\n msg=\"Error 
in status code return\"\n )\n\n self.assertIn(\"data\", resp_dict, msg=\"Error in data format return\")\n self.assertEqual(resp_dict.get(\"data\"), None, msg=\"Error in data information return\")", "def test_required_gender_missing(self):\r\n self.url_params['gender'] = ''\r\n response = self.client.post(self.url, self.url_params)\r\n self.assertEqual(response.status_code, 400)\r\n obj = json.loads(response.content)\r\n self.assertEqual(\r\n obj['value'],\r\n u'Your gender is required',\r\n )", "def test_signup_required_fields(self, mock_update) -> None:\n user_data = {\n 'username': 'alexphi57',\n 'first_name': 'alex',\n 'last_name': 'bill',\n 'email': 'AP_TEST_foo57@gmail.com',\n 'password1': 'barfoobas',\n 'password2': 'barfoobas',\n 'phone': '+18001234567',\n 'resorts': []\n }\n resp = self.client.post(reverse('signup'), data=user_data)\n self.assertEqual(resp.status_code, 302)\n\n # Include resorts causes error\n user_data['resorts'] = ['test1']\n resp = self.client.post(reverse('signup'), data=user_data)\n self.assertEqual(resp.status_code, 200)\n\n # Include contact_days only causes error\n user_data['contact_days'] = [\"Mon\"]\n resp = self.client.post(reverse('signup'), data=user_data)\n self.assertEqual(resp.status_code, 200)\n\n # Include contact_method only causes error\n del user_data['contact_days']\n user_data['contact_method'] = 'email'\n resp = self.client.post(reverse('signup'), data=user_data)\n self.assertEqual(resp.status_code, 200)\n\n # Include both contact_days and contact_method works\n user_data['contact_days'] = [\"Mon\"]\n user_data['username'] = 'alexphi18'\n user_data['phone'] = '+18009876543'\n user_data['email'] = 'AP_TEST18@gmail.com'\n resp = self.client.post(reverse('signup'), data=user_data)\n self.assertEqual(resp.status_code, 302)", "def validate_params(self, ctx):\n try:\n return self._validate_params()\n except formencode.Invalid as exc:\n unpack = exc.unpack_errors()\n self.__request.set_property(lambda: unpack,\n self._invalid_params_attr,\n reify=True)\n if self._raise_exc is True:\n self._raise(self._invalid_params_exc, unpack)", "def validate_and_modify(data):\r\n\r\n\tkeys = data.keys()\r\n\r\n\tif not 'method' in keys or type(data['method']) != str:\r\n\t\tprint_validation_error('method')\r\n\t\treturn None\r\n\r\n\tdata['method'] = data['method'].upper()\r\n\tif not data['method'] in HTTP_METHODS:\r\n\t\tprint_validation_error('method')\r\n\t\treturn None\r\n\r\n\tif not 'url' in keys or type(data['url']) != str or data['url'] == '':\r\n\t\tprint_validation_error('url')\r\n\t\treturn None\r\n\r\n\tif 'description' in keys:\r\n\t\tdata.pop('description')\r\n\r\n\t# requests.request takes data, not body.\r\n\tif 'body' in keys:\r\n\t\tdata['data'] = data.pop('body')\r\n\r\n\treturn data", "def test_no_password(self):\n response = self.register({\n 'first_name': \"David\",\n 'last_name': \"Smith\",\n 'email': \"david.smith@mom.com\",\n 'phone_number': \"012-345-6789\"\n })\n self.assertEqual(response.status_code, 400)\n self.assertDictContainsSubset({'message': \"Missing parameters\"}, response.json())", "def test_submit_form_using_valid_data():", "def _validate_params(self):\n raise NotImplementedError('Must be implemented in subclasses.')", "def testParams(self):\n h = self.getHandler()\n rq = '{\"jsonrpc\":\"2.0\", \"method\":\"myMethod\", \"params\":[\"A\",\"B\", \"BAD\"], \"id\":\"1\"}'\n \n messages, dummy = h.parse_body(rq)\n msg = messages[0]\n h.handle_message(msg)\n self.assertTrue(isinstance(msg.error, InvalidParamsError))", "def 
make_blank_request(self, *args, **kwargs):\n factory = self.get(abcs.ARequest)\n request = factory.blank(*args, app=self, **kwargs)\n self._set_request_attributes(request)\n return request", "def test_post_invalid(self):\n for field in ['language', 'style']:\n response = self.post(\n **{'content': 'foo', field: '123-invalid-abc'})\n self.assertEqual(\n response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_build_params( self ):\n r = Requester( self.logger )\n ( partnership_id, authorization_id, pickup_location, search_key, search_value ) = ( 'a', 'b', 'c', 'd', 'e' )\n params = r.build_params( partnership_id, authorization_id, pickup_location, search_key, search_value )\n self.assertEqual(\n ['ExactSearch', 'Notes', 'PartnershipId', 'PickupLocation'],\n sorted(params.keys()) )", "def __init_request(self, req):\n return defines.ReturnCode.SUCC", "def test_create_user_missing_fields(self):\n payload = {\n 'email': 'email',\n 'password': ''\n }\n res = self.client.post(CREATE_USER_API, payload)\n self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)", "def _validate(self, body):\n if not body:\n raise exception.BadRequest(\"The request contains an empty body\")\n\n if not body.get('databases', ''):\n raise exception.BadRequest(\"Required element/key 'databases' was \"\n \"not specified\")\n for database in body.get('databases'):\n if not database.get('name', ''):\n raise exception.BadRequest(\"Required attribute/key 'name' was \"\n \"not specified\")", "def required_fields(required_fields=[]):\n def decorator(func):\n \"\"\" The decorator applied to the obj_create method\"\"\"\n def wrapper(resource, bundle=None, **kwargs):\n \"\"\" wraps the decorated method and verifies a list of required\n fields when a new object is being created.\n\n \"\"\"\n if not isinstance(bundle, Bundle):\n request = bundle\n data = resource.deserialize(\n request, request.body,\n format=request.META.get('CONTENT_TYPE', 'application/json')\n )\n bundle = resource.build_bundle(request=request, data=data)\n else:\n request = None\n\n for required_field in required_fields:\n if required_field not in bundle.data:\n response = HttpBadRequest(\n json.dumps(\"missing %s field\" % required_field),\n content_type=bundle.request.META['CONTENT_TYPE'])\n raise ImmediateHttpResponse(response=response)\n return func(resource, bundle=bundle, **kwargs)\n return wrapper\n return decorator", "def test_signup_when_empty_data_provided(self):\n user = {}\n\n response = self.client.post('/api/v1/register', json=json.dumps(user), headers={'Content-Type': 'application/json'})\n data = response.get_json()\n\n self.assertEqual(response.status_code, 400)\n self.assertEqual(data['status'], 400)\n self.assertEqual(data['message'], 'Invalid data. 
Please fill all the required fields')", "def test_create_new_user_blank_fields(self):\n self.maxDiff = None\n data = {\n 'email': '',\n 'password': '',\n }\n\n response = self.client.post(\n reverse('user-list'),\n data,\n format='json',\n )\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n content = {\n 'email': ['This field may not be blank.'],\n 'password': ['This field may not be blank.'],\n }\n self.assertEqual(json.loads(response.content), content)", "def _assemble_and_send_request(self):\r\n # We get an exception like this when specifying an IntegratorId:\r\n # suds.TypeNotFound: Type not found: 'IntegratorId'\r\n # Setting it to None does not seem to appease it.\r\n del self.ClientDetail.IntegratorId\r\n self.logger.debug(self.WebAuthenticationDetail)\r\n self.logger.debug(self.ClientDetail)\r\n self.logger.debug(self.TransactionDetail)\r\n self.logger.debug(self.VersionId)\r\n # Fire off the query.\r\n response = self.client.service.addressValidation(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n RequestTimestamp=datetime.now(),\r\n Options=self.AddressValidationOptions,\r\n AddressesToValidate=self.addresses_to_validate)\r\n return response", "def check_mandatory(params: Dict[str, str]):\n for key, val in params.items():\n if val is None or val == '':\n raise ValueError(f'Missing mandatory param: `{key}`.')", "def missing_data(self, data):\n missing_fields = []\n for key in data:\n if not key in request.json:\n missing_fields.append(key)\n if missing_fields:\n message = 'Missing ' + ', '.join(missing_fields)\n return self.bad_request(message)\n return None", "def test_required_name_attribute_is_in_the_request_payload_and_has_a_value(self):\n with self.client:\n token = self.get_user_token()\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 400)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'No attribute or value was specified, nothing was changed')", "def test_blank_email(self):\n response = self.register({\n 'first_name': \"David\",\n 'last_name': \"Smith\",\n 'password': '******',\n 'email': \"\",\n 'phone_number': \"012-345-6789\"\n })\n self.assertEqual(response.status_code, 400)\n self.assertDictContainsSubset({'message': \"Password/Email cannot be empty\"}, response.json())", "def validate(self, request):\n\n value = request._get_parameter_value(self)\n\n if value.object is None:\n if self.required:\n self.add_error(request, \"This input is required\")\n\n return\n\n self.do_validate(request, value.object)", "def params_required(self) -> bool:\n if self.no_params or self.params_optional:\n return False\n else:\n return True" ]
[ "0.67361516", "0.6645965", "0.64985675", "0.61909497", "0.6140521", "0.61338866", "0.60920036", "0.60691214", "0.60643446", "0.60591084", "0.60197985", "0.6019011", "0.5985431", "0.59851086", "0.5952253", "0.5949522", "0.594586", "0.5931081", "0.5922004", "0.5914755", "0.58972734", "0.5895534", "0.5889529", "0.58681846", "0.58653766", "0.5842317", "0.580175", "0.5801109", "0.57995903", "0.5792344", "0.57744867", "0.57540166", "0.574778", "0.57368827", "0.5720694", "0.5691442", "0.5686709", "0.56733674", "0.56729573", "0.5662496", "0.56530696", "0.5650517", "0.56482905", "0.5647961", "0.5631874", "0.5601817", "0.5591793", "0.5589535", "0.5589535", "0.55882686", "0.55867267", "0.55810016", "0.5580829", "0.5577639", "0.55741215", "0.55595785", "0.55592036", "0.55572385", "0.554826", "0.55389977", "0.5533713", "0.5533005", "0.55303115", "0.5527882", "0.5527458", "0.5512245", "0.5503906", "0.5499011", "0.54865503", "0.54783446", "0.54775643", "0.54762626", "0.5474616", "0.5464225", "0.5459473", "0.54575455", "0.54574084", "0.54542845", "0.5453775", "0.54525244", "0.5449206", "0.54472864", "0.5444049", "0.5438321", "0.54358524", "0.5433837", "0.54332846", "0.5431365", "0.5430613", "0.54278404", "0.5424443", "0.5419723", "0.5414759", "0.54105115", "0.5406339", "0.5393999", "0.53919476", "0.53915936", "0.53898203", "0.5385381", "0.53844714" ]
0.0
-1
Send null value in Key field
def test_02(self): assert 'False' == Api.requestBlock('test-02')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setnoempty(self, key, value):\r\n if value:\r\n self[key] = value", "def get(self, key):\n return \"\"", "def test_key_no_data(self):\n key = Key({})\n\n assert key.warning is None\n assert key.in_car is None", "def __missing__(self, key):\n return key", "def convert_nulls(dic, null_value):\n for key in dic.iterkeys():\n if dic[key] is None:\n dic[key] = null_value", "def _key(self):\n return None", "def key(self):\n return None", "def keyEquivalent( self ):\n\t\treturn None", "def nulls_to_empty(dic, *keys):\n if not keys:\n keys = dic.keys()\n for key in keys:\n if dic[key] is None:\n dic[key] = ''\n return None", "def setdefault_key_value(self):\n raise NotImplementedError", "def test_neg_operate_key_is_none(self):\n llist = [{\"op\": aerospike.OPERATOR_PREPEND, \"bin\": \"name\", \"val\": \"ram\"}]\n try:\n self.as_connection.operate(None, llist)\n\n except e.ParamError as exception:\n assert exception.code == -2", "def default(self, key):\n raise KeyError(repr(key))", "def _remove_nulls(self, params):\n\n if params is not None:\n return {key:value for key, value in params.items() if value is not None}\n\n return {}", "def getDbStrNone(self, db, key):\n \n value = self.get(db + \".\" + key)\n if value == None:\n value = self.get(\"default.\" + key)\n return value", "def test_missingKey(self):\n self.assertIsNone(self.users.key(\"mystery domain\", \"mystery user\"))", "def testNoneValue(self):\n objectID = uuid4()\n user = createUser(u'username', u'password', u'User',\n u'user@example.com')\n namespace = createNamespace(user, u'name')\n tag = createTag(user, namespace, u'tag')\n self.store.add(TagValue(user.id, tag.id, objectID, None))", "def test_map_update_none_deletes_key(self):\r\n # partition = uuid4()\r\n # cluster = 1\r\n # TestQueryUpdateModel.objects.create(\r\n # partition=partition, cluster=cluster,\r\n # text_map={\"foo\": '1', \"bar\": '2'})\r\n # TestQueryUpdateModel.objects(\r\n # partition=partition, cluster=cluster).update(\r\n # text_map__update={\"bar\": None})\r\n # obj = TestQueryUpdateModel.objects.get(partition=partition, cluster=cluster)\r\n # self.assertEqual(obj.text_map, {\"foo\": '1'})\r", "def __init__(__self__, *,\n key: Optional[pulumi.Input[str]] = None,\n value: Optional[pulumi.Input[str]] = None):\n if key is not None:\n pulumi.set(__self__, \"key\", key)\n if value is not None:\n pulumi.set(__self__, \"value\", value)", "def key(nullable=True):\n return sa.Column(\n \"key\",\n sa.Text().with_variant(mysql.VARCHAR(255), \"mysql\"),\n nullable=nullable,\n )", "def create_raw(self, key, value):\n data = None\n if key is not None and value is not None:\n try:\n data = self.tcex.key_value_store.create(self._context, key.strip(), value)\n except RuntimeError as e:\n self.log.error(e)\n else:\n self.log.warning('The key or value field was None.')\n return data", "def _add_missing_keys(self):\n for k, v in self.defaults.items():\n if k not in self.data:\n self.data[k] = v\n\n self.save()", "def __missing__(self, key):\n global MISSING\n MISSING = key # For debugging - save name of missing key\n return INVALID", "def test_add_none_field(self):\n user_id = get_rand_string()\n data = get_rand_string()\n id = get_rand_string()\n\n doc = {}\n doc[\"user_id\"] = user_id\n doc[\"data\"] = data\n doc[\"id\"] = id\n doc[\"num\"] = None\n\n self.conn.add(**doc)", "def test_map_update_none_deletes_key(self):\n partition = uuid4()\n cluster = 1\n TestQueryUpdateModel.objects.create(\n partition=partition, cluster=cluster,\n text_map={\"foo\": '1', \"bar\": '2'})\n 
TestQueryUpdateModel.objects(\n partition=partition, cluster=cluster).update(\n text_map__update={\"bar\": None})\n obj = TestQueryUpdateModel.objects.get(partition=partition, cluster=cluster)\n self.assertEqual(obj.text_map, {\"foo\": '1'})", "def test_neg_operate_empty_string_key(self):\n llist = [{\"op\": aerospike.OPERATOR_PREPEND, \"bin\": \"name\", \"val\": \"ram\"}]\n try:\n self.as_connection.operate(\"\", llist)\n\n except e.ParamError as exception:\n assert exception.code == -2", "def _add_key(self, key):\n key = key.strip().strip('@').lower()\n if self.homogenise_fields:\n if key in list(self.alt_dict.keys()):\n key = self.alt_dict[key]\n if not isinstance(key, ustr):\n return ustr(key, 'utf-8')\n else:\n return key", "def prepare_key(self, key):\n return smart_str(key)", "def _set_if_not_none(self, field_key, value, verifier=str):\n\n\t\tif value is None:\n\t\t\treturn\n\n\t\tif verifier is not None:\n\t\t\tvalue = verifier(value)\n\n\t\tself.data[field_key] = value", "def __init__(self):\n self._key = ''", "def test_key_none(self):\n try:\n AlphaVantage()\n self.fail(msg='A None api key must raise an error')\n except ValueError:\n self.assertTrue(True)", "def test_key_none(self):\n try:\n AlphaVantage()\n self.fail(msg='A None api key must raise an error')\n except ValueError:\n self.assertTrue(True)", "def fmt_option_key(key, value):\n if value is None:\n return \"\"\n return f\"{key}={value}\"", "def setdefault(self, key):\n pass", "def check_integrity(dict):\n if (dict['type'] == 'string') and (dict['value'] == None or dict['value'] == ''):\n dict['value'] = '\"\"'", "def _prepare_get_request(self, key):\n\n return {\n 'TableName': self.table_name,\n 'Key': {\n self._key_field.name: {\n self._key_field.data_type: key\n }\n }\n }", "def key_not(self, key_not):\n\n self._key_not = key_not", "def key(self, key: \"str\"):\n if key is None:\n raise ValueError(\"Invalid value for `key`, must not be `None`\")\n self._attrs[\"key\"] = key", "def get_value(self, query_dict, k): \n if k in query_dict:\n return query_dict[k]\n return ''", "def get_prep_value(self, value):\r\n if value == \"\" or value is None:\r\n return None\r\n\r\n #if isinstance(value, dict):\r\n value = json.dumps(value, default=encode_object, ensure_ascii=False, separators=(',',':'))\r\n\r\n return super(JSONField, self).get_prep_value(value)", "def getStrNo(self, key):\n value = self.getConf(key);\n if value == \"no\":\n return None\n else:\n return value", "def test_setter_invalid_key(self):\n root = netapp_api.NaElement('root')\n try:\n root[None] = 'value'\n except Exception as e:\n if not isinstance(e, KeyError):\n self.fail(_('Error not a KeyError.'))", "def test_setter_invalid_key(self):\n root = netapp_api.NaElement('root')\n try:\n root[None] = 'value'\n except Exception as e:\n if not isinstance(e, KeyError):\n self.fail(_('Error not a KeyError.'))", "def _remove_none_fields(klass, d):\n\n return dict((k, v) for k, v in d.iteritems() if v is not None)", "def validate (self):\n if kw.has_key (name):\n self.keyvals[name] = kw[name]\n elif obj.default is not None:\n self.keyvals[name] = obj.default\n else:\n if obj.required:\n raise AttributeError (\"This field is required\")", "def validate(self, data: Dict):\n for key in self.__dict__.keys():\n if not key.startswith('__') and key != 'id':\n if data[key] == '' or data[key] is None:\n raise ValidationError(\n message=f'{key} should not be \"{data[key]}\"'\n )", "def get_empty_values(self):\n values_dict = self.__class__.objects.values().get(pk=self.pk)\n d = {\n k: v for k, v in zip(values_dict.keys(), values_dict.values())\n if v is None or v is u''\n }\n return d", "def _remove_none(self, data):\r\n for key, value in data.items():\r\n if value is None or isinstance(value, forms.Field):\r\n del data[key]\r\n if isinstance(value, dict):\r\n self._remove_none(data[key])", "def _prepare_put_request(self, key, value, timestamp, timeout, put_if_exists):\n req = {\n 'TableName': self.table_name,\n 'Item': {\n self._key_field.name: {\n self._key_field.data_type: key\n },\n self._value_field.name: {\n self._value_field.data_type: b64encode(value).decode('ascii')\n },\n self._timestamp_field.name: {\n self._timestamp_field.data_type: str(int(timestamp))\n },\n self._expiry_field.name: {\n self._expiry_field.data_type: str(int(timestamp + timeout))\n }\n }\n }\n\n if not put_if_exists:\n req.update({\n 'ConditionExpression' : 'attribute_not_exists(%s)' % self._key_field.name\n })\n\n return req", "def add_default_params(self, params):\n params['key'] = self.key\n params['format'] = self.format\n #params['unique_id'] = generate_unique_id()\n return params", "def validate_key(self, key: keyType) -> bool:\n if isinstance(key, (dict,bool)):\n raise Exception\n if key is None:\n raise Exception\n # Numerical key object has no len(),\n # so explicitly specify which types are not allowed to use empty value as keys\n if isinstance(key, (str, tuple, set, list)) and (len(key) == 0):\n raise Exception\n return True", "def _get_blank_value_18(field):\n if field.null:\n return None\n else:\n return field.value_to_string(None)", "def setkey(self, key, value):\n if value == \"\":\n return\n if key == \"agency_id\":\n self.agency_id = value\n elif key == \"agency_name\":\n self.agency_name = value\n elif key == \"agency_url\":\n self.agency_url = value\n elif key == \"agency_timezone\":\n self.agency_timezone = value\n elif key == \"agency_lang\":\n self.agency_lang = value\n elif key == \"agency_phone\":\n self.agency_phone = value\n elif key == \"agency_fare_url\":\n self.agency_fare_url = value\n elif key == \"agency_email\":\n self.agency_email = value\n else:\n raise InvalidKeyError(key)", "def naics_agg_key(record: dict) -> Optional[str]:\n if record[\"naics_code\"] is None:\n return None\n return json.dumps({\"code\": record[\"naics_code\"], \"description\": record[\"naics_description\"]})", "def dumps(tuple_dict, key):\n tuple_dict[key] = json.dumps(tuple_dict[key]) if tuple_dict[key] != None else ''", "def test_empty_key_string(self):\n def x():\n y = pyamf.MixedArray()\n y.update({'': 1, 0: 1})\n self.encode(y)\n\n self.assertRaises(pyamf.EncodeError, x)", "def test_set_missing_keys_1(self):\n data_dict = {\"type\":\"add\", \"cluster\":\"\"}\n key_set = set([\"type\", \"host_genus\"])\n tickets.set_missing_keys(data_dict, key_set)\n with self.subTest():\n self.assertEqual(len(data_dict.keys()), 3)\n with self.subTest():\n self.assertEqual(data_dict[\"host_genus\"], \"\")", "def get(self, key: str, default: Optional[str] = None) -> Optional[str]:\n if key == \"id\":\n return \"Id\"\n return key", "def _check_missing(self, key: str, value: Any):\n required = from_dot_notation(\n field=\".\".join([*self.parents, key]), obj=self.definition\n ).get(\"required\", True)\n\n if required and value is None:\n raise Exception(f\"Value for '{key}' is empty but a value is required\")", "def __str__(self):\n return '<{}>'.format(self.key.id())", "def __str__(self):\n return '<{}>'.format(self.key.id())", "def raw_recordval(record, key):\n if key in record:\n return str(record[key]).strip()\n return \"\"", "def __init__(self, key=None):\n self.key = key", "def none_to_empty(data):\n return data if data is not None else ''", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def _get_blank_value_19(field):\n if field.null:\n return None\n else:\n return ''", "def _get_raw_key(self, key_id):", "def _populate_always_present_fields(self, field):\n defaults = [\n (\"label\", \"\"),\n (\"instructions\", \"\"),\n (\"placeholder\", \"\"),\n (\"defaultValue\", \"\"),\n (\"restrictions\", {}),\n (\"errorMessages\", {}),\n ]\n field.update({\n key: value\n for key, value in defaults if key not in field\n })", "def get(self, key, default=None):", "def test_field_none_nullable(self):\n node_dict = {\n 'host_name': 'abc'\n }\n try:\n Node(**node_dict)\n except Exception as e:\n self.assertEqual(type(e), ValueError)", "def key(key):\n return key", "def remove_none(obj: dict):\n return remap(\n obj, visit=lambda path, key, value: key is not None and value is not None\n )", "def key(self):\n def validate(name):\n '''Compute the key if necessary and validate'''\n found = getattr(self, name)\n value = found() if callable(found) else found\n if value is None:\n raise BadKeyError(\"The key for %s cannot be None\" % self)\n return str(value) \n if self.__key is None:\n namespace, kind, key = Schema.Get(self)\n self.__id = key\n value = validate(key)\n self.__key = Key(namespace, kind, value)\n else:\n self.__key.id = validate(self.__id)\n return self.__key", "def __init__(__self__, *,\n key: pulumi.Input[str],\n value: pulumi.Input[str]):\n pulumi.set(__self__, \"key\", key)\n pulumi.set(__self__, \"value\", value)", "def __init__(__self__, *,\n key: pulumi.Input[str],\n value: pulumi.Input[str]):\n pulumi.set(__self__, \"key\", key)\n pulumi.set(__self__, \"value\", value)" ]
[ "0.6627444", "0.64162874", "0.63429326", "0.6278156", "0.62152123", "0.61957705", "0.60758203", "0.6026496", "0.5976624", "0.58816373", "0.5840957", "0.583311", "0.58217424", "0.5807191", "0.5738377", "0.5710752", "0.570884", "0.57058185", "0.57004374", "0.5699024", "0.56988484", "0.5664367", "0.5654356", "0.56166446", "0.55951804", "0.558265", "0.558203", "0.5582007", "0.5571181", "0.5569456", "0.5569456", "0.5550074", "0.5547686", "0.5533762", "0.55233943", "0.5519277", "0.5512604", "0.5497484", "0.5467094", "0.54519784", "0.5451607", "0.5451607", "0.54415864", "0.5439102", "0.54229194", "0.5415752", "0.54018176", "0.538566", "0.5377153", "0.5376524", "0.53631383", "0.5356993", "0.5356976", "0.53543335", "0.53527516", "0.5350948", "0.5348332", "0.5324154", "0.5323862", "0.5323862", "0.53179795", "0.5316098", "0.53092295", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5306653", "0.5289136", "0.5278087", "0.5272883", "0.52703035", "0.5259624", "0.52562004", "0.5251641", "0.52432925", "0.52423483", "0.52423483" ]
0.0
-1
Send special characters in Key field
def test_03(self): assert 'False' == Api.requestBlock('test-03')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def prepare_key(self, key):\n return smart_str(key)", "def _safe_key(self, key):\n if isinstance(key, str):\n key = key.encode('UTF-8')\n return key", "def _GetKeyString(self):", "def _GetKeyString(self):", "def _encode_key(self, key: str) -> str:\n return key", "def _add_key(self, key):\n key = key.strip().strip('@').lower()\n if self.homogenise_fields:\n if key in list(self.alt_dict.keys()):\n key = self.alt_dict[key]\n if not isinstance(key, ustr):\n return ustr(key, 'utf-8')\n else:\n return key", "def process_key(key):\n print(chr(key))", "def _encode_key(self, key):\n return key.encode() if isinstance(key, str) else key", "def format_key(self, key):\n if self.in_quotes(key):\n key = self.format_value(key) \n else:\n if key.lower() in ALL_KEYWORDS:\n key = self.format_value(key.upper())\n else:\n key = self.format_value(self.add_quotes(key.lower()))\n\n return key", "def encode_key_for_mongo(fieldname):\r\n for char in [\".\", \"$\"]:\r\n fieldname = fieldname.replace(char, '%{:02x}'.format(ord(char)))\r\n return fieldname", "def sendkey_escape(string):\r\n return re.sub(r'([+^%~{}\\[\\]()])', r'{\\1}', string)", "def _key_func_2(entry: tuple[str, list]) -> str:\n key = unicodedata.normalize('NFD', entry[0].lower())\n if key.startswith('\\N{RIGHT-TO-LEFT MARK}'):\n key = key[1:]\n if key[0:1].isalpha() or key.startswith('_'):\n key = chr(127) + key\n return key", "def key(key):\n return key", "def on_press(key):\n output_file.write(str(key).replace(\"'\", \"\"))", "def key_error_message(self,key):\n if not key:\n return 'key is blank.'\n elif any(map(lambda s: s in key,space_chars)):\n return '\"{}\" contains whitespace.'.format(key)\n elif any(map(lambda s: s in key,bad_chars)):\n return '\"{}\" contains special characters.'.format(key)", "def post_key(self):\n # print(self.key)\n #Sending the key to the attacker.\n s.send(bytes(\"K\\n{}\".format(str(self.key,'utf-8')),'utf-8'))", "def _get_key(self, key_column):\n return key_column.text.replace(u'\\xa0', u' ')", "def key(param: str):\n if re.match(r'^[A-Z_]+$', param):\n return 'KEY_' + param\n return 'KEY_' + re.sub(r'([a-z]|[A-Z]{2,})([A-Z]|\\d$)', r'\\1_\\2', param).upper()", "def _format_key(self, k: str) -> str:\n if k[0] == '.':\n k = k[1:]\n k = k.replace('.', '_')\n k = k.upper()\n k = re.sub(self.KEY_REGEX, '', k)\n return k", "def encode_email(email, key):\n return", "def add_key(mu_key):\n params['key'] = mu_key", "def _key_name(self, key):\n if type(key) == type(\"\"):\n return str(curses.keyname(ord(key)).decode(\"utf-8\"))\n return False", "def url_Keyword(self,key):\n url=key.replace('www','mbasic')\n #url=key.replace(\"%20\",\"+\")\n \n return url", "def prepKey(key, length):\r\n keyString = \"\"\r\n while len(keyString) < length:\r\n for char in key:\r\n if len(keyString) < length:\r\n keyString += char\r\n return keyString", "def _GetKeyString(self):\n return self.__key_string", "def caesar_encode(self, text, key):\n result_list = []\n for char in text:\n if char.isalpha():\n if char.islower():\n offset = ASCII_LOWER_OFFSET\n else:\n offset = ASCII_UPPER_OFFSET\n char = chr((ord(char) - offset + key) % ALPHABET_SIZE + offset)\n result_list.append(char)\n return ''.join(result_list)", "def _encode_query_key(text, maximal=True):\n if maximal:\n bytestr = normalize('NFC', text).encode('utf8')\n return u''.join([_QUERY_KEY_QUOTE_MAP[b] for b in bytestr])\n return u''.join([_QUERY_KEY_QUOTE_MAP[t] if t in _QUERY_KEY_DELIMS else t\n for t in text])", "def encode_k_v(obj):\n parts = []\n for key, value in 
obj.items():\n if b'&' in key or b'&' in value or b'=' in key or b'=' in value:\n raise ValueError('Invalid character.')\n parts.append(key + b'=' + value)\n return b'&'.join(parts)", "def _decode_key(self, key):\n if hasattr(key, \"char\"):\n return str(key.char).lower()\n elif hasattr(key, \"name\"):\n return str(key.name).lower()", "def _build_key(self, key: str) -> str:\n return \"-\".join((self._name, key))", "def MakeKey(self, string, string_1, string_2):\n ...", "def _getAttributeKeyFromDBKey(dbKey):\n return dbKey[dbKey.find(\"\\x1D\")+1:]", "def key2basename(self, key):\n for char, replacement in self.dangerous_chars.items():\n key = key.replace(char, replacement)\n return key", "def sanitize_key(key):\n return re.sub('\\W|^(?=\\d)','_', key)", "def prepare_key (self, key, for_seq):\n r_key = \"%s:%d:%s\" % (self.classkey, for_seq, key)\n return r_key", "def clean(key):\n\treturn key.strip().replace('%','').replace(' ', '-')", "def as_key(key):\n return key.lstrip('/').rstrip('/')", "def __GetKeyString(self):\n return self._GetKeyString()", "def handle_key(self, key):\n pass", "def decode_key_from_mongo(fieldname):\r\n return urllib.unquote(fieldname)", "def to_safe_annotation_key(key):\n safe_key = key.translate(str.maketrans(\"\", \"\", string.punctuation))\n return safe_key", "def key(name):\n return (\n Literal(name) ^\n (sep('\\'') + Literal(name) + sep('\\'')) ^\n (sep('\"') + Literal(name) + sep('\"')))", "def cast_name(key):\n special_symbols = set('{}{}'.format(punctuation, ' '))\n special_symbols.remove('_')\n new_key = ['_' if x in special_symbols else x for x in key]\n casted_key = ''.join(new_key)\n return casted_key", "def test_value_special_chars(self):\n raw = [\n 0x48,\n 0x65,\n 0x79,\n 0x21,\n 0x3F,\n 0x24,\n 0x20,\n 0xC4,\n 0xD6,\n 0xDC,\n 0xE4,\n 0xF6,\n 0xFC,\n 0xDF,\n ]\n string = \"Hey!?$ ร„ร–รœรครถรผรŸ\"\n self.assertEqual(DPTString.to_knx(string), raw)\n self.assertEqual(DPTString.from_knx(raw), string)", "def set_key(vars): #vars=[0]num,[1]rWord,[2]rString\r\n nonlocal key\r\n nonlocal x\r\n x=vars[0]\r\n if (vars[1]=='yes'):\r\n key['reverse_word']=True\r\n if (vars[2]=='yes'):\r\n key['reverse_string']=True\r\n if (x<-26 or x>26):\r\n x=x%26 #makes x to be in range\r\n if (x==0):\r\n x=random.randrange(-26,26) #random number\r\n for i in range (97,123): #26 ABC letters, ASCII value of 'a' is 97 97+26=123\r\n if(i+x>122):\r\n key[chr(i)]=chr(i-25+x)\r\n elif (i+x<97):\r\n key[chr(i)]=chr(i+26+x)\r\n else:\r\n key[chr(i)]=chr(i+x)\r\n print(\"done\")", "def transform_key(self, key):\n return key.lower()", "def test_contains_special_characters(self):\n for c in b\"\\0\", b\"\\n\", b\"\\r\":\n\n value = b\"foo\" + c + b\"bar\"\n result = attributeAsLDIF(b\"key\", value)\n self.assertEqual(result, b\"key:: %s\\n\" % encode(value))", "def process_key_string(keys: List[str]) -> str:\n result: str = ''\n\n for key in keys:\n if key in string.ascii_letters:\n result += key\n elif key in string.digits:\n result += string.digits\n elif key == 'space' or key == 'enter':\n result += ' '\n elif key == 'shift':\n result += ' <shift> '\n elif key == 'backspace':\n result = result[0:-1]\n elif key == '.' 
or key == ',':\n result += key\n else:\n print(key)\n return result", "def serialize_key(key: str) -> bytes:\n return key.encode(\"utf-8\")", "def key():", "def test_startswith_special_character(self):\n for c in b\"\\0\", b\"\\n\", b\"\\r\", b\" \", b\":\", b\"<\":\n\n value = c + b\"value\"\n result = attributeAsLDIF(b\"key\", value)\n self.assertEqual(result, b\"key:: %s\\n\" % encode(value))", "def _character_to_key(character: str) -> str:\n if not character.isalnum():\n key = unicodedata.name(character).lower().replace(\"-\", \"_\").replace(\" \", \"_\")\n else:\n key = character\n key = KEY_NAME_REPLACEMENTS.get(key, key)\n return key", "def serialize_key(key) -> str:\n if not isinstance(key, str):\n key = repr(key)\n return key", "def do_key(self, cmd):\n self.params.set('key', cmd, 'global')", "def test_generate_key(self): \n k = Key().generate()\n self.assertRegex(k, \"[a-zA-Z0-9+\\/]+={0,2}\")", "def _tokey(self, keys: Union[str, Iterable]):\n if hasattr(keys, \"encode\"): # str\n return keys.encode(\"utf-8\")\n elif hasattr(keys, \"decode\"): # bytes\n return keys\n return (self.Sep.join(keys).encode(\"utf-8\"))", "def get(self, key):\n return \"\"", "def _ci_key(self, key: str) -> str:\n # pylint: disable=no-self-use\n return key.lower()", "def _ci_key(self, key: str) -> str:\n # pylint: disable=no-self-use\n return key.lower()", "def entry(from_code, key):\n # turn code to hexadecimal\n from_code = DC.uniToHex(from_code)\n\n en = DESEncode()\n string_len = len(from_code)\n\n if string_len < 1:\n print 'error input'\n return False\n key_code = en.encode(from_code, key, string_len)\n return key_code", "def prep_json_for_BQ_callback(self, key):\n # add _ if first character in key is numeric\n if key[0].isnumeric():\n key = \"_\" + key\n # replace \".\" or \" \" with \"_\"\n key = re.sub(r\"[. 
]\",\"_\",key)\n # replace non-alphanumeric, non-\"_\" characters with nothing\n key = re.sub(r\"[^a-zA-Z0-9_]\",\"\",key)\n return key", "def create_key(input, **kwargs):\n input = re.compile(r\"\\d+\").sub(_repl, input)\n input = input.replace(':', 'c')\n input = input.replace(';', 'c')\n input = input.replace('_', 'u')\n return re.sub('[^a-zA-Z]+', '', str(input)).lower()", "def normalise_key(self, key):\n key = key.replace('-', '_')\n if key.startswith(\"noy_\"):\n key = key[4:]\n return key", "def encode(key: str, clear: str) -> str:\n\n enc = []\n for i in range(len(clear)):\n key_c = key[i % len(key)]\n enc_c = chr((ord(clear[i]) + ord(key_c)) % 256)\n enc.append(enc_c)\n return base64.urlsafe_b64encode(\"\".join(enc).encode()).decode()", "def replace(letter, key):\n return letter", "def make_external_key(self, data):\n return data['key']", "def fullkey(self, key):\n if len(self.basekey) > 0:\n return \"{}:{}\".format(self.basekey, key)\n else:\n return key", "def create_key ():", "def test_key_str(self):\n key = Key({\"warning\": False, \"inCar\": True})\n\n string = str(key)\n assert isinstance(string, str)\n assert string == \"{'warning': False, 'in_car': True}\"", "def test_invalid_chars_ssck(self):\r\n valid_base = SlashSeparatedCourseKey(u'org.dept-1%2', u'course.sub-2%3', u'run.faster-4%5')\r\n for key in SlashSeparatedCourseKey.KEY_FIELDS:\r\n with self.assertRaises(InvalidKeyError):\r\n # this ends up calling the constructor where the legality check should occur\r\n valid_base.replace(**{key: u'funny thing'})", "def test_specialchar(self):\n form_data = self.form_data('vNzwXpzKJyTshvHsuULn')\n form = self.form(data=form_data, user=self.u)\n self.assertFalse(form.is_valid())", "def substitution(plainText, key):\n return plainText", "def _GetKeyString(self):\n return self.__key_string", "def __GetKeyString(self):\n return self._GetKeyString()", "def clean_as_inchikey(self):\n regexp = r\"[A-Z]{14}-[A-Z]{10}-[A-Z]\"\n found = re.search(regexp, self.dirty)\n if found is None:\n self.cleaned = \"\"\n else:\n self.cleaned = found[0]", "def __to_key(name: str) -> str:\n return name.replace(\" \", \"-\")", "def pass_key(self) -> str:\n return pulumi.get(self, \"pass_key\")", "def _serializeKey(entityId, key):\n Identifier.checkIdentifier(key)\n return \"%s\\x1D%s\" % (entityId, key)", "def key_as_variable(self):\n if re.search(r'\\W', self.key) or self.treat_as_expression:\n return 'h{hash}'.format(hash=hashlib.md5(self.key.encode('UTF-8')).hexdigest()[:8])\n else:\n return 'k{key}'.format(key=self.key)", "def ReadKeys(self):\n\n reg = re.compile(r\"\\w|\\s\")\n chars = \"\"\n while True:\n key = getch()\n keynum = ord(key)\n\n if keynum == 27: #escape\n self.shouldExit = True\n return \"\"\n\n if keynum == 13: #enter\n stdout.write(\"\\n\")\n break\n\n if keynum == 8: #backspace\n chars = chars[:-1]\n stdout.write(key)\n stdout.write(\" \")\n stdout.write(key)\n continue\n\n if reg.match(key): \n chars += key\n stdout.write(key)\n\n return chars", "def modifier_key(modifier: str, key: str):\n res = \"-\".join([modifier, str(key)])\n actions.key(res)", "def _translate_keyname(inp):\n convert = {'Equal': '=', 'Escape': 'Esc', 'Delete': 'Del', 'Return': 'Enter',\n 'Page_up': 'PgUp', 'Page_down': 'PgDn'}\n if inp in convert:\n out = convert[inp]\n else:\n out = inp\n return out", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return 
pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"key\")", "def key_join(self, key, encode=True):\n if isinstance(key, str):\n parts = key.split('/')\n else:\n parts = key\n new_parts = []\n\n for part in parts:\n if isinstance(part, bytes):\n part = part.decode(\"utf-8\")\n if encode:\n part = quote(str(part))\n new_parts.append(part)\n\n return '/'.join(new_parts)", "def ctrl_k(self):\n self.string = self.string[0:self.index]", "def _get_raw_key(self, key_id):", "def getkeyname(cls, title):\n return re.sub(r'[^a-zA-Z0-9-]', '_', title.strip().lower())", "def correct_key(goodkey,code):\n db = pybtex.database.parse_string(code,\"bibtex\")\n keys = [key for key in db.entries.keys()]\n badkey = keys[0]\n return code.replace(badkey,goodkey)" ]
[ "0.7182007", "0.7139797", "0.70063776", "0.70063776", "0.6996293", "0.69241786", "0.69132024", "0.6771149", "0.6679707", "0.64687204", "0.64204514", "0.6416963", "0.6211138", "0.6207948", "0.6200801", "0.61796457", "0.6146467", "0.6136251", "0.6111352", "0.6108621", "0.6090827", "0.6075582", "0.6071378", "0.6068027", "0.6057765", "0.6042672", "0.60361", "0.6032412", "0.6026513", "0.60238075", "0.6019935", "0.59983784", "0.5974494", "0.5960339", "0.5958187", "0.59456444", "0.59355617", "0.5929213", "0.5927883", "0.5915218", "0.5889476", "0.5882605", "0.58816063", "0.58744216", "0.58742374", "0.5870283", "0.58640885", "0.5863498", "0.58578587", "0.58515817", "0.58351094", "0.58319277", "0.58295065", "0.5816486", "0.58160913", "0.5811304", "0.58081067", "0.5807406", "0.5807406", "0.58000183", "0.5779292", "0.577805", "0.576736", "0.5766395", "0.5741368", "0.5727903", "0.572203", "0.5709634", "0.5706638", "0.5695401", "0.56885463", "0.5686424", "0.56864107", "0.56773037", "0.56694615", "0.56670576", "0.5662882", "0.56520015", "0.5640904", "0.56361294", "0.5628218", "0.56234616", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56187636", "0.56163603", "0.56132096", "0.5611237", "0.561117", "0.5608174" ]
0.0
-1
Send null value in OrderId field
def test_04(self): assert 'False' == Api.requestBlock('test-04', charOrder='')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n return self.order_id", "def save(self, *args, **kwargs):\n if not self.order_number:\n self.order_number = self._generate_order_number()\n super().save(*args, **kwargs)", "def save(self, *args, **kwargs):\n if not self.order_number:\n self.order_number = self._generate_order_number()\n super().save(*args, **kwargs)", "def order_id(self) -> str:\n return pulumi.get(self, \"order_id\")", "def __str__(self):\n\n return f'{self.order_id}'", "def order_id(self, order_id):\n\n self._order_id = order_id", "def order_id(self, order_id):\n\n self._order_id = order_id", "def order_id(self, order_id):\n\n self._order_id = order_id", "def get_orderId(self):\n return self.order_id", "def nextValidId(self, orderId):\n if getattr(self, '_my_orderid_data', None) is None:\n ## getting an ID which we haven't asked for\n ## this happens, IB server just sends this along occassionally\n self.init_nextvalidid()\n\n self._my_orderid_data.put(orderId)", "def validate_order_id(self, value):\n\n if not Order.objects.filter(order_id=value).exists():\n raise ValidationError(f'Order with id {value} does not exist.')\n order_obj = Order.objects.get(order_id=value)\n if order_obj.assign_time is None:\n raise ValidationError(f'Order with id {value} was not assigned to any courier.')\n if order_obj.complete_time is not None:\n raise ValidationError(f'Order with id {value} has already been completed.')\n return value", "def post_order(self, order):\n url = self.build_url(\"orders/\")\n res = post(url, json=order)\n if res.ok:\n return res.json()[\"id\"]\n return None", "def get_orderId(self):\n return self.metadata['orderId']", "def fill_order(self, order: Order) -> None:\n order = self.get_order_by_id(order.id)\n order.status = OrderStatus.FILL", "def retrieve_order_by_payment_type_none(self, customer_id):\n with sqlite3.connect('bangazon.db') as conn:\n c = conn.cursor()\n c.execute(\"SELECT * FROM Orders WHERE customer_Id = '{}' AND payment_type_Id is null\".format(customer_id))\n customer_order = c.fetchall()\n return customer_order", "def test_add_none_field(self):\n user_id = get_rand_string()\n data = get_rand_string()\n id = get_rand_string()\n\n doc = {}\n doc[\"user_id\"] = user_id\n doc[\"data\"] = data\n doc[\"id\"] = id\n doc[\"num\"] = None\n\n self.conn.add(**doc)", "def _prepare_invoice(self):\n self.ensure_one()\n result = super(SaleOrder, self)._prepare_invoice()\n result.update({\n 'cost_center_id': self.cost_center_id and self.cost_center_id.id or False\n })\n return result", "def get_orderno(self):\n WebDriverWait(self.driver, 20).until(EC.visibility_of_element_located((By.CSS_SELECTOR,'#order-no')))\n order_id=self.driver.find_element_by_css_selector('#order-no').text\n return order_id", "def client_order_id(self, client_order_id):\n\n self._client_order_id = client_order_id", "def test_update_order_with_no_status(self):\n response = self.api_test_client.put('{}/orders/1'.format(\n self.BASE_URL), json={})\n\n self.assertEqual(response.status_code, 400)\n self.assertEqual(response_as_json(\n response)['message'], 'Bad request. 
Missing required param')", "def get_order(self, walletId, orderId):\n return", "def order_item_id(self, order_item_id):\n if order_item_id is None:\n raise ValueError(\"Invalid value for `order_item_id`, must not be `None`\")\n\n self._order_item_id = order_item_id", "def testNoneValue(self):\n objectID = uuid4()\n user = createUser(u'username', u'password', u'User',\n u'user@example.com')\n namespace = createNamespace(user, u'name')\n tag = createTag(user, namespace, u'tag')\n self.store.add(TagValue(user.id, tag.id, objectID, None))", "def SaveOrder(self, order, tenantId, userId):\n\t\tif order:\n\t\t\tif order[\"Id\"]:\n\t\t\t\torderid = order[\"Id\"]\n\t\t\t\to = self.GetOrderById(orderid, tenantId)\n\t\t\t\tif o:\n\t\t\t\t\t#o.TenantId = tenantId\n\t\t\t\t\to.CustomerId = order[\"CustomerId\"]\n\t\t\t\t\to.OrderAmount = order[\"OrderAmount\"]\n\t\t\t\t\to.PaidAmount = order[\"PaidAmount\"]\n\t\t\t\t\to.IpAddress = order['IpAddress']\n\t\t\t\t\tif order['DueDate'] and len(order['DueDate']) > 0:\n\t\t\t\t\t\to.DueDate = dateutil.parser.parse(order['DueDate'])\n\t\t\t\t\tif order['OrderDate'] and len(order['OrderDate']) > 0:\n\t\t\t\t\t\to.OrderDate = dateutil.parser.parse(order['OrderDate'])\n\t\t\t\t\to.UpdatedBy = userId\n\t\t\t\t\to.UpdatedOn = datetime.utcnow()\n\n\t\t\t\t\tlineitems = order[\"LineItems\"]\n\t\t\t\t\tif lineitems:\n\t\t\t\t\t\to.LineItemsCount = len(lineitems)\n\t\t\t\t\t\to.OrderAmount = sum([x[\"SellPrice\"] * x[\"Quantity\"] for x in lineitems])\n\t\t\t\t\t\tDBSession.query(LineItem).filter(LineItem.OrderId == orderid).delete()\n\t\t\t\t\t\tself.SaveOrderLineItems(o.Id, lineitems)\n\t\t\t\t\telse:\n\t\t\t\t\t\to.LineItemsCount = 0\n\t\t\t\t\t\to.OrderAmount = 0\n\t\t\t\t\t\tDBSession.query(LineItem).filter(LineItem.OrderId == orderid).delete()\n\n\t\t\t\t\tpayments = order[\"Payments\"]\n\t\t\t\t\tif payments:\n\t\t\t\t\t\to.PaidAmount = sum([x[\"PaidAmount\"] for x in payments])\n\t\t\t\t\t\tDBSession.query(OrderPayment).filter(OrderPayment.OrderId == orderid).delete()\n\t\t\t\t\t\tself.SaveOrderPayments(o.Id, payments, userId)\n\t\t\t\t\telse:\n\t\t\t\t\t\to.PaidAmount = 0\n\t\t\t\t\t\tDBSession.query(OrderPayment).filter(OrderPayment.OrderId == orderid).delete()\n\t\tpass", "def __init__(self, mode, _id=None, order_dict=None, ):\n if _id:\n if type(_id) == str:\n _id = ObjectId(_id)\n self.order = orders_table.find_one(\n {\"_id\": _id})\n # self.deleted = True if self.order else False\n if order_dict:\n self.order = order_dict\n # self.order = orders_table.find_one(\n # {\"_id\": order_dict[\"_id\"]})\n # self.deleted = True if self.order else False", "def __init__(self, order_id):\n self.order_items = []\n self.order_id = order_id", "def test_cancel_order_with_order_id():\n\n client = Client(key, secret)\n response = client.cancel_oco_order(**params)\n response.should.equal(mock_item)", "def cancelOrder(self, order_number):\n pass", "def order_w_order_id(order_id):\n # Megnyutjuk a kapcsolatot\n conn = get_db()\n try:\n # Keszitunk egy cursort\n cur = conn.cursor()\n try:\n # Ezt a parameteres SQL lekerdezest hajtjuk vegre, mellyel megkapjuk az adott\n # order_id-ju megrendelest.\n cur.execute('SELECT description, vehicle_type, quantity, origin, destination,' +\n ' order_date, deadline_date, comment_text FROM orders WHERE' +\n ' order_id = :order_id', order_id=order_id)\n # Ebben a valtozoban lesz az eredmenytabla egyetlen\n # sora (Biztosan 1 lesz, mert az order_id egyedi)\n result = cur.fetchone()\n # Ha nem talaltunk ilyen megrendelest, szolunk a 
felhasznalonak\n if result is None:\n abort(404)\n else:\n # 2. feladat - lekerdezzuk az adott orszag valutajat\n #\n # Az origin illetve destination mezokben megkeressuk az orszag betujelet\n # Ez mindig a string vegen, ( es ) jelek kozott allo 2 betu.\n # Mivel ezek nagybetuvel irodtak at kell konvertalnunk kisbeture.\n # Ezek futtatjuk a kerest, majd a kapott eredmenyt JSON formatumra parsoljuk.\n # Ebbol kiolvassuk a valuta erteket, amit majd atadunk a kimeneti mezonknek.\n origin001 = result[3]\n origin_len = len(origin001)\n origin_tmp = origin001[origin_len-3:origin_len-1]\n origin_url = \"http://rapid.eik.bme.hu:9080/currency_ws/currencies/\" + origin_tmp.lower() + \".json\"\n r1 = requests.get(origin_url)\n var1 = r1.json()\n origin_currency = var1['currency']\n \n destination001 = result[4]\n destination_len = len(destination001)\n destination_tmp = destination001[destination_len-3:destination_len-1]\n destination_url = \"http://rapid.eik.bme.hu:9080/currency_ws/currencies/\" + destination_tmp.lower() + \".json\"\n r2 = requests.get(destination_url)\n var2 = r2.json()\n destination_currency = var2['currency']\n # Visszaterunk a JSON formatumu dictionary-vel,\n # ami mindent a megfelelo formatumban tarol\n return jsonify({\"description\": result[0],\n \"vehicle_type\": result[1],\n \"quantity\": result[2],\n \"origin\": result[3],\n \"destination\": result[4],\n \"order_date\": result[5].date().isoformat(),\n \"deadline_date\": result[6].date().isoformat(),\n \"comment_text\": result[7],\n \"origin_currency\": origin_currency,\n\"destination_currency\": destination_currency})\n finally:\n cur.close()\n finally:\n conn.close()", "def get_order_number(self):\n return self.__order_number", "def cancel_order(self, walletId, orderId):\n return", "def order_item_id(self):\n return self._order_item_id", "def _generate_order_number(self):\n return uuid.uuid4().hex.upper()", "def _generate_order_number(self):\n return uuid.uuid4().hex.upper()", "def _generate_order_number(self):\n return uuid.uuid4().hex.upper()", "def _set_id(self, value):\n pass", "def test_update_field_to_null(self, field, field_name):\n control = factories.ControlFactory()\n\n response = self.api.put(control, control.id, {field: None})\n\n self.assert400(response)\n self.assertEqual(response.json[\"message\"],\n field_name + \" for the object is not specified\")\n control = db.session.query(all_models.Control).get(control.id)\n self.assertIsNotNone(control.external_id)", "def save(self, *args, **kwargs):\n self.order_total = self.membership.price\n if not self.order_number:\n self.order_number = self._generate_order_number()\n super().save(*args, **kwargs)", "def setOrder(self, order):\n\t\tself.orderInData = order", "def order(self, order_id, symbol, **kwargs):\n pass", "def set_AWSMerchantId(self, value):\n super(ListOrdersInputSet, self)._set_input('AWSMerchantId', value)", "def format_order(order):\n if order is None:\n return ''\n else:\n return 'Ordered by ' + ORDER[order]", "def test_make_order_with_a_missing_field(self):\n\n\t\tres = self.login_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\n\t\torder_data = {\n\t\t\t\t\t\"owner\": \"Pemwa\",\n\t\t\t\t\t\"meal_name\": \"pizza\"\n\t\t\t\t\t }\n\n\t\tresponse = self.client().post(\n\t\t\t'/api/v2/orders',\n\t\t\theaders={\"x-access-token\": access_token},\n\t\t\tdata = json.dumps(\n\t\t\t\torder_data) , content_type = 'application/json')\n\t\tresult = json.loads(response.data)\n\t\tself.assertEqual(result[\"message\"], \"Missing 
argument\")\n\t\tself.assertEqual(response.status_code, 400)", "def _prepare_add_missing_fields(self, values):\n res = {}\n onchange_fields = ['name', 'price_unit', 'product_uom', 'tax_id']\n if values.get('order_id') and values.get('product_id') and any(f not in values for f in onchange_fields):\n line = self.new(values)\n line.product_id_change()\n for field in onchange_fields:\n if field not in values:\n res[field] = line._fields[field].convert_to_write(line[field], line)\n res['init_qty'] = values.get('product_uom_qty')\n _logger.debug(\"********************* dropship_portal\\sale_order res **********************: %r\", res)\n return res", "def test_make_order_with_some_data_as_empty_str(self):\n response = self.api_test_client.post('{}/orders'.format(\n self.BASE_URL), json={\n 'item_name': 'Watermelon', 'item_price': 200, 'quantity': ''\n }, headers={'Content-Type': 'application/json'})\n\n self.assertEqual(response.status_code, 400)\n self.assertEqual(response_as_json(\n response)['message'],\n 'Bad request. Price and quantity must be ints >= 1')", "def __init__(self, *args, **kwargs):\n self._orders = None\n super().__init__(*args, **kwargs)", "def get_raw_hash(cls, order_item):\n obj = copy.deepcopy(order_item)\n obj.order_id = None\n obj.order_version = None\n raw_order_id = yeti_utils_common.generate_id_md5_digit_20_for_object(obj)\n return raw_order_id", "def _get_blank_value_18(field):\n if field.null:\n return None\n else:\n return field.value_to_string(None)", "def get_prep_value(self, value):\n if (value is UNKNOWN) or (value is ''):\n # If Django tries to save an empty string, send the db None (NULL).\n return None\n else:\n # Otherwise, just pass the value.\n return value", "def _idxs_postformat_null(self):\n pass", "def getid_saveifneeded(self):\n #if (not hasattr(self,'id') or self.id == None):\n if (self.id == None):\n self.save()\n return self.id", "def get_specific_order(self,order_id):\n self.query = \"SELECT * FROM orders WHERE order_id=%s\"\n self.input = (order_id,) #tuple to support indexing\n self.query_1 = \"SELECT order_id FROM orders ORDER BY order_id DESC LIMIT 1.\"\n self.event = \"admin_get_specific_order\"\n self.error = \"Invalid order id\"\n self.message = \"Successfully fetched the order.\"\n self.order_id = order_id\n self.db_error = None", "def orderInfo(self, orderInfo):\r\n\r\n self._orderInfo = orderInfo", "def _create_id_column(df):\n ids = pd.Series(range(len(df)), dtype=object, index=df.index)\n ids[df[\"group\"].isin([None, False, np.nan, \"\"])] = None\n return ids.astype(str)", "def ProcessOrder(product_id):\n product = Product.query.filter_by(product_id = product_id)\n \n if (product):\n product.qty = product \n db.session.commit()", "def set_missing_id(self, data, **kwargs):\n if not data.get(\"project_id\"):\n data[\"project_id\"] = lambda: uuid.uuid4().hex\n\n return data", "def get_one_order():", "def SetOrder(self, order):\n if self.__order != order:\n self.__order = order\n self.Modified()", "def get(self, order_id):\n order = db.session.query(models.Order).filter_by(id=order_id).first()\n if order is None:\n return 'Order does not exist', 404\n return order.serialize(), 200", "def get_order_guid(order_id):\n return linnapi.orders.get_order_guid_by_order_id(order_id)", "def create_order(request, order, transaction_id):\n\n\n order.transaction_id = transaction_id\n print transaction_id\n #order.ip_address = request.META.get('REMOTE_ADDR')\n order.user = None\n #if request.user.is_authenticated():\n # order.user = request.user\n 
order.status = Order.SUBMITTED\n\n DBSession.add(order)\n\n\n if order:\n \"\"\" if the order save succeeded \"\"\"\n cart_items = cart.get_cart_items(request).all()\n print \"The items in the cart are: \", len(cart_items)\n\n for ci in cart_items:\n \"\"\" create order item for each cart item \"\"\"\n\n print \"The product is \", ci.product\n oi = OrderItem()\n oi.order_id = order.id\n oi.order = order\n oi.quantity = ci.quantity\n print \"The product id is \", ci.product.id\n oi.product_id = ci.product.id\n oi.product = ci.product\n\n oi.price = ci.price # now using @property\n DBSession.add(oi)\n\n # all set, clear the cart\n cart.empty_cart(request)\n\n ## save profile info for future orders\n #if request.user.is_authenticated():\n # from ecomstore.accounts import profile\n #\n # profile.set(request)\n\n return order", "def _on_order_not_found(self, msg):\r\n parts = msg[\"id\"].split(\":\")\r\n oid = parts[1]\r\n self.debug(\"### got 'Order not found' for\", oid)\r\n # we are now going to fake a user_order message (the one we\r\n # obviously missed earlier) that will have the effect of\r\n # removing the order cleanly.\r\n fakemsg = {\"user_order\": {\"oid\": oid, \"reason\": \"requested\"}}\r\n self._on_op_private_user_order(fakemsg)", "def payment_id_leading(self) -> bool:", "def reject_order(self, order: Order) -> None:\n order = self.get_order_by_id(order.id)\n order.status = OrderStatus.REJECT", "def create_order_in_database(self, customer_id):\n with sqlite3.connect('bangazon.db') as conn:\n c = conn.cursor()\n c.execute(\"insert into Orders values (null, '{}', null)\".format(customer_id))\n conn.commit()", "def fillOrder(self,orderID=None,order=None):\n\t\t#locate where the order resides\n\t\tif order is not None:\n\t\t\torderID=order.ID\n\t\tside=self.Stack[orderID].Side\n\t\tamount=0\n\t\t#calculate the amount to deposit basing on the side of the order\n\t\tif side is 'sell':\n\t\t\tamount=self[orderID].Price*self.Stack[orderID].Amount\n\t\telse:\n\t\t\tamount=self[orderID].Amount\n\t\t#deposit to the opposite account, if buy order, deposit into sell account,and vice\n\t\tside=self.invertSide(side)\n\t\tself[orderID].Account.deposit(side,amount)\n\t\t#Take order off the stack\n\t\tprint(\"\\n\"+order.Side.upper()+\" order \"+str(self[orderID].ID)+\" of \"+ str(self[orderID].Price) +\" has been filled\")\n\t\tself.destroyOrder(orderID)", "def order_code(self, order_code):\n\n self._order_code = order_code", "def getDbIntNone(self, db, key):\n val = self.getDbStrNone(db, key)\n if val != None:\n return int(val)\n else:\n return None", "def test_make_order_without_any_request_data(self):\n response = self.api_test_client.post('{}/orders'.format(\n self.BASE_URL), json={}, headers={\n 'Content-Type': 'application/json'})\n\n self.assertEqual(response.status_code, 400)\n self.assertEqual(response_as_json(\n response)['message'], 'Bad request. 
Missing required param')", "def make_order_active(self, active_customer_id):\n active_order = self.retrieve_order_by_payment_type_none(active_customer_id)\n if active_order == []:\n self.create_order_in_database(active_customer_id)\n active_order = self.retrieve_order_by_payment_type_none(active_customer_id)\n active_order_pk = active_order[0][0]\n return active_order_pk\n else:\n active_order_pk = active_order[0][0]\n return active_order_pk", "def quote_id(self) -> Optional[str]:\n return pulumi.get(self, \"quote_id\")", "def do_cancel(order):\r\n self.gox.cancel(order.oid)", "def order_created(order_id):\n order = Order.objects.get(id=order_id)\n sg = sendgrid.SendGridAPIClient(apikey=SENDGRID_API_KEY)\n subject1 = 'ะ—ะฐะผะพะฒะปะตะฝะฝั ะท ะผะฐะณะฐะทะธะฝัƒ ะœะตะฑะปั–-ะ›ะตะผ'\n message = 'ะ’ะฐัˆะต ะทะฐะผะพะฒะปะตะฝะฝั ะพั„ะพั€ะผะปะตะฝะพ. ะะพะผะตั€ ะฒะฐัˆะพะณะพ ะทะฐะผะพะฒะปะตะฝะฝั โ„– {}'.format(order.id)\n message += '\\n ะ— ะ’ะฐะผะธ ะทะฒ\\'ัะถะตั‚ัŒัั ะผะตะฝะตะดะถะตั€ \\n\\n ะ— ะฟะพะฒะฐะณะพัŽ, ะผะฐะณะฐะทะธะฝ \"ะœะตะฑะปั–-ะ›ะตะผ\"'\n content = Content(\"text/plain\", message)\n from_email = Email(ADMIN_EMAIL)\n to_email = Email(order.email)\n\n # subject2 = 'ะŸะพัั‚ัƒะฟะธะปะพ ะ—ะฐะผะพะฒะปะตะฝะฝั (ะœะตะฑะปั–-ะ›ะตะผ)'\n # message_admin = 'ะ—ะฐะผะพะฒะฝะธะบ {0} {1} ะท {2} \\n ะพั„ะพั€ะผะธะฒ ะทะฐะผะพะฒะปะตะฝะฝั โ„– {3}'.format(order.first_name,\n # order.last_name, order.address,\n # order.id)\n # message_admin += '\\n ะขะตะปะตั„ะพะฝ ะทะฐะผะพะฒะฝะธะบะฐ {}'.format(order.phone)\n # content_admin = Content(\"text/plain\", message_admin)\n mail = Mail(from_email, subject1, to_email, content)\n response = sg.client.mail.send.post(request_body=mail.get())\n\n return response", "def uuid(self, value):\n if value is not None:\n self.keystore['id'] = value\n elif 'id' in self.keystore:\n self.keystore.pop('id')", "def save(self, *args, **kwargs):\n if not self.tracking_number:\n self.tracking_number = self._generate_tracking_number()\n super().save(*args, **kwargs)", "def default_get(self, cr, uid, fields, context=None):\n\t\tif not context:context={}\n\t\tres = super(sale_order_delivery_wizard, self).default_get(cr, uid, fields, context=context)\n\n\t\tif 'active_id' in context:\n\t\t\torder_id = context.get('active_id', False)\n\t\t\tif order_id:\n\t\t\t\torder = self.pool.get('sale.order').browse(cr,uid,order_id,context)\n\t\t\t\tres['name']=order.name\n\t\t\t\tres['order_id']=order.id\n\t\t\t\tres['order_line']=[]\n\t\t\t\tres['delivery_date']=time.strftime('%Y-%m-%d')\n\t\t\t\tfor line in order.order_line:\n\t\t\t\t\tres['order_line'].append((0,0,{\n\t\t\t\t\t\t\"sequence_line\":line.sequence_line,\n\t\t\t\t\t\t\"name\":line.name or (line.product_id and line.product_id.name) or \"-\",\n\t\t\t\t\t\t\"line_id\":line and line.id,\n\t\t\t\t\t\t\"product_id\":line.product_id and line.product_id.id or False,\n\t\t\t\t\t\t\"product_qty\":line.product_uom_qty,\n\t\t\t\t\t\t\"product_uom\":line.product_uom and line.product_uom.id or False,\n\t\t\t\t\t\t\"product_uos_qty\":line.product_uos_qty,\n\t\t\t\t\t\t\"product_uos\":line.product_uos and line.product_uos.id or False,\n\t\t\t\t\t\t}))\n\t\treturn res", "def order_num(self):\n return self._order_number", "def set_cancelled_order(self):\n self.set_values(\n start_phrase='Cancelled Orders',\n end_phrase=None,\n start_with=2,\n end_until=-1,\n prop_keys=self.cancelled_order_keys,\n prop_name='cancelled_order'\n )\n\n self.cancelled_order = map(self.del_empty_keys, self.cancelled_order)\n self.fillna_dict_with_exists(\n self.cancelled_order,\n 
'time_cancelled',\n ('time_cancelled', 'spread', 'order', 'tif', 'status')\n )\n self.replace_nan(self.cancelled_order)\n\n self.convert_type(self.cancelled_order, 'time_cancelled', self.convert_datetime, 0)\n self.convert_type(self.cancelled_order, 'quantity', int, 0)\n self.convert_type(self.cancelled_order, 'strike', float, 0.0)\n self.convert_type(self.cancelled_order, 'price', float, 0.0)\n self.convert_type(self.cancelled_order, 'expire_date', str, '')", "def on_order(self, order: OrderData):\n pass", "def on_order(self, order: OrderData):\n pass", "def on_order(self, order: OrderData):\n pass", "def _get_blank_value_19(field):\n if field.null:\n return None\n else:\n return ''", "def to_id(self):\n return \"%s%s%s%s%s\" % (NoOpTraceId.VERSION, NoOpTraceId.DELIMITER,\n self.start_time,\n NoOpTraceId.DELIMITER, self.__number)", "def __init__(self, order_details: OrderDetails):\n self.details = order_details", "def test_id_type_none(self):\n obj = Base(None)\n self.assertTrue(obj.id is 1)", "def _generate_order_id():\n current_milli_time = str(int(round(time.time())))\n rand_str = random_string_generator()\n\n return '%s%s' % (rand_str, current_milli_time)", "def __str__(self):\r\n # If the original value is None, represent this as 'NULL'\r\n if self.original is None:\r\n return 'NULL'\r\n return str(self.original)", "def before_update(mapper, conn, target):\n if not target.id_:\n dataset = ObjectNumber.parse(target.d_id)\n target.id_ = str(PartitionNumber(dataset, target.sequence_id))", "def set_order(self, order):\n self.order = order", "def set_order(self, order):\n self.order = order", "def emptyGroupementPk(heb):\n heb.heb_groupement_pk = None\n heb.update()", "def test_id_no_value(self):\n self.line._parse_event_swimmer_id(\" \")\n self.assertEqual(None, self.line.event_swimmer_id)", "def test_get_specific_order(self):\n # Test with wrong parcel id\n # Correct format but not there\n response = self.client.get(\n 'api/v1/parcels/24034', headers=self.user_token_dict)\n data = json.loads(response.data)\n self.assertEqual(\n data, {'message': 'No Parcel delivery order with that id'})\n self.assertEqual(response.status_code, 400)\n # Test with wrong parcel id format\n response = self.client.get(\n 'api/v1/parcels/24034u', headers=self.user_token_dict) # Incorrect id format\n data = json.loads(response.data)\n self.assertEqual(data, {'message': 'Wrong id format'})\n self.assertEqual(response.status_code, 400)", "def test_fetch_specific_order_when_does_not_exist(self):\n response = self.api_test_client.get(\n '{}/orders/100'.format(self.BASE_URL))\n self.assertEqual(response.status_code, 404)\n self.assertEqual(\n 'Order with id 100 not found', response_as_json(\n response)['message'])", "def _check_create_order(self, **kwargs):\n error_info = None\n for key in self.NOT_NULL_ORDER_DATA:\n if not kwargs.get(key):\n error_info = self.NOT_NULL_ORDER_DATA.get(key)\n break\n\n if error_info:\n raise WechatKitException(error_info)\n\n trade_type = kwargs.get('trade_type')\n\n if trade_type == self.PAYMENT_JS:\n if not kwargs.get('openid'):\n raise WechatKitException('็”จๆˆทๆ ‡่ฏ†ไธ่ƒฝไธบ็ฉบ')\n elif trade_type == self.PAYMENT_NATIVE:\n if not kwargs.get('product_id'):\n raise WechatKitException('ๅ•†ๅ“IDไธ่ƒฝไธบ็ฉบ')", "def __clean_orders(self):\n canceled_id = []\n for order_id, order in self.orders_dict.items():\n if order[\"status\"] == \"canceled\":\n canceled_id.append(order_id)\n for id in canceled_id:\n del self.orders_dict[id]", "def null_value(self) -> 
Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")" ]
[ "0.6216553", "0.6184329", "0.6184329", "0.618138", "0.59689295", "0.5936228", "0.5936228", "0.5936228", "0.5925615", "0.57500046", "0.5691294", "0.5583406", "0.55813485", "0.54496884", "0.5350011", "0.52729756", "0.52557564", "0.523138", "0.52178276", "0.51731664", "0.51521206", "0.513243", "0.5065883", "0.5064389", "0.50552607", "0.5043103", "0.50352144", "0.5012607", "0.4966336", "0.49631912", "0.49627823", "0.49467558", "0.49381942", "0.49381942", "0.49381942", "0.4936492", "0.4923498", "0.49205402", "0.4920317", "0.49195668", "0.49175864", "0.49161884", "0.49132356", "0.48826617", "0.48723975", "0.4867815", "0.48538205", "0.48372293", "0.48226193", "0.48160928", "0.4815809", "0.48101583", "0.48003247", "0.479891", "0.47783667", "0.4768227", "0.47675332", "0.47672448", "0.47621077", "0.47598088", "0.4752595", "0.47455847", "0.47362745", "0.47292438", "0.47179538", "0.47179005", "0.47094372", "0.4706203", "0.46991533", "0.46948218", "0.4693847", "0.46774516", "0.4670573", "0.4669386", "0.46646392", "0.4664281", "0.4662952", "0.46620676", "0.46524355", "0.46524355", "0.46524355", "0.4651993", "0.46477973", "0.4646102", "0.46337783", "0.4630542", "0.46297413", "0.46274665", "0.46265143", "0.46265143", "0.4625415", "0.4619469", "0.4613243", "0.46126187", "0.461113", "0.4605225", "0.46043575", "0.46043575", "0.46043575", "0.46043575", "0.46043575" ]
0.0
-1
Check length of OrderId fields (len = 50)
def test_05(self): assert 'True' == Api.requestBlock('test-05', charOrder=50)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validateID(id):\n\n if re.compile('[0-9]+').match(id) == None:\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s' is not a valid Id. ID should be numeric with Length = '%s' \" \n\t\t\t% (id, lib.constants._ATTR_ID_LENGHT)))\n return -1\n else:\n # Check for the lenght \n counter = 0\n for char in id:\n counter += 1\n print counter , lib.constants._ATTR_ID_LENGHT\n if counter > lib.constants._ATTR_ID_LENGHT :\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s' exceeded the given length i.e Max Length = '%s'\" % \n\t\t\t(id, lib.constants._ATTR_ID_LENGHT)))\n return -1\n else:\n return 0\n return 0", "def test_get_shortuuid_specific_length(self):\n id = get_shortuuid(length=10)\n self.assertTrue(len(id) == 10)", "def is_order_id_valid(self):\n \n if not self.order_id:\n self.error_message = jsonify({'status':'error', 'message': 'orderId parameter missing'})\n return False\n if not re.match('^[a-f0-9]{32}$', self.order_id):\n self.error_message = jsonify({'status': 'error', 'message': 'orderId must be set to (hex) UUID'})\n return False\n return True", "def LengthTest(arr):\n\tif len(arr) == 8:\n\t\treturn True;\n\telif len(arr) == 7:\n\t\treturn IsMissingField('cid', arr)\n\telse:\n\t\treturn False", "def max_length(verifield, required):\n if verifield is None: return True\n return len(verifield) <= required", "def check_len( string_key ) : \r\n\r\n if len( string_key ) != 4 :\r\n\r\n raise Eggog( \"'%s': EGI wants the key to be exactly four characters!\" % (string_key, ) ) \r\n \r\n else :\r\n \r\n return True", "def __len__(self):\n return len(self.token2id)", "def min_length(verifield, required):\n if verifield is None: return True\n return len(verifield) >= required", "def test_length(self):\n form_data = self.form_data('c897B$eH@')\n form = self.form(data=form_data, user=self.u)\n self.assertFalse(form.is_valid())", "def payment_id_lengths(self) -> Set[int]:", "def _matchLength(self, length: int):\n return self._comparator['Length'] < length", "def check_length(length):\n if length > lengthLimit:\n err_str = \"The length value (%s) is higher than the \" % (length)\n err_str += \"limit length (%s)\" % (lengthLimit)\n raise ValueError(err_str)", "def test_client_email_max_length(self):\n request = Request.objects.get(id=1)\n max_length = request._meta.get_field('client_email').max_length\n self.assertEquals(max_length, 100)", "def __init__(self, length=DEFAULT_ID_LENGTH, excluded_chars=DEFAULT_EXCLUDED_CHARS):\n self.id_length = length\n self.excluded_chars = excluded_chars", "def validateIOmoduleId(output ,arg_dict , key):\n id = arg_dict[key]\n counter = 0\n for char in id:\n counter += 1\n if re.compile('[0-9]+').match(char[0]) == None:\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s'='%s' is not a valid Id. \\n ID should be numeric \" % (key,id))) \n return None\n if counter > lib.constants._ATTR_ID_LENGHT:\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s'='%s' is not a valid Id. 
\\n ID should be numeric with Length = '%s' \" % (key,id, lib.constants._ATTR_ID_LENGHT)))\n return None\n return arg_dict", "def _check_field_length(self, field, value, options=None):\n options = options if options else field.GetOptions()\n for (option, setting) in options.ListFields():\n if option.name == \"length\":\n if self.__gt_by_type(value, setting):\n if hasattr(field, \"name\"):\n raise FieldTooLongException(\"The field '\" + field.name +\n \"' is bigger than the allowed \" + str(setting) + \" bytes\")\n else:\n raise FieldTooLongException(\"List element '\" + str(value) +\n \"' is bigger than the allowed \" + str(setting) + \" bytes\")", "def len12(self, len): # -> None:\n ...", "def _validate_ordering_customer_50K(self, val):\n return val", "def test_field_length_matches_data_type_field_length(self):\n for known_message_type in KNOWN_MESSAGE_TYPES:\n for field in known_message_type.fields:\n if self.is_verbose:\n print 'Checking length setup of field {0} in message {1}'.format(field.name, known_message_type.name)\n self.assertEqual(field.length, field.data_type.length)", "def _check_values_len(self, data_batch: Dict[str, List[str]]):\n values_len = [len(v) for _, v in data_batch.items()]\n unique_len = len(set(values_len))\n assert unique_len == 1, \"Length of values are not consistent across\"", "def test_client_ip_max_length(self):\n request = Request.objects.get(id=1)\n max_length = request._meta.get_field('client_ip').max_length\n self.assertEquals(max_length, 100)", "def validate_length(column_name, value, length):\n valuelength = len(value)\n if valuelength > int(length) >= 0:\n return \"{0} : value '{1}' is greater than the specified length {2}\".format(column_name, value, length)\n elif valuelength < int(length) and int(length) >= 0:\n return \"{0} : value '{1}' is less than the specified length {2}\".format(column_name, value, length)\n\n return None", "def validate_order_id(self, value):\n\n if not Order.objects.filter(order_id=value).exists():\n raise ValidationError(f'Order with id {value} does not exist.')\n order_obj = Order.objects.get(order_id=value)\n if order_obj.assign_time is None:\n raise ValidationError(f'Order with id {value} was not assigned to any courier.')\n if order_obj.complete_time is not None:\n raise ValidationError(f'Order with id {value} has already been completed.')\n return value", "def test_field_len(self):\n client = self.base_scenario(\n frang_config=\"http_field_len 300;\",\n requests=[f\"POST /1234 HTTP/1.1\\r\\nHost: localhost\\r\\nX-Long: {'1' * 320}\\r\\n\\r\\n\"],\n )\n self.check_response(\n client,\n status_code=\"403\",\n warning_msg=\"frang: HTTP (in-progress )?field length exceeded for\",\n )", "def _validate_ordering_customer_50A(self, val):\n return val", "def checkLength(ish, length):\n if ish == \"Visa\":\n ok = (13,16)\n elif ish == \"American Express\":\n ok = (15,)\n elif ish == \"MasterCard\":\n ok = (16,)\n elif ish == \"Discover/Novus\":\n ok = (16,)\n else:\n raise TypeError, \"unknown issuer\"\n return length in ok", "def mobile_len_validator(mobile):\n if len(mobile) != 13:\n raise ValidationError('Invalid mobile len')", "def checkPacketLength(self):\n return self.packetLength == len(self) - PRIMARY_HEADER_BYTE_SIZE - 1", "def nextValidId(self, orderId):\n if getattr(self, '_my_orderid_data', None) is None:\n ## getting an ID which we haven't asked for\n ## this happens, IB server just sends this along occassionally\n self.init_nextvalidid()\n\n self._my_orderid_data.put(orderId)", "def _validate_length(data, min, 
max, err): # lint-amnesty, pylint: disable=redefined-builtin\n if len(data) < min or len(data) > max:\n raise errors.AccountDataBadLength(err)", "def test_has_correct_length(self) -> None:\n assert len(list(ccc.MessageDataset())) == 138737", "def test_uid_max_length(self):\n\n field = self.image._meta.get_field(\"uid\")\n self.assertEqual(field.max_length, 64)", "def _validate_length(self, value):\n return (self.maximum_length is None) or (len(value) <= self.maximum_length)", "def test_field_len(self):\n client = self.base_scenario(\n frang_config=\"http_field_len 300;\",\n requests=[self.post_request + [(\"header\", \"x\" * 320)]],\n )\n self.check_response(\n client,\n status_code=\"403\",\n warning_msg=\"frang: HTTP (in-progress )?field length exceeded for\",\n )", "def check_id(self, id):", "def _requiredLength(self):\n if self.partner is not None:\n if self.partner.level == self.level:\n return len(self.partner)\n elif self.partner.level < self.level:\n return 1\n elif self._value is not None:\n return 1\n else:\n return 0", "def _check_max_length(self, p: list) -> bool:\n\n return (\n len(p[0].split(\" \")) < self.max_length\n and len(p[1].split(\" \")) < self.max_length\n )", "def test_prep_charfield_size(self):\n pass", "def len23(self, len): # -> None:\n ...", "def test_length(self):\n for length in range(2, 30):\n self.assertEqual(len(generate_password(length)), length)", "def test_neg_list_size_with_nonexistent_key(self):\n charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'\n minLength = 5\n maxLength = 30\n length = random.randint(minLength, maxLength)\n key = ('test', 'demo', ''.join(map(lambda unused:\n random.choice(charSet),\n range(length))) + \".com\")\n try:\n self.as_connection.list_size(key, \"contact_no\")\n except e.RecordNotFound as exception:\n assert exception.code == 2", "def validateVfabric(output ,arg_dict, key):\n id = arg_dict[key]\n counter = 0\n for char in id:\n counter += 1\n if re.compile('[0-9]+').match(char[0]) == None:\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s' = '%s' is not a valid Id. ID should be numeric \" % \n\t\t\t\t(key,id)))\n return None\n if counter > lib.constants._ATTR_ID_LENGHT:\n\t output.completeOutputError(InvalidArgumentCount(descape =\"'%s'='%s' is not a valid Id. \\n ID should be numeric with Length = '%s' \" % (key,id, lib.constants._ATTR_ID_LENGHT)))\n return None\n return arg_dict", "def verify_length(src_json):\n error: str = \"\"\n if len(src_json.get(\"LedGroups\", [])) == 0:\n error = \"No or empty LedGroups\"\n if len(src_json.get(\"Sequencers\", [])) == 0:\n error = \"No or empty Sequencers\"\n return error", "def confirm_resdic_chainid_length(params):\n resdic_params = (p for p in params if p.startswith('resdic_'))\n for param in resdic_params:\n chainid = param.split('_')[-1]\n if len(chainid) > 1:\n raise ValueError(\n f\"We found the parameter {param!r} which has \"\n \"more than one character in the chain \"\n \"identifier. 
Chain IDs should have only one character.\"\n )", "def _check_subject_id(subject_id):\n if (subject_id and\n len(subject_id) > models.Subject.id.property.columns[\n 0].type.length):\n raise exception.SubjectNotFound()", "def test_has_correct_length(self) -> None:\n assert len(list(ccc.ConversationDataset())) == 7168", "def _validate_ordering_customer_50F(self, val):\n return val", "def test_systemstatus_name_length(self):\n\n # get object\n systemstatus_1 = Systemstatus.objects.get(systemstatus_name='systemstatus_1')\n # get max length\n max_length = systemstatus_1._meta.get_field('systemstatus_name').max_length\n # compare\n self.assertEqual(max_length, 30)", "def test_rfc_nickkey_length(s):\n assert len(util.rfc_nickkey(s)) == len(s)", "def check_embed(embed: Embed) -> bool:\n if len(embed) <= 6000:\n if hasattr(embed, \"title\"):\n if len(embed.title) <= 256:\n pass\n else:\n return False\n if len(embed.fields) <= 25:\n for field in embed.fields:\n if len(field.name) <= 69420:\n pass", "def payment_id_leading(self) -> bool:", "def test_has_correct_length(self) -> None:\n assert len(list(ccc.CompoundingConversationDataset())) == 131569", "def test_generation_length(self):\n for i in range(1, 20, 3):\n test_obj = FakeOrderBuilder(n=i).build()\n self.assertIs(len(test_obj), i)", "def validate_uuid(self, uuid_to_check):\r\n if re.fullmatch(BASE62_REGEX, uuid_to_check):\r\n return 20 <= len(uuid_to_check) <= 22\r\n else:\r\n return False", "def _check_description_count(self):\n\n for rec in self:\n if rec.description and len(rec.description)>50:\n raise except_orm(_('Warning!'),\n _(\"Description Lenght must be less than or equal to 50. \"))", "def test_has_correct_length(self) -> None:\n assert len(list(self._dataset)) == 7168", "def test_get_length(t_list):\n if not get_length(t_list) == 10:\n raise ValueError(\"Wrong number of transactions\")", "def test_check_name_is_3_parts():\n check_name_length()", "def test_40_phonenumbers_too_long(self):\n number_phone = self.samples[4]\n with self.assertRaises(osv.except_osv):\n self.pn._symbol_set_char(number_phone)", "def test_longest_id(self):\r\n ids = \\\r\n \"R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969\".split(\r\n )\r\n seqs = dict(parse_fasta(dna_seqs.splitlines(),\r\n label_to_name=label_to_name))\r\n self.assertEqual(longest_id(ids, seqs), 'U1PLI_403')", "def test_add_too_long(self):\n self.open_url('/group/add')\n el = self.wd.find_element(By.ID, \"name\")\n el.send_keys(\"X\" * 256)\n self.submit_form(\"group_form\")\n self.assert_form_error(\"Field cannot be longer than 255 characters.\")", "def test_minlength():\n assert has_min_length(None, 8) is None\n assert has_min_length('abcd1234', 8) is None\n assert has_min_length('a', 8)", "def validate_chunk_width(chunk_width):\n if not isinstance(chunk_width, str):\n return False\n a = chunk_width.split(\",\")\n assert len(a) != 0 # would be code error\n for elem in a:\n try:\n i = int(elem)\n if i < 1 and i != -1:\n return False\n except:\n return False\n return True", "def validate_identifier(self, identifier):\n pass", "def isValid(t_id):\n\tstr_id=str(t_id).strip()\n\treturn str_id.isdigit()", "def _check_id(self, keyword):\n if keyword not in self.request.data:\n return '{} parameter is missing'.format(keyword)\n \"\"\" Check if <keyword> parameter is not None \"\"\"\n if self.request.data[keyword] == '':\n return '{} ID cannot be None'.format(keyword)\n \"\"\" Check if <keyword> parameter is > 0 \"\"\"\n if int(self.request.data[keyword]) < 1:\n return '{} ID must be an 
integer > 0'.format(keyword)", "def validVarConstructLength(self,varlen):\r\n if len(varlen)!=2:\r\n print 'variable must specify name and type'\r\n return False\r\n else:\r\n return True", "def validate_crx_id(crx_id):\n try:\n assert isinstance(crx_id, str)\n assert crx_id.isalnum()\n assert len(crx_id) == 32\n except AssertionError:\n raise MalformedExtId", "def checkValidId(self, id, prep_id = False):\n # RRD docs say that limit on vnames is 255 characters and that\n # A-Za-z0-9_ are the valid characters. Zenoss reserves - for it's own\n # use. Limiting to 200 instead just to leave room for whatever.\n # http://oss.oetiker.ch/rrdtool/doc/rrdgraph_data.en.html\n if len(id) > 200:\n return 'GraphPoint names can not be longer than 200 characters.'\n allowed = set(string.ascii_letters + string.digits + '_')\n attempted = set(id)\n if not attempted.issubset(allowed):\n return 'Only letters, digits and underscores are allowed' + \\\n ' in GraphPoint names.'\n return ZenModelRM.checkValidId(self, id, prep_id)", "def _update_length(self, field, tag_id, value):\n # pylint: disable=unused-argument\n if tag_id not in {8, 9, 10}:\n self._message_length += len(field) + 1\n if self._message_length >= self._max_length:\n raise FIXLengthTooLongError(\n f'message too long: {self._message_length}')", "def __len__(self):\n # Header + len(group id) + group id\n size = self.HEADER_LEN + 2 + len(self.group_id)\n # + len(member id) + member id\n size += 2 + len(self.member_id)\n return size", "def invalid_caterpillar_length(length):\n try:\n length = int(length)\n except ValueError:\n return \"`caterpillar-length` must be something that can be cast to an `int`\"\n\n if not 1 <= length <= len(app.desk.indeces):\n return (\n \"`caterpillar-length` must be a number between \"\n f\"1 and {len(app.desk.indeces)}\"\n )\n\n return False", "def __len__(self) -> int:\n return len(self.ids)", "def validateId(shortId):\n return shortId in [DockerUtil.getShortId(container) for container in DOCKER_CLIENT.containers.list()]", "def validate_kf_id(kf_id, prefix='TA'):\n if len(kf_id) != 11 or kf_id[:3] != prefix+'_':\n abort(400, f\"'{kf_id}' is not a valid kf_id\")", "def _validate_string_min_length(self, value):\n if self.min_length is not None:\n return len(str(value)) >= self.min_length\n else:\n return True", "def __len__(self):\n # Header + len(group id) + group id + generation id\n size = self.HEADER_LEN + 2 + len(self.group_id) + 4\n # + len(member id) + member id\n size += 2 + len(self.member_id)\n return size", "def checkValidId(self, id, prep_id = False):\n new_id = unquote(id)\n if prep_id: new_id = self.prepId(id)\n try:\n globalCheckValidId(self, new_id)\n return True\n except Exception:\n return str(sys.exc_info()[1])", "def DLEN(self):", "def test_details_nonnum_id(self):\n self.check_response(\n '/attributes/xyz',\n ('Please enter an integer value for Attribute ID',))", "def test_maxlength():\n assert has_max_length(None, 2) is None\n assert has_max_length('abcd1234', 2)\n assert has_max_length('a', 2) is None", "def field_length(self, fieldname):\n\t\t# todo: is this right?\n\t\tquery = {fieldname: {'$exists': 1}}\n\t\treturn self.index.collection.find(query).count()", "def test_book_isbn_length_must_be_ten(self):\n\n\t\twith self.client:\n\t\t\tadd_book = {\n\t\t\t\t'title': 'Hello Books',\n\t\t\t\t'isbn': '56987451'\n\t\t\t}\n\t\t\tlogin_data = self.login_test_user()\n\t\t\ttoken = login_data['auth_token']\n\t\t\tres = 
self.client.post(\n\t\t\t\tf'{URL_BOOKS}',\n\t\t\t\theaders=dict(Authorization=f'Bearer {token}'),\n\t\t\t\tcontent_type='application/json',\n\t\t\t\tdata=json.dumps(add_book)\n\t\t\t)\n\t\t\tres2 = json.loads(res.data.decode())\n\t\t\tself.assertIn('isbn length must be 10', str(res2))", "def _check_identifier_fields(self, doi: Doi):\n # Make sure we have an identifier to key off of\n if not doi.pds_identifier:\n raise InvalidRecordException(\n \"Record provided with missing PDS identifier field. \"\n \"Please ensure a LIDVID or similar identifier is provided for \"\n \"all DOI requests.\"\n )\n\n # Make sure the doi and id fields are consistent, if present\n if doi.doi and doi.id:\n prefix, suffix = doi.doi.split(\"/\")\n\n if suffix != doi.id:\n raise InvalidRecordException(\n f\"Record for {doi.pds_identifier} has inconsistent \"\n f\"DOI ({doi.doi}) and ID ({doi.id}) fields. Please reconcile \"\n \"the inconsistency and resubmit the request.\"\n )", "def length_of_name(self, name):\n length = len(name)\n if length > 10:\n self.show_message_when_name_very_long()\n return length", "def verify_length(src_json: Dict[str, List[str]]):\n if len(src_json.get(\"Leds\", [])) == 0:\n return \"No Leds in Group\"", "def check_len(password_length, alphabet_length, numb_length, symb_length):\r\n return (symb_length + alphabet_length + numb_length) == password_length", "def test_prep_not_size(self):\n pass", "def determine_max_length(sequences, ids):\n max_len = 0\n for i in ids:\n if len(sequences[i]) > max_len:\n max_len = len(sequences[i])\n\n return max_len", "def test_max_length_validation(self):", "def test_max_length_validation(self):", "def test_field_without_reaching_the_limit_2(self):\n client = self.base_scenario(\n frang_config=\"http_field_len 300;\",\n requests=[f\"POST /1234 HTTP/1.1\\r\\nHost: localhost\\r\\nX-Long: {'1' * 292}\\r\\n\\r\\n\"],\n )\n self.check_response(\n client,\n status_code=\"200\",\n warning_msg=\"frang: HTTP (in-progress )?field length exceeded for\",\n )", "def __len__(self):\n return len(self._order)", "def test_field_without_reaching_the_limit(self):\n client = self.base_scenario(\n frang_config=\"http_field_len 300;\",\n requests=[f\"POST /1234 HTTP/1.1\\r\\nHost: localhost\\r\\nX-Long: {'1' * 200}\\r\\n\\r\\n\"],\n )\n self.check_response(\n client,\n status_code=\"200\",\n warning_msg=\"frang: HTTP (in-progress )?field length exceeded for\",\n )", "def _is_size_key (self, key):\n return key == '$size' or key == 'size'", "def test_element_count_validation(self):\r\n TestSetModel.create(text_set={str(uuid4()) for i in range(65535)})\r\n with self.assertRaises(ValidationError):\r\n TestSetModel.create(text_set={str(uuid4()) for i in range(65536)})", "def validate(cls, **kwargs: Any) -> None:\n max_length = kwargs.get(\"max_length\", None)\n if max_length <= 0:\n raise ModelDefinitionError(\n \"Parameter max_length is required for field String\"\n )", "def test_validate_min_length(self):\n\n test_strings = [\n 'oa',\n 'al',\n 'v',\n ]\n\n testrow = TestSchema()\n\n for test_string in test_strings:\n testrow.string_min_field = test_string\n self.assertRaises(Exception, testrow.save)", "def test_category_name_field_max_length(self):\n cat = Category.objects.get(id=1)\n max_length = cat._meta.get_field('name').max_length\n self.assertEqual(max_length, 20)", "def test_invalid_numeric_code_length_format(self, cred, sender_id):\n resp = requests.get(verify_url.format('json', cred[0], cred[1],\n 'TestApp', test_number), params={'sender_id': sender_id})\n assert 
resp.status_code == 200\n assert resp.headers['Content-Type'] == 'application/json'\n assert resp.json()['status'] == '3'\n assert resp.json()['error_text'] == parameter_is_too_long_msg.format('sender_id')", "def valid_message_length(self):\n if self.message_len() > 0:\n if self.message_len() <= self.max_msg_length:\n return True\n return False" ]
[ "0.6525614", "0.63231474", "0.6213551", "0.6202628", "0.6070328", "0.5964003", "0.5954448", "0.59133357", "0.58616084", "0.5828075", "0.5762874", "0.5752543", "0.57474065", "0.57324255", "0.56937385", "0.56560177", "0.5625483", "0.5602283", "0.5593534", "0.5563847", "0.55593526", "0.55439407", "0.5527439", "0.5524459", "0.5511306", "0.55011624", "0.5477324", "0.54625404", "0.5458476", "0.54437995", "0.5441035", "0.54146445", "0.5403819", "0.537403", "0.5342479", "0.5339004", "0.5336136", "0.5332084", "0.53298306", "0.53183824", "0.5310071", "0.5304321", "0.5302249", "0.5286762", "0.5284353", "0.5268008", "0.5266575", "0.5260451", "0.5245266", "0.52281195", "0.5220321", "0.5220255", "0.5217079", "0.5204908", "0.5201033", "0.51894623", "0.5184514", "0.51844764", "0.5173563", "0.5172751", "0.51599383", "0.5156658", "0.51534826", "0.5137121", "0.5121068", "0.5119494", "0.5114651", "0.5107901", "0.5105536", "0.5095653", "0.5092911", "0.5086558", "0.50832915", "0.50731087", "0.50655675", "0.5064684", "0.50646085", "0.5053514", "0.5051697", "0.5048649", "0.50464344", "0.50445026", "0.50429255", "0.50381464", "0.5025202", "0.502283", "0.5017825", "0.5015429", "0.50146914", "0.5008081", "0.5008081", "0.5007191", "0.5004043", "0.49876878", "0.49829715", "0.4972981", "0.49728724", "0.4969746", "0.49666455", "0.49624127", "0.49570101" ]
0.0
-1
Check length of OrderId fields (len = 51)
def test_06(self): assert 'False' == Api.requestBlock('test-06', charOrder=51)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validateID(id):\n\n if re.compile('[0-9]+').match(id) == None:\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s' is not a valid Id. ID should be numeric with Length = '%s' \" \n\t\t\t% (id, lib.constants._ATTR_ID_LENGHT)))\n return -1\n else:\n # Check for the lenght \n counter = 0\n for char in id:\n counter += 1\n print counter , lib.constants._ATTR_ID_LENGHT\n if counter > lib.constants._ATTR_ID_LENGHT :\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s' exceeded the given length i.e Max Length = '%s'\" % \n\t\t\t(id, lib.constants._ATTR_ID_LENGHT)))\n return -1\n else:\n return 0\n return 0", "def is_order_id_valid(self):\n \n if not self.order_id:\n self.error_message = jsonify({'status':'error', 'message': 'orderId parameter missing'})\n return False\n if not re.match('^[a-f0-9]{32}$', self.order_id):\n self.error_message = jsonify({'status': 'error', 'message': 'orderId must be set to (hex) UUID'})\n return False\n return True", "def test_get_shortuuid_specific_length(self):\n id = get_shortuuid(length=10)\n self.assertTrue(len(id) == 10)", "def LengthTest(arr):\n\tif len(arr) == 8:\n\t\treturn True;\n\telif len(arr) == 7:\n\t\treturn IsMissingField('cid', arr)\n\telse:\n\t\treturn False", "def __len__(self):\n return len(self.token2id)", "def check_len( string_key ) : \r\n\r\n if len( string_key ) != 4 :\r\n\r\n raise Eggog( \"'%s': EGI wants the key to be exactly four characters!\" % (string_key, ) ) \r\n \r\n else :\r\n \r\n return True", "def payment_id_lengths(self) -> Set[int]:", "def max_length(verifield, required):\n if verifield is None: return True\n return len(verifield) <= required", "def min_length(verifield, required):\n if verifield is None: return True\n return len(verifield) >= required", "def _matchLength(self, length: int):\n return self._comparator['Length'] < length", "def test_length(self):\n form_data = self.form_data('c897B$eH@')\n form = self.form(data=form_data, user=self.u)\n self.assertFalse(form.is_valid())", "def checkPacketLength(self):\n return self.packetLength == len(self) - PRIMARY_HEADER_BYTE_SIZE - 1", "def validateIOmoduleId(output ,arg_dict , key):\n id = arg_dict[key]\n counter = 0\n for char in id:\n counter += 1\n if re.compile('[0-9]+').match(char[0]) == None:\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s'='%s' is not a valid Id. \\n ID should be numeric \" % (key,id))) \n return None\n if counter > lib.constants._ATTR_ID_LENGHT:\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s'='%s' is not a valid Id. 
\\n ID should be numeric with Length = '%s' \" % (key,id, lib.constants._ATTR_ID_LENGHT)))\n return None\n return arg_dict", "def test_field_length_matches_data_type_field_length(self):\n for known_message_type in KNOWN_MESSAGE_TYPES:\n for field in known_message_type.fields:\n if self.is_verbose:\n print 'Checking length setup of field {0} in message {1}'.format(field.name, known_message_type.name)\n self.assertEqual(field.length, field.data_type.length)", "def len12(self, len): # -> None:\n ...", "def __init__(self, length=DEFAULT_ID_LENGTH, excluded_chars=DEFAULT_EXCLUDED_CHARS):\n self.id_length = length\n self.excluded_chars = excluded_chars", "def check_length(length):\n if length > lengthLimit:\n err_str = \"The length value (%s) is higher than the \" % (length)\n err_str += \"limit length (%s)\" % (lengthLimit)\n raise ValueError(err_str)", "def checkLength(ish, length):\n if ish == \"Visa\":\n ok = (13,16)\n elif ish == \"American Express\":\n ok = (15,)\n elif ish == \"MasterCard\":\n ok = (16,)\n elif ish == \"Discover/Novus\":\n ok = (16,)\n else:\n raise TypeError, \"unknown issuer\"\n return length in ok", "def _check_values_len(self, data_batch: Dict[str, List[str]]):\n values_len = [len(v) for _, v in data_batch.items()]\n unique_len = len(set(values_len))\n assert unique_len == 1, \"Length of values are not consistent across\"", "def validate_order_id(self, value):\n\n if not Order.objects.filter(order_id=value).exists():\n raise ValidationError(f'Order with id {value} does not exist.')\n order_obj = Order.objects.get(order_id=value)\n if order_obj.assign_time is None:\n raise ValidationError(f'Order with id {value} was not assigned to any courier.')\n if order_obj.complete_time is not None:\n raise ValidationError(f'Order with id {value} has already been completed.')\n return value", "def test_has_correct_length(self) -> None:\n assert len(list(ccc.MessageDataset())) == 138737", "def _check_field_length(self, field, value, options=None):\n options = options if options else field.GetOptions()\n for (option, setting) in options.ListFields():\n if option.name == \"length\":\n if self.__gt_by_type(value, setting):\n if hasattr(field, \"name\"):\n raise FieldTooLongException(\"The field '\" + field.name +\n \"' is bigger than the allowed \" + str(setting) + \" bytes\")\n else:\n raise FieldTooLongException(\"List element '\" + str(value) +\n \"' is bigger than the allowed \" + str(setting) + \" bytes\")", "def _validate_ordering_customer_50K(self, val):\n return val", "def nextValidId(self, orderId):\n if getattr(self, '_my_orderid_data', None) is None:\n ## getting an ID which we haven't asked for\n ## this happens, IB server just sends this along occassionally\n self.init_nextvalidid()\n\n self._my_orderid_data.put(orderId)", "def test_client_email_max_length(self):\n request = Request.objects.get(id=1)\n max_length = request._meta.get_field('client_email').max_length\n self.assertEquals(max_length, 100)", "def _validate_ordering_customer_50A(self, val):\n return val", "def validate_length(column_name, value, length):\n valuelength = len(value)\n if valuelength > int(length) >= 0:\n return \"{0} : value '{1}' is greater than the specified length {2}\".format(column_name, value, length)\n elif valuelength < int(length) and int(length) >= 0:\n return \"{0} : value '{1}' is less than the specified length {2}\".format(column_name, value, length)\n\n return None", "def mobile_len_validator(mobile):\n if len(mobile) != 13:\n raise ValidationError('Invalid mobile len')", "def 
test_field_len(self):\n client = self.base_scenario(\n frang_config=\"http_field_len 300;\",\n requests=[f\"POST /1234 HTTP/1.1\\r\\nHost: localhost\\r\\nX-Long: {'1' * 320}\\r\\n\\r\\n\"],\n )\n self.check_response(\n client,\n status_code=\"403\",\n warning_msg=\"frang: HTTP (in-progress )?field length exceeded for\",\n )", "def len23(self, len): # -> None:\n ...", "def test_client_ip_max_length(self):\n request = Request.objects.get(id=1)\n max_length = request._meta.get_field('client_ip').max_length\n self.assertEquals(max_length, 100)", "def verify_length(src_json):\n error: str = \"\"\n if len(src_json.get(\"LedGroups\", [])) == 0:\n error = \"No or empty LedGroups\"\n if len(src_json.get(\"Sequencers\", [])) == 0:\n error = \"No or empty Sequencers\"\n return error", "def check_id(self, id):", "def confirm_resdic_chainid_length(params):\n resdic_params = (p for p in params if p.startswith('resdic_'))\n for param in resdic_params:\n chainid = param.split('_')[-1]\n if len(chainid) > 1:\n raise ValueError(\n f\"We found the parameter {param!r} which has \"\n \"more than one character in the chain \"\n \"identifier. Chain IDs should have only one character.\"\n )", "def _validate_length(data, min, max, err): # lint-amnesty, pylint: disable=redefined-builtin\n if len(data) < min or len(data) > max:\n raise errors.AccountDataBadLength(err)", "def test_has_correct_length(self) -> None:\n assert len(list(self._dataset)) == 7168", "def validate_uuid(self, uuid_to_check):\r\n if re.fullmatch(BASE62_REGEX, uuid_to_check):\r\n return 20 <= len(uuid_to_check) <= 22\r\n else:\r\n return False", "def payment_id_leading(self) -> bool:", "def _requiredLength(self):\n if self.partner is not None:\n if self.partner.level == self.level:\n return len(self.partner)\n elif self.partner.level < self.level:\n return 1\n elif self._value is not None:\n return 1\n else:\n return 0", "def test_has_correct_length(self) -> None:\n assert len(list(ccc.ConversationDataset())) == 7168", "def validateVfabric(output ,arg_dict, key):\n id = arg_dict[key]\n counter = 0\n for char in id:\n counter += 1\n if re.compile('[0-9]+').match(char[0]) == None:\n output.completeOutputError(InvalidArgumentCount(descape =\"'%s' = '%s' is not a valid Id. ID should be numeric \" % \n\t\t\t\t(key,id)))\n return None\n if counter > lib.constants._ATTR_ID_LENGHT:\n\t output.completeOutputError(InvalidArgumentCount(descape =\"'%s'='%s' is not a valid Id. 
\\n ID should be numeric with Length = '%s' \" % (key,id, lib.constants._ATTR_ID_LENGHT)))\n return None\n return arg_dict", "def test_rfc_nickkey_length(s):\n assert len(util.rfc_nickkey(s)) == len(s)", "def test_prep_charfield_size(self):\n pass", "def test_length(self):\n for length in range(2, 30):\n self.assertEqual(len(generate_password(length)), length)", "def _validate_length(self, value):\n return (self.maximum_length is None) or (len(value) <= self.maximum_length)", "def __len__(self):\n # Header + len(group id) + group id\n size = self.HEADER_LEN + 2 + len(self.group_id)\n # + len(member id) + member id\n size += 2 + len(self.member_id)\n return size", "def test_field_len(self):\n client = self.base_scenario(\n frang_config=\"http_field_len 300;\",\n requests=[self.post_request + [(\"header\", \"x\" * 320)]],\n )\n self.check_response(\n client,\n status_code=\"403\",\n warning_msg=\"frang: HTTP (in-progress )?field length exceeded for\",\n )", "def test_uid_max_length(self):\n\n field = self.image._meta.get_field(\"uid\")\n self.assertEqual(field.max_length, 64)", "def test_has_correct_length(self) -> None:\n assert len(list(ccc.CompoundingConversationDataset())) == 131569", "def validate_identifier(self, identifier):\n pass", "def __len__(self):\n # Header + len(group id) + group id + generation id\n size = self.HEADER_LEN + 2 + len(self.group_id) + 4\n # + len(member id) + member id\n size += 2 + len(self.member_id)\n return size", "def _check_max_length(self, p: list) -> bool:\n\n return (\n len(p[0].split(\" \")) < self.max_length\n and len(p[1].split(\" \")) < self.max_length\n )", "def _check_identifier_fields(self, doi: Doi):\n # Make sure we have an identifier to key off of\n if not doi.pds_identifier:\n raise InvalidRecordException(\n \"Record provided with missing PDS identifier field. \"\n \"Please ensure a LIDVID or similar identifier is provided for \"\n \"all DOI requests.\"\n )\n\n # Make sure the doi and id fields are consistent, if present\n if doi.doi and doi.id:\n prefix, suffix = doi.doi.split(\"/\")\n\n if suffix != doi.id:\n raise InvalidRecordException(\n f\"Record for {doi.pds_identifier} has inconsistent \"\n f\"DOI ({doi.doi}) and ID ({doi.id}) fields. 
Please reconcile \"\n \"the inconsistency and resubmit the request.\"\n )", "def test_neg_list_size_with_nonexistent_key(self):\n charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'\n minLength = 5\n maxLength = 30\n length = random.randint(minLength, maxLength)\n key = ('test', 'demo', ''.join(map(lambda unused:\n random.choice(charSet),\n range(length))) + \".com\")\n try:\n self.as_connection.list_size(key, \"contact_no\")\n except e.RecordNotFound as exception:\n assert exception.code == 2", "def isValid(t_id):\n\tstr_id=str(t_id).strip()\n\treturn str_id.isdigit()", "def _validate_ordering_customer_50F(self, val):\n return val", "def test_generation_length(self):\n for i in range(1, 20, 3):\n test_obj = FakeOrderBuilder(n=i).build()\n self.assertIs(len(test_obj), i)", "def validate_orcid(orcid):\n orcid_regex = r\"\\A[0-9]{4}-[0-9]{4}-[0-9]{4}-[0-9]{3}[0-9X]\\Z\"\n\n if not re.match(orcid_regex, orcid):\n raise ValueError(f\"The format of this ORCID is incorrect: {orcid}\")\n\n if _orcid_checksum_digit(orcid[:-1]) != orcid[-1:]:\n raise ValueError(f\"{orcid} is not a valid ORCID\")\n\n return True", "def test_minlength():\n assert has_min_length(None, 8) is None\n assert has_min_length('abcd1234', 8) is None\n assert has_min_length('a', 8)", "def validate_crx_id(crx_id):\n try:\n assert isinstance(crx_id, str)\n assert crx_id.isalnum()\n assert len(crx_id) == 32\n except AssertionError:\n raise MalformedExtId", "def check_embed(embed: Embed) -> bool:\n if len(embed) <= 6000:\n if hasattr(embed, \"title\"):\n if len(embed.title) <= 256:\n pass\n else:\n return False\n if len(embed.fields) <= 25:\n for field in embed.fields:\n if len(field.name) <= 69420:\n pass", "def test_check_name_is_3_parts():\n check_name_length()", "def test_40_phonenumbers_too_long(self):\n number_phone = self.samples[4]\n with self.assertRaises(osv.except_osv):\n self.pn._symbol_set_char(number_phone)", "def _check_subject_id(subject_id):\n if (subject_id and\n len(subject_id) > models.Subject.id.property.columns[\n 0].type.length):\n raise exception.SubjectNotFound()", "def test_get_length(t_list):\n if not get_length(t_list) == 10:\n raise ValueError(\"Wrong number of transactions\")", "def validate_chunk_width(chunk_width):\n if not isinstance(chunk_width, str):\n return False\n a = chunk_width.split(\",\")\n assert len(a) != 0 # would be code error\n for elem in a:\n try:\n i = int(elem)\n if i < 1 and i != -1:\n return False\n except:\n return False\n return True", "def DLEN(self):", "def __len__(self):\n return len(self._order)", "def test_id_centos_7_7(self):\n self.assertEqual(jc.parsers.id.parse(self.centos_7_7_id, quiet=True), self.centos_7_7_id_json)", "def check_input_digits_count(self):\n check = len(str(self.input)) == 4\n return check", "def test_longest_id(self):\r\n ids = \\\r\n \"R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969\".split(\r\n )\r\n seqs = dict(parse_fasta(dna_seqs.splitlines(),\r\n label_to_name=label_to_name))\r\n self.assertEqual(longest_id(ids, seqs), 'U1PLI_403')", "def check4Id(self, element):\n if self.__identifier in element.attrib:\n return True\n else:\n return False", "def validVarConstructLength(self,varlen):\r\n if len(varlen)!=2:\r\n print 'variable must specify name and type'\r\n return False\r\n else:\r\n return True", "def check_osd_id(osd_id):\n if not re.match(r'^[0-9]+$', osd_id):\n raise Error('osd id is not numeric', osd_id)", "def test_systemstatus_name_length(self):\n\n # get object\n systemstatus_1 = 
Systemstatus.objects.get(systemstatus_name='systemstatus_1')\n # get max length\n max_length = systemstatus_1._meta.get_field('systemstatus_name').max_length\n # compare\n self.assertEqual(max_length, 30)", "def test_details_nonnum_id(self):\n self.check_response(\n '/attributes/xyz',\n ('Please enter an integer value for Attribute ID',))", "def checkValidId(self, id, prep_id = False):\n # RRD docs say that limit on vnames is 255 characters and that\n # A-Za-z0-9_ are the valid characters. Zenoss reserves - for it's own\n # use. Limiting to 200 instead just to leave room for whatever.\n # http://oss.oetiker.ch/rrdtool/doc/rrdgraph_data.en.html\n if len(id) > 200:\n return 'GraphPoint names can not be longer than 200 characters.'\n allowed = set(string.ascii_letters + string.digits + '_')\n attempted = set(id)\n if not attempted.issubset(allowed):\n return 'Only letters, digits and underscores are allowed' + \\\n ' in GraphPoint names.'\n return ZenModelRM.checkValidId(self, id, prep_id)", "def validate_identifier(identifier: str) -> bool:\n if identifier[:2] == 'NR':\n return True\n\n if len(identifier) < 9:\n return False\n\n try:\n d = int(identifier[-7:])\n if d == 0:\n return False\n except ValueError:\n return False\n # TODO This is not correct for entity types that are not Coops\n if identifier[:-7] not in ('CP', 'XCP', 'BC'):\n return False\n\n return True", "def is_valid(self):\n if len(self) <= 64 and re.match(RE_VALID_UID, self):\n return True\n\n return False", "def __len__(self):\r\n return 4", "def validate(msg):\n valid = True\n\n if not msg or len(msg) < 4:\n return False, -1, -1\n\n checksum = msg[-1]\n length = int(''.join('{:02X}'.format(byte) for byte in msg[1:3]), 16)\n # try:\n # # here works for pyton 3 only\n # length = int.from_bytes(msg[1:3], byteorder='big', signed=False)\n # except Exception:\n # length = int(''.join('{:02X}'.format(byte) for byte in msg[1:3]), 16)\n\n validlen = len(msg[3:-1])\n validsum = 0xFF - ((sum(msg[3:-1])) & 0xFF)\n\n # print('length: ' + str(self.length) + '; ' + str(validlen))\n # print('checksum: ' + str(self.checksum) + '; ' + str(validsum))\n\n # check sanity of computed Length and Checksum with the one in the message\n if (checksum != validsum) or (length != validlen):\n valid = False\n\n return valid, length, checksum", "def check_len(password_length, alphabet_length, numb_length, symb_length):\r\n return (symb_length + alphabet_length + numb_length) == password_length", "def __len__(self) -> int:\n return len(self.ids)", "def invalid_caterpillar_length(length):\n try:\n length = int(length)\n except ValueError:\n return \"`caterpillar-length` must be something that can be cast to an `int`\"\n\n if not 1 <= length <= len(app.desk.indeces):\n return (\n \"`caterpillar-length` must be a number between \"\n f\"1 and {len(app.desk.indeces)}\"\n )\n\n return False", "def validate_kf_id(kf_id, prefix='TA'):\n if len(kf_id) != 11 or kf_id[:3] != prefix+'_':\n abort(400, f\"'{kf_id}' is not a valid kf_id\")", "def validateId(shortId):\n return shortId in [DockerUtil.getShortId(container) for container in DOCKER_CLIENT.containers.list()]", "def checkValidId(self, id, prep_id = False):\n new_id = unquote(id)\n if prep_id: new_id = self.prepId(id)\n try:\n globalCheckValidId(self, new_id)\n return True\n except Exception:\n return str(sys.exc_info()[1])", "def _validate_identifier(self, identifier):\n for c in identifier:\n if c not in string.letters + string.digits + '_':\n return False\n return True", "def __len__(self):\n return 
len(self._key_order)", "def verify_length(src_json: Dict[str, List[str]]):\n if len(src_json.get(\"Leds\", [])) == 0:\n return \"No Leds in Group\"", "def test_size_returns_length(dq_3):\n assert dq_3.size() == 3", "def source_data_key_length_check(source_data_key, algorithm):\n if len(source_data_key.data_key) != algorithm.kdf_input_len:\n raise InvalidDataKeyError(\n \"Invalid Source Data Key length {actual} for algorithm required: {required}\".format(\n actual=len(source_data_key.data_key), required=algorithm.kdf_input_len\n )\n )", "def _check_id(self, keyword):\n if keyword not in self.request.data:\n return '{} parameter is missing'.format(keyword)\n \"\"\" Check if <keyword> parameter is not None \"\"\"\n if self.request.data[keyword] == '':\n return '{} ID cannot be None'.format(keyword)\n \"\"\" Check if <keyword> parameter is > 0 \"\"\"\n if int(self.request.data[keyword]) < 1:\n return '{} ID must be an integer > 0'.format(keyword)", "def _check_size_of_lists(sequence_header, secstr_header):\n if len(sequence_header) != len(sequence):\n sys.exit(\"The size of the sequence list and sequence header doesn't match\")\n else:\n return True", "def test_lengths(self):\n self.assertEqual(size(attempt.Z), 201)\n self.assertEqual(size(attempt.W), 201)", "def __len__(self) -> int:\n return 3", "def test_other_identifier(self):\n other_id_field = \\\n self.record.find('field[@name=\\'other_identifier\\']')\n self.assertEqual(other_id_field.text, 'HOSNUM0000',\n 'other Identifier incorrect')", "def test_invalid_numeric_code_length_format(self, cred, sender_id):\n resp = requests.get(verify_url.format('json', cred[0], cred[1],\n 'TestApp', test_number), params={'sender_id': sender_id})\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'application/json'\n assert resp.json()['status'] == '3'\n assert resp.json()['error_text'] == parameter_is_too_long_msg.format('sender_id')", "def __len__(self) -> int:", "def __len__(self) -> int:", "def __len__(self):\n return len(self.ids)" ]
[ "0.6447101", "0.64331466", "0.6345241", "0.6291078", "0.5997579", "0.59655714", "0.58881146", "0.58828425", "0.58140504", "0.57821655", "0.5780664", "0.5711595", "0.5706793", "0.56825346", "0.5664026", "0.5637399", "0.55534965", "0.5535516", "0.55239874", "0.5517009", "0.54965043", "0.54751176", "0.5470622", "0.54517806", "0.5446611", "0.5437444", "0.5404285", "0.5381236", "0.5376474", "0.5356663", "0.534293", "0.53371996", "0.53306514", "0.53229856", "0.5316179", "0.5314621", "0.5314475", "0.53033197", "0.52827245", "0.52819175", "0.5265849", "0.52588", "0.52563727", "0.52512854", "0.5251052", "0.52433646", "0.52393794", "0.52360135", "0.5233688", "0.52279294", "0.5225715", "0.52123696", "0.51861286", "0.5185202", "0.5181992", "0.5176079", "0.5160858", "0.5157563", "0.5152702", "0.51512593", "0.51466626", "0.5142294", "0.51352036", "0.5129219", "0.5123703", "0.51192814", "0.5117888", "0.5111903", "0.5110475", "0.51083237", "0.5100017", "0.50975084", "0.50921667", "0.50833815", "0.5077182", "0.50752866", "0.50743043", "0.50730574", "0.5070281", "0.5069826", "0.5059365", "0.50567025", "0.5050304", "0.50462973", "0.50457215", "0.5038353", "0.5034846", "0.5033114", "0.5031404", "0.5023737", "0.5009752", "0.50083506", "0.5007809", "0.49843284", "0.49825454", "0.49761832", "0.4952359", "0.494452", "0.49443784", "0.49443784", "0.49335408" ]
0.0
-1
Send special characters in OrderId field
def test_07(self): assert 'False' == Api.requestBlock('test-07', charOrder='~!@#$%%^&*()=_+<>?/')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def order_id(self) -> str:\n return pulumi.get(self, \"order_id\")", "def __str__(self):\n\n return f'{self.order_id}'", "def __str__(self):\n return self.order_id", "def _escape_identifier(self, value):\n\n return value.replace('\"', '\"\"')", "def format_item(self, order):\n return unicode(order)", "def prepId(self, id, subchar='_'):\n return globalPrepId(id, subchar)", "def quote_identifier(self, value):\n\n return self.initial_quote + self._escape_identifier(value) + self.final_quote", "def _generate_order_id():\n current_milli_time = str(int(round(time.time())))\n rand_str = random_string_generator()\n\n return '%s%s' % (rand_str, current_milli_time)", "def id(self):\n return \"{model:s}--{serial:08x}\".format(model=self.model.replace('-',''), serial=self.serial_number).lower()", "def _generate_order_number(self):\n return uuid.uuid4().hex.upper()", "def _generate_order_number(self):\n return uuid.uuid4().hex.upper()", "def _generate_order_number(self):\n return uuid.uuid4().hex.upper()", "def generate_random_order_id():\r\n return ''.join(random.choices(string.ascii_lowercase + string.digits, k=12))", "def id_text(self) -> str:\n return self.source_system + \" - \" + self.external_id + \" (\" + str(self.internal_id) + \")\"", "def get_id(self):\n return escape(self.email)", "def generate_order_id():\n rands = []\n for i in range(0, 16):\n r = random()\n rand = 4294967296.0 * r\n rands.append((int(rand) >> ((3 & i) << 3)) & 255)\n\n hexa = []\n for i in range(0, 256):\n hexa.append(str(hex(i+256)).lstrip(\"0x\").rstrip(\"L\")[1:])\n\n id = \"\"\n for i in range(0, 16):\n id += hexa[rands[i]]\n\n if (i == 3) or (i == 5) or (i == 7) or (i == 9):\n id += \"-\"\n\n return(id)", "def _encode_resource_id(self, resource_id):\n return urlquote(resource_id, safe='~')", "def to_id(self):\n return \"%s%s%s%s%s\" % (NoOpTraceId.VERSION, NoOpTraceId.DELIMITER,\n self.start_time,\n NoOpTraceId.DELIMITER, self.__number)", "def get_orderId(self):\n return self.order_id", "def en_quote_as_entity(self):\n pass", "def _unescape_identifier(self, value):\n\n return value.replace('\"\"', '\"')", "def get_orderId(self):\n return self.metadata['orderId']", "def format_unique_id(address: str) -> str:\n return address.replace(\":\", \"\").lower()", "def get_id(self):\n return unicode(self.id)", "def identifier(self, value: str):\n if Business.validate_identifier(value):\n self._identifier = value\n else:\n raise BusinessException('invalid-identifier-format', 406)", "def format_id(self, html=False):\n if self.term_type == 'C':\n full_id = 'KEGG:' + self.org_prefix + self.term_id\n else:\n full_id = 'KEGG:' + self.term_type\n\n if html:\n term_id = self.id_anchor_fmt % (self.url(), full_id)\n else:\n term_id = full_id\n return term_id", "def get_orderno(self):\n WebDriverWait(self.driver, 20).until(EC.visibility_of_element_located((By.CSS_SELECTOR,'#order-no')))\n order_id=self.driver.find_element_by_css_selector('#order-no').text\n return order_id", "def generate_id(self):\n unique_id = \"\"\n\n while len(unique_id) < self.id_length:\n ascii_number = self.get_random_bits()\n\n if self.is_approved_ascii(ascii_number):\n random_char = chr(ascii_number)\n\n if not self.is_excluded_char(random_char):\n unique_id += chr(ascii_number)\n\n return unique_id", "def unique_id() -> str:", "def unique_id(self) -> str:\n return f\"{self.wallet_id}{self.WALLET_KEY_POSTFIX}\"", "def order_id(self, order_id):\n\n self._order_id = order_id", "def order_id(self, order_id):\n\n self._order_id = order_id", "def order_id(self, 
order_id):\n\n self._order_id = order_id", "def get_actual_id(translated):", "def unique_id(self) -> str:\n return '{0}_{1}'.format(self._mac.replace(':', ''), self.entity_id)", "def test_other_identifier(self):\n other_id_field = \\\n self.record.find('field[@name=\\'other_identifier\\']')\n self.assertEqual(other_id_field.text, 'HOSNUM0000',\n 'other Identifier incorrect')", "def to_id(id):\n return int(id.strip('<@&#!>')) if id.isdigit() else id.strip('<@&#!>')", "def custom_id(self) -> str:\n return self._underlying.custom_id", "def setID(id, content):\n return content.replace(\"$ID\", str(id))", "def getSerpentId(self):\n symbol = self.element.symbol.capitalize()\n return \"{}-{}{}\".format(symbol, self.a, \"m\" if self.state else \"\")", "def encode_key_for_mongo(fieldname):\r\n for char in [\".\", \"$\"]:\r\n fieldname = fieldname.replace(char, '%{:02x}'.format(ord(char)))\r\n return fieldname", "def id(self) -> str:\n pass", "def test_convert_id():", "def getId(self):\n return '%s%08X' % (self.id,self.index)", "def __str__(self):\n return \"{vendor_id}:{product_id}\".format(vendor_id=self.vendor_id, product_id=self.product_id)", "def ident(self):\r\n text = self.component.get(\"id\", \"\")\r\n # strip surrounding curly braces from id\r\n return re.sub(\"[{}]\", \"\", text)", "def string_id(self):\n id = self.id()\n if not isinstance(id, basestring):\n id = None\n return id", "def order_w_order_id(order_id):\n # Megnyutjuk a kapcsolatot\n conn = get_db()\n try:\n # Keszitunk egy cursort\n cur = conn.cursor()\n try:\n # Ezt a parameteres SQL lekerdezest hajtjuk vegre, mellyel megkapjuk az adott\n # order_id-ju megrendelest.\n cur.execute('SELECT description, vehicle_type, quantity, origin, destination,' +\n ' order_date, deadline_date, comment_text FROM orders WHERE' +\n ' order_id = :order_id', order_id=order_id)\n # Ebben a valtozoban lesz az eredmenytabla egyetlen\n # sora (Biztosan 1 lesz, mert az order_id egyedi)\n result = cur.fetchone()\n # Ha nem talaltunk ilyen megrendelest, szolunk a felhasznalonak\n if result is None:\n abort(404)\n else:\n # 2. 
feladat - lekerdezzuk az adott orszag valutajat\n #\n # Az origin illetve destination mezokben megkeressuk az orszag betujelet\n # Ez mindig a string vegen, ( es ) jelek kozott allo 2 betu.\n # Mivel ezek nagybetuvel irodtak at kell konvertalnunk kisbeture.\n # Ezek futtatjuk a kerest, majd a kapott eredmenyt JSON formatumra parsoljuk.\n # Ebbol kiolvassuk a valuta erteket, amit majd atadunk a kimeneti mezonknek.\n origin001 = result[3]\n origin_len = len(origin001)\n origin_tmp = origin001[origin_len-3:origin_len-1]\n origin_url = \"http://rapid.eik.bme.hu:9080/currency_ws/currencies/\" + origin_tmp.lower() + \".json\"\n r1 = requests.get(origin_url)\n var1 = r1.json()\n origin_currency = var1['currency']\n \n destination001 = result[4]\n destination_len = len(destination001)\n destination_tmp = destination001[destination_len-3:destination_len-1]\n destination_url = \"http://rapid.eik.bme.hu:9080/currency_ws/currencies/\" + destination_tmp.lower() + \".json\"\n r2 = requests.get(destination_url)\n var2 = r2.json()\n destination_currency = var2['currency']\n # Visszaterunk a JSON formatumu dictionary-vel,\n # ami mindent a megfelelo formatumban tarol\n return jsonify({\"description\": result[0],\n \"vehicle_type\": result[1],\n \"quantity\": result[2],\n \"origin\": result[3],\n \"destination\": result[4],\n \"order_date\": result[5].date().isoformat(),\n \"deadline_date\": result[6].date().isoformat(),\n \"comment_text\": result[7],\n \"origin_currency\": origin_currency,\n\"destination_currency\": destination_currency})\n finally:\n cur.close()\n finally:\n conn.close()", "def get_order_guid(order_id):\n return linnapi.orders.get_order_guid_by_order_id(order_id)", "def client_id(self) -> str:", "def _get_hardware_id(cls, vendor_id, product_id):\n return \"%s%s\" % (vendor_id.ljust(8), product_id.ljust(16))", "def format_result(self, order):\n return u\"%s\" % (order)", "def set_ID(self, x):\n x = str(x)\n if self.ID != x:\n self.ID = x", "def order_item_id(self):\n return self._order_item_id", "def _encode_key(self, key: str) -> str:\n return key", "def autoid(self) -> str:", "def autoid(self) -> str:", "def get_id(self): # real signature unknown; restored from __doc__\n return \"\"", "def _clean_id(self, dirty_id):\n return self.wsid_regex.sub(\"\", dirty_id.replace(\" \", \"_\"))", "def id(self):\n # Might also be a first 12-characters shortcut.\n return self._id", "def __str__(self) -> str:\n return f'P{self.id}'", "def unique_id(self):\n return f\"{self.device.id}-{self.key}\"", "def _write_identifier(self, identifier):\n id_format = \"{0:\\xAC>\" + str(self.max_idx_len) + \"}\"\n identifier = str(identifier)\n identifier = id_format.format(identifier).encode(\"latin-1\")\n self.file_out.write(identifier)\n return", "def prefixed_id(self, id):\n #The reason why we don't just use the external id and put the model as the prefix\n #is to avoid unique ir_model_data#name per module constraint violation.\n return self._name.replace('.', '_') + '/' + str(id)", "def get_id(self):\n \"\"\"Requires use of Python 3\"\"\"\n return str(self.id)", "def order(self, order_id, symbol, **kwargs):\n pass", "def prepare_key(self, key):\n return smart_str(key)", "def _id(self):\n result = ''\n while self.current_char is not None and self.current_char.isalnum() or self.current_char == '_':\n result += self.current_char\n self.advance()\n\n return Token(ID, result)", "def cypher_escape(identifier, **kwargs):\n if not isinstance(identifier, string_types):\n raise TypeError(type(identifier).__name__)\n 
encoder = CypherEncoder(**kwargs)\n return encoder.encode_key(identifier)", "def __str__(self):\n return (\"%s - %s %s\") % (self.in_id, self.letter, self.shop)", "def __str__(self):\n return \"%s (%s-%02d)\" % (self.nombre, self.tipo.slug.upper(), self.id)", "def fixture_microbial_order_id():\n return \"microbial_order_test\"", "def _generate_order_number(self):\n self.order_number = ''.join((random.choice(string.ascii_lowercase + string.digits) for _ in xrange(30)))", "def create_obj_id_for_query(id_dict: Dict) -> Text:\n return \",\".join([f\"{key}={value}\" for key, value in id_dict.items()])", "def get_id(self):\n return str(self._id)", "def get_id(self):\n return str(self._id)", "def reqid(self) -> str:", "def reqid(self) -> str:", "def get_product_id(self):\n pid = \"%s-%s-%s-%s\" % (self.valid.strftime(\"%Y%m%d%H%M\"),\n self.source, self.wmo, self.afos)\n return pid.strip()", "def get_org_id(record, org_id_prefix=CASC_ORG_ID_PREFIX):\n\n def hash_id(w):\n return hashlib.md5(w.encode(\"utf8\")).hexdigest()[0:8]\n\n return \"-\".join(\n [org_id_prefix, hash_id(str(record[\"name\"]) + str(record[\"postcode\"]))]\n )", "def _getEntityStartKey(entityId):\n return \"%s\\x1D\" % entityId", "def _expanded_id(name: str, sep: str = '_') -> str:\n return sep.join([el.lower()\n for el in re.split(r'([A-Z]+[^A-Z]*)', name)\n if el])", "def unique_id(self) -> str:\n return \"{}-{}-{}\".format(*self._id)", "def make_trace_id(trace_id: bytes) -> str:\n return base64.b64encode(trace_id).decode(\"utf-8\")", "def __str__(self):\n return str(self._id())", "def id(self) -> str:\n return self.properties[DBUS_ATTR_ID]", "def sanitize_id (self, id):\n return re.sub (self.sanitize_pat, '', id)", "def id_str(self):\n if hasattr(self, 'id'):\n return str(self.id)\n else:\n return 'obj%s' % id(self)", "def build_id():\n return \"test123\"", "def _generate_cart_id():\n cart_id = ''\n characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890!@#$%^&*()'\n cart_id_length = 50\n for y in range(cart_id_length):\n cart_id += characters[random.randint(0, len(characters) - 1)]\n return cart_id", "def clean_note_prefix(self):\n data = self.cleaned_data[\"note_prefix\"]\n return data.encode(\"ascii\") if data != \"\" else data", "def __str__(self) -> str:\n return self.id", "def get_hex_id(fullRouterName):\n hexId = \"\"\n if fullRouterName.count(\"=\") > 0:\n hexId = fullRouterName.split(\"=\")[0]\n else:\n hexId = fullRouterName.split(\"~\")[0]\n hexId = hexId.replace(\"$\", \"\")\n return hexId", "def html_id(self):\r\n return unicode(self)", "def html_id(self):\r\n return unicode(self)", "def id_str(self):\n return self.status.id_str", "def __str__(self):\n return str(self.id)", "def __str__(self):\n return str(self.id)", "def __str__(self):\n return str(self.id)", "def __str__(self):\n return str(self.id)", "def id(self, value: str):\n self._id = value" ]
[ "0.6710151", "0.66620994", "0.6476178", "0.61907893", "0.6042138", "0.59341186", "0.5875805", "0.58528703", "0.5759892", "0.5745699", "0.5745699", "0.5745699", "0.56400716", "0.5607561", "0.56031054", "0.55670196", "0.55341715", "0.55016094", "0.548358", "0.54751325", "0.54484123", "0.54391384", "0.5430797", "0.54095393", "0.5404094", "0.53634685", "0.53477764", "0.5338057", "0.5298369", "0.5293537", "0.5288637", "0.5288637", "0.5288637", "0.5284999", "0.52845865", "0.5269556", "0.5257922", "0.5232306", "0.5232096", "0.5227618", "0.5218061", "0.52064466", "0.5203091", "0.5200776", "0.5195961", "0.51880306", "0.51878506", "0.5186903", "0.5185988", "0.5177198", "0.51578194", "0.5154856", "0.51400185", "0.51360637", "0.51344985", "0.5133331", "0.5133331", "0.5108982", "0.5100402", "0.5099962", "0.50978655", "0.50958973", "0.5090776", "0.50833243", "0.5075073", "0.50747764", "0.50695306", "0.506887", "0.506647", "0.5059156", "0.50590944", "0.5058727", "0.5053835", "0.5042067", "0.50349635", "0.50349635", "0.5034905", "0.5034905", "0.5033357", "0.50330114", "0.5031742", "0.5030606", "0.5023765", "0.50235844", "0.5021821", "0.5021145", "0.5020651", "0.5014043", "0.50125796", "0.5010449", "0.5010014", "0.5009388", "0.50076777", "0.5006863", "0.5006863", "0.5004208", "0.5002403", "0.5002403", "0.5002403", "0.5002403", "0.50017446" ]
0.0
-1
Send different values in OrderId fields
def test_08(self): assert 'False' == Api.requestBlock('test-08', differentOrderId=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def order_id(self) -> str:\n return pulumi.get(self, \"order_id\")", "def order_id(self, order_id):\n\n self._order_id = order_id", "def order_id(self, order_id):\n\n self._order_id = order_id", "def order_id(self, order_id):\n\n self._order_id = order_id", "def __str__(self):\n return self.order_id", "def __str__(self):\n\n return f'{self.order_id}'", "def get_orderId(self):\n return self.order_id", "def save(self, *args, **kwargs):\n if not self.order_number:\n self.order_number = self._generate_order_number()\n super().save(*args, **kwargs)", "def save(self, *args, **kwargs):\n if not self.order_number:\n self.order_number = self._generate_order_number()\n super().save(*args, **kwargs)", "def get_specific_order(self,order_id):\n self.query = \"SELECT * FROM orders WHERE order_id=%s\"\n self.input = (order_id,) #tuple to support indexing\n self.query_1 = \"SELECT order_id FROM orders ORDER BY order_id DESC LIMIT 1.\"\n self.event = \"admin_get_specific_order\"\n self.error = \"Invalid order id\"\n self.message = \"Successfully fetched the order.\"\n self.order_id = order_id\n self.db_error = None", "def post_order(self, order):\n url = self.build_url(\"orders/\")\n res = post(url, json=order)\n if res.ok:\n return res.json()[\"id\"]\n return None", "def order_w_order_id(order_id):\n # Megnyutjuk a kapcsolatot\n conn = get_db()\n try:\n # Keszitunk egy cursort\n cur = conn.cursor()\n try:\n # Ezt a parameteres SQL lekerdezest hajtjuk vegre, mellyel megkapjuk az adott\n # order_id-ju megrendelest.\n cur.execute('SELECT description, vehicle_type, quantity, origin, destination,' +\n ' order_date, deadline_date, comment_text FROM orders WHERE' +\n ' order_id = :order_id', order_id=order_id)\n # Ebben a valtozoban lesz az eredmenytabla egyetlen\n # sora (Biztosan 1 lesz, mert az order_id egyedi)\n result = cur.fetchone()\n # Ha nem talaltunk ilyen megrendelest, szolunk a felhasznalonak\n if result is None:\n abort(404)\n else:\n # 2. 
feladat - lekerdezzuk az adott orszag valutajat\n #\n # Az origin illetve destination mezokben megkeressuk az orszag betujelet\n # Ez mindig a string vegen, ( es ) jelek kozott allo 2 betu.\n # Mivel ezek nagybetuvel irodtak at kell konvertalnunk kisbeture.\n # Ezek futtatjuk a kerest, majd a kapott eredmenyt JSON formatumra parsoljuk.\n # Ebbol kiolvassuk a valuta erteket, amit majd atadunk a kimeneti mezonknek.\n origin001 = result[3]\n origin_len = len(origin001)\n origin_tmp = origin001[origin_len-3:origin_len-1]\n origin_url = \"http://rapid.eik.bme.hu:9080/currency_ws/currencies/\" + origin_tmp.lower() + \".json\"\n r1 = requests.get(origin_url)\n var1 = r1.json()\n origin_currency = var1['currency']\n \n destination001 = result[4]\n destination_len = len(destination001)\n destination_tmp = destination001[destination_len-3:destination_len-1]\n destination_url = \"http://rapid.eik.bme.hu:9080/currency_ws/currencies/\" + destination_tmp.lower() + \".json\"\n r2 = requests.get(destination_url)\n var2 = r2.json()\n destination_currency = var2['currency']\n # Visszaterunk a JSON formatumu dictionary-vel,\n # ami mindent a megfelelo formatumban tarol\n return jsonify({\"description\": result[0],\n \"vehicle_type\": result[1],\n \"quantity\": result[2],\n \"origin\": result[3],\n \"destination\": result[4],\n \"order_date\": result[5].date().isoformat(),\n \"deadline_date\": result[6].date().isoformat(),\n \"comment_text\": result[7],\n \"origin_currency\": origin_currency,\n\"destination_currency\": destination_currency})\n finally:\n cur.close()\n finally:\n conn.close()", "def order(self, order_id, symbol, **kwargs):\n pass", "def get_orderId(self):\n return self.metadata['orderId']", "def nextValidId(self, orderId):\n if getattr(self, '_my_orderid_data', None) is None:\n ## getting an ID which we haven't asked for\n ## this happens, IB server just sends this along occassionally\n self.init_nextvalidid()\n\n self._my_orderid_data.put(orderId)", "def update_specific_order(self,status,order_id):\n self.query = \"UPDATE orders SET order_status=%s WHERE order_id=%s\"\n self.input = (status,order_id) #tuple to support indexing\n self.query_1 = \"SELECT order_id FROM orders ORDER BY order_id DESC LIMIT 1.\"\n self.query_2 = \"SELECT * FROM orders WHERE order_id=%s\"\n self.input_2 = (order_id,) #tuple to support indexing\n self.event = \"admin_update_specific_order\"\n self.error = \"Invalid order id\"\n self.message = \"Successfully updated the order.\"\n self.order_id = order_id\n self.db_error = None", "def client_order_id(self, client_order_id):\n\n self._client_order_id = client_order_id", "def setOrder(self, order):\n\t\tself.orderInData = order", "def test_get_specific_order(self):\n # Test with wrong parcel id\n # Correct format but not there\n response = self.client.get(\n 'api/v1/parcels/24034', headers=self.user_token_dict)\n data = json.loads(response.data)\n self.assertEqual(\n data, {'message': 'No Parcel delivery order with that id'})\n self.assertEqual(response.status_code, 400)\n # Test with wrong parcel id format\n response = self.client.get(\n 'api/v1/parcels/24034u', headers=self.user_token_dict) # Incorrect id format\n data = json.loads(response.data)\n self.assertEqual(data, {'message': 'Wrong id format'})\n self.assertEqual(response.status_code, 400)", "def get_order(self, walletId, orderId):\n return", "def set_AWSMerchantId(self, value):\n super(ListOrdersInputSet, self)._set_input('AWSMerchantId', value)", "def _set_id(self, value):\n pass", "def 
validate_order_id(self, value):\n\n if not Order.objects.filter(order_id=value).exists():\n raise ValidationError(f'Order with id {value} does not exist.')\n order_obj = Order.objects.get(order_id=value)\n if order_obj.assign_time is None:\n raise ValidationError(f'Order with id {value} was not assigned to any courier.')\n if order_obj.complete_time is not None:\n raise ValidationError(f'Order with id {value} has already been completed.')\n return value", "def __init__(self, order_id):\n self.order_items = []\n self.order_id = order_id", "def _generate_order_number(self):\n return uuid.uuid4().hex.upper()", "def _generate_order_number(self):\n return uuid.uuid4().hex.upper()", "def _generate_order_number(self):\n return uuid.uuid4().hex.upper()", "def create_order(request, order, transaction_id):\n\n\n order.transaction_id = transaction_id\n print transaction_id\n #order.ip_address = request.META.get('REMOTE_ADDR')\n order.user = None\n #if request.user.is_authenticated():\n # order.user = request.user\n order.status = Order.SUBMITTED\n\n DBSession.add(order)\n\n\n if order:\n \"\"\" if the order save succeeded \"\"\"\n cart_items = cart.get_cart_items(request).all()\n print \"The items in the cart are: \", len(cart_items)\n\n for ci in cart_items:\n \"\"\" create order item for each cart item \"\"\"\n\n print \"The product is \", ci.product\n oi = OrderItem()\n oi.order_id = order.id\n oi.order = order\n oi.quantity = ci.quantity\n print \"The product id is \", ci.product.id\n oi.product_id = ci.product.id\n oi.product = ci.product\n\n oi.price = ci.price # now using @property\n DBSession.add(oi)\n\n # all set, clear the cart\n cart.empty_cart(request)\n\n ## save profile info for future orders\n #if request.user.is_authenticated():\n # from ecomstore.accounts import profile\n #\n # profile.set(request)\n\n return order", "def _generate_order_id():\n current_milli_time = str(int(round(time.time())))\n rand_str = random_string_generator()\n\n return '%s%s' % (rand_str, current_milli_time)", "def modify_order(self, order_id, price, volume):\n if order_id in self.orders:\n for instance in price, volume:\n if not isinstance(instance, int):\n raise TypeError('{0} is not int istance'.format(instance))\n if self.orders[order_id].price != price:\n self.orders[order_id].price = price\n if self.orders[order_id].volume != volume:\n self.orders[order_id].volume = volume\n else:\n raise KeyError('{0} is not in orders dict'.format(order_id))", "async def get_order_by_id(request: web.Request, order_id) -> web.Response:\n return web.Response(status=200)", "def send_orders_assign(order_ids, assign_time):\n if assign_time is None or len(order_ids) == 0:\n return make_response(jsonify({\"orders\": []}), 200)\n ids = [{\"id\": i} for i in order_ids]\n return make_response(jsonify({\"orders\": ids, \"assign_time\": assign_time}), 200)", "def order_item_id(self):\n return self._order_item_id", "def get_order(self, order, fields_name, many_to_many_fields):\n next_direction = '' if order[:1] == '-' else '-'\n real_order = ''\n field = ''\n if order[1:] == 'pk' or order == 'pk':\n real_order = order\n field = 'pk'\n else:\n if order[1:] in fields_name or order in fields_name:\n if order[1:] in many_to_many_fields or order in many_to_many_fields:\n real_order = 'pk'\n field = 'pk'\n else:\n real_order = order\n field = order if next_direction == '-' else order[1:]\n else:\n real_order = 'pk'\n field = 'pk'\n return next_direction, field, real_order", "def orderInfo(self, orderInfo):\r\n\r\n self._orderInfo = 
orderInfo", "def order_item_id(self, order_item_id):\n if order_item_id is None:\n raise ValueError(\"Invalid value for `order_item_id`, must not be `None`\")\n\n self._order_item_id = order_item_id", "def reducer_data_cleaning(self, order_id, product_id_arr):\n order_dict = {}\n product_list = list()\n for product_id in product_id_arr:\n product_list.append(str(product_id))\n if order_id != 'Member_number':\n order_dict[order_id] = product_list\n yield order_dict, 1", "def send_order(self, p_order, p_in_out, count):\n pass", "def SaveOrder(self, order, tenantId, userId):\n\t\tif order:\n\t\t\tif order[\"Id\"]:\n\t\t\t\torderid = order[\"Id\"]\n\t\t\t\to = self.GetOrderById(orderid, tenantId)\n\t\t\t\tif o:\n\t\t\t\t\t#o.TenantId = tenantId\n\t\t\t\t\to.CustomerId = order[\"CustomerId\"]\n\t\t\t\t\to.OrderAmount = order[\"OrderAmount\"]\n\t\t\t\t\to.PaidAmount = order[\"PaidAmount\"]\n\t\t\t\t\to.IpAddress = order['IpAddress']\n\t\t\t\t\tif order['DueDate'] and len(order['DueDate']) > 0:\n\t\t\t\t\t\to.DueDate = dateutil.parser.parse(order['DueDate'])\n\t\t\t\t\tif order['OrderDate'] and len(order['OrderDate']) > 0:\n\t\t\t\t\t\to.OrderDate = dateutil.parser.parse(order['OrderDate'])\n\t\t\t\t\to.UpdatedBy = userId\n\t\t\t\t\to.UpdatedOn = datetime.utcnow()\n\n\t\t\t\t\tlineitems = order[\"LineItems\"]\n\t\t\t\t\tif lineitems:\n\t\t\t\t\t\to.LineItemsCount = len(lineitems)\n\t\t\t\t\t\to.OrderAmount = sum([x[\"SellPrice\"] * x[\"Quantity\"] for x in lineitems])\n\t\t\t\t\t\tDBSession.query(LineItem).filter(LineItem.OrderId == orderid).delete()\n\t\t\t\t\t\tself.SaveOrderLineItems(o.Id, lineitems)\n\t\t\t\t\telse:\n\t\t\t\t\t\to.LineItemsCount = 0\n\t\t\t\t\t\to.OrderAmount = 0\n\t\t\t\t\t\tDBSession.query(LineItem).filter(LineItem.OrderId == orderid).delete()\n\n\t\t\t\t\tpayments = order[\"Payments\"]\n\t\t\t\t\tif payments:\n\t\t\t\t\t\to.PaidAmount = sum([x[\"PaidAmount\"] for x in payments])\n\t\t\t\t\t\tDBSession.query(OrderPayment).filter(OrderPayment.OrderId == orderid).delete()\n\t\t\t\t\t\tself.SaveOrderPayments(o.Id, payments, userId)\n\t\t\t\t\telse:\n\t\t\t\t\t\to.PaidAmount = 0\n\t\t\t\t\t\tDBSession.query(OrderPayment).filter(OrderPayment.OrderId == orderid).delete()\n\t\tpass", "def on_order(self, order: OrderData):\n # self.on_event(EVENT_ORDER, order)\n # self.on_event(EVENT_ORDER + order.vt_orderid, order)\n pass", "def place(self, order_params):\n\n # Prevent multiple invocations with the same OID.\n if self.oid() is not None:\n return self.oid()\n\n # Common params across all orders\n # https://docs.gdax.com/?python#orders\n data = {\n 'side': self.__side,\n 'type': self.__order_type,\n 'product_id': self.__product,\n }\n data.update(order_params)\n\n log.info('placing ORDER')\n self.__resp = httpapi.post(\n common.api_url + 'orders',\n data=json.dumps(data),\n auth=common.auth,\n )\n\n return self.oid(), self.__resp", "def order_created(order_id):\n order = Order.objects.get(id=order_id)\n sg = sendgrid.SendGridAPIClient(apikey=SENDGRID_API_KEY)\n subject1 = 'Order from the Меблі-Лем store'\n message = 'Your order has been placed. 
Your order number is № {}'.format(order.id)\n message += '\\n A manager will contact you \\n\\n Best regards, the \"Меблі-Лем\" store'\n content = Content(\"text/plain\", message)\n from_email = Email(ADMIN_EMAIL)\n to_email = Email(order.email)\n\n # subject2 = 'New Order Received (Меблі-Лем)'\n # message_admin = 'Customer {0} {1} from {2} \\n placed order № {3}'.format(order.first_name,\n # order.last_name, order.address,\n # order.id)\n # message_admin += '\\n Customer phone: {}'.format(order.phone)\n # content_admin = Content(\"text/plain\", message_admin)\n mail = Mail(from_email, subject1, to_email, content)\n response = sg.client.mail.send.post(request_body=mail.get())\n\n return response", "def default_get(self, cr, uid, fields, context=None):\n if context is None:\n context = {}\n\n exchang_obj = self.pool.get('exchange.order')\n res ={}\n exchang_ids = context.get('active_ids', [])\n if not exchang_ids:\n return res\n\n result = []\n for req in exchang_obj.browse(cr, uid, exchang_ids, context=context):\n for product in req.order_line:\n result.append(self.__create_products(product))\n res.update({'products_ids': result})\n if 'current_date' in fields:\n res.update({'current_date': time.strftime('%Y-%m-%d %H:%M:%S')})\n return res", "def _compute_tax_id(self):\n for order in self:\n order.order_line._compute_tax_id()", "def _set_unique_id(self, json_request):\n values = []\n for field in value_fields:\n value = json_request.get(field, '')\n values.append(quote(self.fully_decode_uri(value), safe=''))\n if len(values) == 1:\n self.unique_id = values[0]\n elif len(values) == 2:\n self.unique_id = self.build_summary(values[0], values[1])\n elif len(values) == 1:\n self.unique_id = self.build_summary(values[0], values[1], values[2])", "def get_one_order():", "def update_order():", "def update_order():", "def get_order(self, order_id):\n for o in self.order_lst:\n if o.get_orderId() == order_id:\n return o", "def put(self, order_id):\n\n ###############\n # json_input = self.get_input()\n # log.pp(json_input)\n # key = 'request_id'\n # order_id = json_input.get(key)\n # if order_id is None:\n # error = \"Order ID parameter '%s': missing\" % key\n # return self.send_errors(error, code=hcodes.HTTP_BAD_REQUEST)\n # else:\n # order_id = str(order_id)\n\n ###############\n log.info(\"Order id '%s' has to be restricted\", order_id)\n\n # Create the path\n log.info(\"Order request: %s\", order_id)\n imain = self.get_service_instance(service_name='irods')\n order_path = self.get_order_path(imain, order_id)\n log.debug(\"Order path: %s\", order_path)\n\n ###############\n error = \"Order '%s' not enabled or you have no permissions\" % order_id\n if not imain.is_collection(order_path):\n return self.send_errors(error, code=hcodes.HTTP_BAD_REQUEST)\n else:\n metadata, _ = imain.get_metadata(order_path)\n key = 'restricted'\n if key not in metadata:\n return self.send_errors(error, code=hcodes.HTTP_BAD_REQUEST)\n else:\n string = metadata.get(key)\n import json\n restricted_users = json.loads(string)\n # log.pp(restricted_users)\n if len(restricted_users) < 1:\n return self.send_errors(\n error, code=hcodes.HTTP_BAD_REQUEST)\n\n ###############\n obj = self.init_endpoint()\n if obj.username not in restricted_users:\n return self.send_errors(error, code=hcodes.HTTP_BAD_REQUEST)\n\n ###############\n # irods copy\n label = \"%s_%s.%s\" % (obj.username, 
'123', 'zip')\n ipath = self.complete_path(order_path, label)\n self.stream_to_irods(imain, ipath)\n log.verbose(\"Uploaded: %s\", ipath)\n\n ###############\n # define zip final path\n from utilities import path\n filename = 'order_%s' % order_id\n # zip_file_name = path.append_compress_extension(filename)\n zip_ipath = path.join(order_path, filename, return_str=True)\n\n ###############\n # launch container\n self.ingest_restricted_zip(imain, order_id, zip_ipath, ipath)\n\n ###############\n response = {\n 'order_id': order_id,\n 'status': 'filled',\n }\n return self.force_response(response)", "def _getEntityId(self, order_item_id, batch_id, product_item_id):\n entity_name = ''\n for name in 'batch_id', 'order_item_id', 'product_item_id':\n id = eval(name)\n if id is not None:\n if entity_name:\n raise WorkflowError('Only one entity_id may be provided')\n entity_id = id\n entity_name = name\n if not entity_name:\n raise WorkflowError('No valid entity_ids were provided')\n return entity_name, entity_id", "def order_code(self, order_code):\n\n self._order_code = order_code", "def set_order(self, order):\n self.order = order", "def set_order(self, order):\n self.order = order", "def GetRecipientID(self, order):\n cursor = self.cursor\n\n manager_guid = order[\"manager\"][\"id\"]\n query = f'select id from recipient where guid=\\'{manager_guid}\\''\n cursor.execute(query)\n row = cursor.fetchone()\n if not row:\n return None\n managerid = row[0]\n\n if (not order[\"agent\"]):\n lname = order[\"customer\"][\"lname\"]\n fname = order[\"customer\"][\"fname\"]\n mname = order[\"customer\"][\"mname\"]\n if not mname: mname = \"\"\n phone = order[\"customer\"][\"phone\"]\n email = order[\"customer\"][\"email\"]\n query = '''\nselect id from dbo.[Recipient] where upper(rtrim([name]))=? and ltrim([phone])=? 
and [email]=?\n'''\n values = ((lname + ' ' + fname + ' ' + mname).upper().rstrip(), phone, email)\n cursor.execute(query, values)\n row = cursor.fetchone()\n if (not row):\n selector = '''\ninsert into dbo.[Recipient]\n([name], [recipienttypeid], [phone], [rtypeid], [legalgroupid], [metaphone], [managerid], [email])\nvalues (?,?,?,?,?,?,?,?)\n'''\n values = (\n (lname + ' ' + fname + ' ' + mname).rstrip(), 8, phone, 1, 10,\n (lname + fname + mname + phone).lower(),\n managerid, email)\n cursor.execute(selector, values)\n\n cursor.execute(\"select IDENT_CURRENT('recipient')\")\n row = cursor.fetchone()\n id = row[0]\n\n else:\n guid = order[\"agent\"][\"id\"]\n query = f'select id from dbo.Recipient where guid=\\'{guid}\\''\n\n cursor.execute(query)\n row = cursor.fetchone()\n if (not row):\n name = order[\"agent\"][\"name\"]\n query = 'insert into dbo.Recipient ([name], [rtypeid], [recipienttypeid], [metaphone],[managerid], [guid]) values (?,?,?,?,?,?)'\n values = (name, 2, 13, name.lower(), managerid, guid)\n cursor.execute(query, values)\n cursor.execute(\"select IDENT_CURRENT('recipient')\")\n row = cursor.fetchone()\n id = row[0]\n\n return id", "def ProcessOrder(product_id):\n product = Product.query.filter_by(product_id = product_id)\n \n if (product):\n product.qty = product \n db.session.commit()", "def save_object(self, data):\n return OrderDetails(**data)", "def get_order_by_id(self, order_id):\n if order_id in self.orders:\n order = self.orders.get(order_id)\n return (order.type, order.price, order.volume)\n raise KeyError('{0} is not in orders dict'.format(order_id))", "def create_order():", "async def test_retrieve_order_by_id(self):\n order = {\n 'id': '46871284',\n 'type': 'ORDER_TYPE_BUY_LIMIT',\n 'state': 'ORDER_STATE_PLACED',\n 'symbol': 'AUDNZD',\n 'magic': 123456,\n 'platform': 'mt5',\n 'time': '2020-04-20T08:38:58.270Z',\n 'openPrice': 1.03,\n 'currentPrice': 1.05206,\n 'volume': 0.01,\n 'currentVolume': 0.01,\n 'comment': 'COMMENT2'\n }\n client.get_order = AsyncMock(return_value=order)\n actual = await api.get_order('46871284')\n assert actual == order\n client.get_order.assert_called_with('accountId', '46871284')", "def update_order(self, order):\n order.order_id = self.order_id\n order.average_price = self.avg_execution_price\n order.symbol = self.symbol\n order.side = self.side\n order.type = self.order_type\n order.amount = self.original_amount\n order.price = self.price\n order.filled = self.executed_amount\n order.remaining = self.remaining_amount\n if self.is_cancelled:\n order.status = exchanges.Order.Status.CANCELLED\n elif self.is_live:\n order.status = exchanges.Order.Status.OPEN\n else:\n order.status = exchanges.Order.Status.CLOSED\n return order", "def test_admin_change_order_status(self):\n # Test unregistered id\n # Correct format but not there\n response = self.client.put(\n 'api/v1/parcels/35420', headers=self.admin_token_dict)\n data = json.loads(response.data)\n self.assertEqual(response.status_code, 400)\n self.assertEqual(\n data, {'message': 'No Parcel delivery order with that id'})\n # Test invalid format id\n response = self.client.put(\n 'api/v1/parcels/35uh420', headers=self.admin_token_dict) # Incorrect id format\n data = json.loads(response.data)\n self.assertEqual(response.status_code, 400)\n self.assertEqual(data, {'message': 'Wrong id format'})", "def __orderAddItem(self, order, item):\n cursor = self.__db.cursor()\n iID = self.__id.getID(\"orderitem\")\n cursor.execute(\"INSERT INTO `orderItems` (`insertionID`, `orderID`, `itemID`) 
VALUES (%s, %s, %s);\",\n (iID, order, item))\n return iID", "def on_order(self, order: OrderData):\n pass", "def on_order(self, order: OrderData):\n pass", "def on_order(self, order: OrderData):\n pass", "def place(self, order):\n assert isinstance(order, Order)\n order_id = self.__order_count\n self.__pending_orders[order_id] = order\n self.__order_count = order_id + 1\n return order_id", "def get_orderno(self):\n WebDriverWait(self.driver, 20).until(EC.visibility_of_element_located((By.CSS_SELECTOR,'#order-no')))\n order_id=self.driver.find_element_by_css_selector('#order-no').text\n return order_id", "def onchange_invoice_id(self):\n # self.invoice_id = False\n # self.base_amount = 0.0\n # self.wh_src_rate = 5.0\n if self._context is None:\n context = {}\n res = {}\n inv_obj = self.env['account.invoice']\n if not self.invoice_id:\n return {'value': {\n 'invoice_id': False,\n 'base_amount': 0.0,\n 'wh_src_rate': 0.0,\n 'wh_amount': 0.0, }\n }\n\n inv_brw = inv_obj.browse(self.invoice_id.id)\n base_amount = self.base_amount or inv_brw.amount_untaxed\n wh_src_rate = self.wh_src_rate or inv_brw.wh_src_rate or 5.0\n wh_amount = base_amount * wh_src_rate / 100.0\n res = {'value': {\n 'base_amount': base_amount,\n 'wh_src_rate': wh_src_rate,\n 'wh_amount': wh_amount,\n }\n }\n return res", "def update_order(request):\n order = request.session.get('order', {})\n quantity = int(request.POST.get('quantity'))\n sizeID = str(request.POST.get('sizeID'))\n colorID = str(request.POST.get('colorID'))\n productID = str(request.POST.get('productID'))\n order_item_identifier = productID + \" \" + colorID + \" \" + sizeID\n if quantity > 0:\n order[order_item_identifier] = quantity\n print(str(order_item_identifier))\n else:\n order.pop(order_item_identifier)\n request.session['order'] = order\n return redirect(reverse('orders'))", "def on_order(self, order: OrderData):\n # print(\"on_order\")\n # print(order)\n pass", "def update_order(self, orderId, order_item):\n \n for order in self.order_lst:\n if int(order.get_orderId()) == int(orderId):\n order.add_item(order_item)\n return None\n \n new_order = Order(orderId)\n new_order.add_item(order_item)\n self.order_lst.append(new_order)", "def __init__(self, mode, _id=None, order_dict=None, ):\n if _id:\n if type(_id) == str:\n _id = ObjectId(_id)\n self.order = orders_table.find_one(\n {\"_id\": _id})\n # self.deleted = True if self.order else False\n if order_dict:\n self.order = order_dict\n # self.order = orders_table.find_one(\n # {\"_id\": order_dict[\"_id\"]})\n # self.deleted = True if self.order else False", "def save(self, *args, **kwargs):\n self.order_total = self.membership.price\n if not self.order_number:\n self.order_number = self._generate_order_number()\n super().save(*args, **kwargs)", "def get_raw_hash(cls, order_item):\n obj = copy.deepcopy(order_item)\n obj.order_id = None\n obj.order_version = None\n raw_order_id = yeti_utils_common.generate_id_md5_digit_20_for_object(obj)\n return raw_order_id", "def send_orders_created(order_ids):\n ids = [{\"id\": i} for i in order_ids]\n return make_response(jsonify({\"orders\": ids}), 201)", "def _create_add_message_from_order(order):\n message = {}\n message.update({\"message-type\": \"A\"})\n message.update({\"instrument\": order.instrument})\n message.update({\"order-id\": order.order_id})\n message.update({\"price\": int(order.price)})\n message.update({\"quantity\": int(order.quantity)})\n message.update({\"side\": side_to_str(order.side)})\n message.update({\"timestamp\": order.timestamp})\n 
message.update({\"snapshot\": 1})\n return message", "def create_obj_id_for_query(id_dict: Dict) -> Text:\n return \",\".join([f\"{key}={value}\" for key, value in id_dict.items()])", "def save_object(self, data):\n return Order(**data)", "def prepare_order(acct, order):\n myaddr = (acct.address).lower()\n order[\"makerAddress\"] = myaddr\n order_struct = jsdict_order_to_struct(order) \n sig = _sign_order(acct, order_struct)\n order_struct[\"signature\"] = sig\n js_order = order_to_jsdict(order_struct)\n js_order[\"exchangeAddress\"] = exchangeAddress\n return js_order", "def set_ID(self, x):\n x = str(x)\n if self.ID != x:\n self.ID = x", "def orders(self, orders):\n\n self._orders = orders", "def orders(self, orders):\n\n self._orders = orders", "def set_document_order(self, order):\n self.set_value_into_input_field(self.order_text_field_locator, order)", "def identity(self, *args, **kwargs):\n return {\n 'id': self.drone_id,\n }", "def put(self, order_id):\n body = request.get_json()\n order = db.session.query(models.Order).filter_by(id=order_id).first()\n if order is None:\n return 'Order id not found', 400\n borrower = body.get('borrower')\n borrower = query_user_by_name(borrower)\n if borrower is None:\n return 'User does not exit in the system', 404\n # if invalid_user(borrower.username):\n # return 'Unauthorized user, please login as a user/borrower', 401\n copy_id = body.get('copy_id')\n print(body)\n print(copy_id)\n copy = db.session.query(models.Copy).filter_by(id=copy_id).first()\n if copy is None:\n return 'Copy ID {} not found in system'.format(copy_id), 409\n elif copy.id != copy_id and copy.status == BOOK_COPY_STATUS_UNAVAILABLE:\n return 'The copy of the book is not available', 400\n copy_owner = body.get('copy_owner')\n owner = query_user_by_name(copy_owner)\n if owner is None:\n return 'Copy owner not found in the system'.format(copy_owner), 409\n # return_date = body.get('return_date')\n # if datetime.strptime(return_date, \"%y%m%d\") < datetime.strptime(datetime.utcnow().strftime(\"%Y-%m-%d\"), \"%y%m%d\"):\n # return 'Return date should be later than today', 400\n status = body.get('order_status')\n if status is not None and status < 0 or status > 4:\n return 'Status should between 0-4', 400\n order.parse_body_status(body)\n copy = db.session.query(models.Copy).filter_by(id=order.copy).first()\n if order.status == ORDER_STATUS_COMPLETED or order.status == ORDER_STATUS_DECLINED:\n copy.status = BOOK_COPY_STATUS_AVAILABLE\n else:\n copy.status = BOOK_COPY_STATUS_UNAVAILABLE\n db.session.commit()\n return order.serialize(), 200", "async def on_order_updated(self, order: MetatraderOrder):\n for i in range(len(self._orders)):\n if self._orders[i]['id'] == order['id']:\n self._orders[i] = order\n break\n else:\n self._orders.append(order)", "def set_room_order(self, room_orders):\n orders = []\n self.room_orders = ';'.join(['%d-%d' % \\\n (item[0], item[1]) for item in room_orders.items()])", "def get_order(self, order_id):\n request = OrdersGetRequest(order_id)\n #3. Call PayPal to get the transaction\n response = self.client.execute(request)\n return response\n #4. Save the transaction in your database. 
Implement logic to save transaction to your database for future reference.", "def get_actual_id(translated):", "def sender_order(self, sender_order):\n\n self._sender_order = sender_order", "def get_order_detail(orderid): \n data = order_obj.get_order_detail(orderid)\n return data", "def id(self, *args, **kwargs) -> Any:\n pass", "def _add_id(self, attrs):\n _id = {}\n _id['id'] = str(attrs.get('name', ''))\n _id['valid_from'] = (\n _get_date_from_string(attrs.get('validFrom', '')))\n _id['created'] = (\n _get_date_from_string(attrs.get('created', '')))\n _id['device'] = str(attrs.get('device', ''))\n self._ids[str(attrs.get('name', ''))] = _id", "def order(self, order):\n self._order = order", "def get_order_number(self):\n return self.__order_number", "def onchange_product_id(self):\n if not self.product_id:\n self.bom_id = False\n elif not self.bom_id or self.bom_id.product_tmpl_id != self.product_tmpl_id or (self.bom_id.product_id and self.bom_id.product_id != self.product_id):\n bom = self.env['mrp.bom']._bom_find(product=self.product_id, picking_type=self.picking_type_id, company_id=self.company_id.id, bom_type='normal')\n if bom:\n self.bom_id = bom.id\n self.product_qty = self.bom_id.product_qty\n self.product_uom_id = self.bom_id.product_uom_id.id\n else:\n self.bom_id = False\n self.product_uom_id = self.product_id.uom_id.id", "def __init__(self, order_id, order_type, orderer_id, orderer_address, distance_to_restaurant,\n restaurant_id, restaurant_address, timestamp):\n self.order_id = order_id\n self.order_type = order_type\n self.timestamp = timestamp\n self.date_created = datetime.datetime.now() # Sets the date/time record is created\n self.orderer_id = orderer_id\n self.orderer_address = orderer_address\n self.distance_to_restaurant = distance_to_restaurant\n self.restaurant_id = restaurant_id\n self.restaurant_address = restaurant_address", "def to_id(self):\n return \"%s%s%s%s%s\" % (NoOpTraceId.VERSION, NoOpTraceId.DELIMITER,\n self.start_time,\n NoOpTraceId.DELIMITER, self.__number)", "def fill_order(self, order: Order) -> None:\n order = self.get_order_by_id(order.id)\n order.status = OrderStatus.FILL", "def getId(self):" ]
[ "0.65247154", "0.64482886", "0.64482886", "0.64482886", "0.6383491", "0.6324064", "0.6154994", "0.59831274", "0.59831274", "0.5879656", "0.5852313", "0.5837288", "0.5810877", "0.57971513", "0.57876205", "0.57637125", "0.5755213", "0.56966513", "0.5617328", "0.55985826", "0.5566129", "0.5556084", "0.5535168", "0.5475102", "0.5467566", "0.5467566", "0.5467566", "0.54315114", "0.539514", "0.53851867", "0.53850746", "0.5372526", "0.5365835", "0.5357535", "0.53557736", "0.5337112", "0.53348887", "0.53169876", "0.5306357", "0.52918863", "0.52911836", "0.52843195", "0.52645373", "0.5261077", "0.5249007", "0.5232849", "0.52237105", "0.52237105", "0.52236956", "0.5222278", "0.52027756", "0.52000976", "0.5180203", "0.5180203", "0.5180103", "0.5179085", "0.51732314", "0.51684445", "0.51639557", "0.515862", "0.5139651", "0.51347303", "0.5130714", "0.5107339", "0.5107339", "0.5107339", "0.51046133", "0.5102421", "0.50945085", "0.5093826", "0.50910294", "0.5090254", "0.5090186", "0.5087901", "0.5083393", "0.50771415", "0.5058003", "0.505444", "0.50509", "0.50422966", "0.502444", "0.50192285", "0.50192285", "0.5014796", "0.50066125", "0.5000686", "0.49975926", "0.49896792", "0.4986455", "0.49829993", "0.497492", "0.49611565", "0.4960908", "0.49576476", "0.49494264", "0.49482182", "0.49469405", "0.4946798", "0.49387398", "0.49372283", "0.49345356" ]
0.0
-1
Send null value in Amount fields
def test_09(self): assert 'False' == Api.requestBlock('test-09')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_blank_value_19(field):\n if field.null:\n return None\n else:\n return ''", "def get_amount(self):\n\t\tif self.amount is not None:\n\t\t\treturn self.amount\n\t\treturn abort(400, {\"message\" : \"please provide the amount to process\"})", "def _get_blank_value_18(field):\n if field.null:\n return None\n else:\n return field.value_to_string(None)", "def add_nil_values(self, coverage_id=None, value=None, reason=None):", "def add_nil_values(self, coverage_id=None, value=None, reason=None):", "def add_nil_values(self, coverage_id=None, value=None, reason=None):", "def nullValueToNan(self) -> None:\n self.cpp.nullValueToNan()", "def test_empty_input():\n assert _currency_column_to_numeric(\"\") == \"ORIGINAL_NA\"", "def getamount(self):\n return self.__amount", "def safe_format_amount(commodity, amount):\n if commodity is None:\n return str(amount)\n return commodity.format_amount(amount)", "def none_to_empty(data):\n return data if data is not None else ''", "def fillna_method(request: Any) -> Any:\n return request.param", "def fillna_method(request: Any) -> Any:\n return request.param", "def _prepare_add_missing_fields(self, values):\n res = {}\n onchange_fields = ['name', 'price_unit', 'product_uom', 'tax_id']\n if values.get('order_id') and values.get('product_id') and any(f not in values for f in onchange_fields):\n line = self.new(values)\n line.product_id_change()\n for field in onchange_fields:\n if field not in values:\n res[field] = line._fields[field].convert_to_write(line[field], line)\n res['init_qty'] = values.get('product_uom_qty')\n _logger.debug(\"********************* dropship_portal\\sale_order res **********************: %r\", res)\n return res", "def __init__(self):\n self.account_balance = 0\n self.amount = 0", "def format_field(self, value, format_spec):\n if value is None:\n return format(value)\n else:\n return super(NoneFormatter, self).format_field(value, format_spec)\n if value is None:\n return format(value)\n else: raise e", "def onchange_amount(self, cr, uid, ids, amount, rate, partner_id, journal_id, currency_id, ttype, date, payment_rate_currency_id, company_id, context=None):\n if not context:\n context = {}\n default = super(account_voucher, self).onchange_amount(cr, uid, ids, amount, rate, partner_id, journal_id, currency_id, ttype, date, payment_rate_currency_id, company_id, context=context)\n if 'value' in default:\n amount = 'amount' in default['value'] and default['value']['amount'] or amount\n amount_in_word = amount_to_text(amount)\n default['value'].update({'amount_in_word':amount_in_word})\n if journal_id:\n allow_check_writing = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context).allow_check_writing\n default['value'].update({'allow_check':allow_check_writing})\n return default", "def nullValueToZero(self) -> None:\n self.cpp.nullValueToZero()", "def __init__(self, *args, **kwargs):\n kwargs['max_digits'] = kwargs.get('max_digits', 12)\n kwargs['decimal_places'] = kwargs.get('decimal_places', 2)\n super(MoneyField, self).__init__(*args, **kwargs)", "def __init__(self,\n amount=None,\n currency=None,\n **kwargs\n\n ):\n self.amount = amount\n self.currency = currency", "def clean_amount(self):\n if self.payer_channel == 2: # ignore balance check if not using wallet\n return self.cleaned_data['amount']\n else:\n pay_amount = self.cleaned_data.get('amount')*100\n payer_wallet = Wallet.objects.filter(wallet_id=self.cleaned_data.get('payer_method')).first()\n if payer_wallet is None:\n raise forms.ValidationError(\n 
self.error_messages['payer wallet unavailable'],\n code='payer wallet unavailable'\n )\n else:\n payer_balance = payer_wallet.balance\n if pay_amount > payer_balance:\n raise forms.ValidationError(\n self.error_messages['no_enough_balance'],\n code='no_enough_balance'\n )\n else:\n return self.cleaned_data['amount']", "def test_add_none_field(self):\n user_id = get_rand_string()\n data = get_rand_string()\n id = get_rand_string()\n\n doc = {}\n doc[\"user_id\"] = user_id\n doc[\"data\"] = data\n doc[\"id\"] = id\n doc[\"num\"] = None\n\n self.conn.add(**doc)", "def test_post_cve_id_empty_amount(reg_user_headers):\n res = requests.post(\n f'{env.AWG_BASE_URL}{CVE_ID_URL}',\n headers=reg_user_headers,\n params={\n 'amount': '',\n 'batch_type': 'sequential',\n 'cve_year': f'{utils.CURRENT_YEAR}',\n 'short_name': 'mitre'\n }\n )\n assert res.status_code == 400\n response_contains_json(res, 'error', 'BAD_INPUT')", "def no_payment_required(self):", "def to_python(self, value):\n if not value or value is models.fields.NOT_PROVIDED:\n value = None\n elif not isinstance(value, Decimal):\n try:\n value = Decimal(value)\n except InvalidOperation:\n value = None\n return value", "def silent_none(value):\n if value is None:\n return ''\n return value", "def clearBidData(self):\n self.txtAmount.setText('')\n self.txtMin.setText('')\n self.txtMax.setText('')\n self.txtMin.readOnly = 1\n self.txtMax.readOnly = 1\n self.lstType.clearSelection()\n self.lstRes.clearSelection()\n self.btnAddMarketOrder.disable()\n self.btnCancelOrder.disable()", "def null_value_handler(datatype, value, null_format):\n if DataType.Name(datatype) == \"STRING\":\n if NullValues.STRING == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DOUBLE\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"FLOAT\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT32\":\n if NullValues.INT32 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT64\":\n if NullValues.INT64 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DURATION\":\n if NullValues.DURATION.equals(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"TIMESTAMP\":\n if NullValues.TIMESTAMP.equals(value):\n return null_format\n return value\n else:\n return value", "def null_value_handler(datatype, value, null_format):\n if DataType.Name(datatype) == \"STRING\":\n if NullValues.STRING == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DOUBLE\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"FLOAT\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT32\":\n if NullValues.INT32 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT64\":\n if NullValues.INT64 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DURATION\":\n if NullValues.DURATION.equals(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"TIMESTAMP\":\n if NullValues.TIMESTAMP.equals(value):\n return null_format\n return value\n else:\n return value", "def getAmount(self):\n return self.amount", "def validate_fields_for_magento(self,data):\n for field in data:\n if data[field] == None :\n del data[field]\n if data[field] == True:\n data[field] = 1\n if data[field] == False :\n data[field] 
= 0", "def is_empty(self):\n return self.amount == 0", "def test_format_phone_none(self):\n number1 = None\n self.assertEqual(format_phone(number1), None)", "def noneType(value):\r\n return ''", "def amount(self, amount):\n if self.local_vars_configuration.client_side_validation and amount is None: # noqa: E501\n raise ValueError(\"Invalid value for `amount`, must not be `None`\") # noqa: E501\n\n self._amount = amount", "def test_charge_increment_model_initialize_with_no_elements(self):\n ForceField(xml_charge_increment_model_formal_charges)", "def format_amount(self) -> str:\n if self.amount_debit != '':\n return self.amount_debit.replace('-', '')\n return self.amount_credit.replace('-', '')", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def get_prep_value(self, value):\n if (value is UNKNOWN) or (value is ''):\n # If Django tries to save 
an empty string, send the db None (NULL).\n return None\n else:\n # Otherwise, just pass the value.\n return value", "def null_values(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OneDashboardPageWidgetBillboardNullValueArgs']]]]:\n return pulumi.get(self, \"null_values\")", "def form_InputNoneValue(request):\n schema = schemaish.Structure()\n schema.add('inputStrip', schemaish.String(default=''))\n\n form = formish.Form(schema, 'form')\n form['inputStrip'].widget = formish.Input(strip=True, none_value='BANG')\n return form", "def check_exchange_amount(exc):\n if \"amount\" not in exc:\n raise InvalidExchange\n if np.isnan(exc[\"amount\"]) or np.isinf(exc[\"amount\"]):\n raise ValueError(\"Invalid amount in exchange {}\".format(exc))", "def _value_if_not_fixed(o, d):\n if o.fixed:\n return ()\n return (\"value\",)", "def clear(self):\r\n self.firstname_value.set('')\r\n self.lastname_value.set('')\r\n self.id_number_value.set('')\r\n self.country_value.set('')", "def clean_razorpay_response(response: dict):\n response['amount'] = Decimal(response['amount']) / 100", "def _compute_amount_fields(self, amount, src_currency, company_currency):\n amount_currency = False\n currency_id = False\n date = self.env.context.get('date') or fields.Date.today()\n company = self.env.context.get('company_id')\n company = self.env['res.company'].browse(company) if company else self.env.user.company_id\n if src_currency and src_currency != company_currency:\n amount_currency = amount\n amount = src_currency._convert(amount, company_currency, company, date)\n currency_id = src_currency.id\n debit = amount > 0 and amount or 0.0\n credit = amount < 0 and -amount or 0.0\n return debit, credit, amount_currency, currency_id", "def allow_null_values(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_null_values\")", "def validate_empty_field(self, field, value):\n self.value = value\n self.field = field\n if self.value == \"\":\n message = \"{} field cannot be blank!\".format(self.field)\n raise GraphQLError(message)", "def __str__(self) -> str:\n return f'{self.amount}{self.currency}'", "def allow_null_values(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"allow_null_values\")", "def raw_value(self, value):\n if value is None:\n value = 0\n\n self.__value = value", "def test_total_missing_field(test_session):\n url = \"/total\"\n body = {}\n\n # Run the request.\n response = test_session.post(url, data=json.dumps(body), content_type=\"application/json\")\n\n assert response.status_code == 400\n assert response.json[\"error_message\"] == ERROR_MISSING_REQUIRED", "def test_serialize_none(self):\n self.assertEqual(serialize(None), 'null')", "def amount(self) -> Optional[pulumi.Input['BudgetAmountArgs']]:\n return pulumi.get(self, \"amount\")", "def _amount_all(self, cr, uid, ids,field_name, arg, context={}):\n res={}\n for record in self.browse(cr, uid, ids, context=context):\n res[record.id] = { 'amount_untaxed': 0.0, 'amount_tax': 0.0, 'amount_total': 0.0}\n amount_untaxed = 0.0\n amount_tax = 0.0\n amount_total = 0.0\n\t if not record.allowances_lines_after and record.allowances_lines_before:\n \tfor line in record.allowances_lines_before:\n \tamount_untaxed += line.amount_untaxed\n \tamount_tax += line.amount_tax\n \tamount_total += line.amount_total\n \tres[record.id]['amount_untaxed'] = amount_untaxed \n \tres[record.id]['amount_tax'] = amount_tax \n \tres[record.id]['amount_total'] = amount_total \n\n\t elif record.allowances_lines_after and 
record.allowances_lines_before :\n \tfor line in record.allowances_lines_after:\n \tamount_untaxed += line.amount_untaxed\n \tamount_tax += line.amount_tax\n \tamount_total += line.amount_total\n \tres[record.id]['amount_untaxed'] = amount_untaxed \n \tres[record.id]['amount_tax'] = amount_tax \n \tres[record.id]['amount_total'] = amount_total \n return res", "def set_amount(self, amount):\n self.amount = amount", "def ask(self, value):\n if value is not None:\n self._ask = Decimal(value)", "def test_null_as_null_indicator(self):\n self.custom_null_indicator_template('null')", "def save(self, *args, **kwargs):\n self.order_total = self.membership.price\n if not self.order_number:\n self.order_number = self._generate_order_number()\n super().save(*args, **kwargs)", "def _validate_beneficiary_customer_no_option_59(self, val):\n return val", "def getAmount1(*args):", "def getAmount(self):\n return self.base.get(\"amount\", [])", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def form_IntegerNoneDefault(request):\n schema = schemaish.Structure()\n schema.add('myIntegerField', schemaish.Integer())\n form = formish.Form(schema, 'form')\n form.defaults = {'myIntegerField':None}\n return form", "def coerce_empty_numeric_values(self):\n if \"numeric\" in self.annot_types:\n numeric_columns = self.file.xs(\n \"numeric\", axis=1, level=1, drop_level=False\n ).columns.tolist()\n self.file[numeric_columns].replace(\"\", np.nan, inplace=True)", "def notna(self):\n return super().notna()" ]
[ "0.60821736", "0.59436923", "0.5912167", "0.56709254", "0.56709254", "0.56709254", "0.55559736", "0.5472613", "0.54673105", "0.5452079", "0.54242957", "0.53854245", "0.53854245", "0.5326176", "0.5292764", "0.52759343", "0.5254463", "0.52461946", "0.5239521", "0.5234681", "0.5233621", "0.5222194", "0.5213156", "0.5205902", "0.5159277", "0.5157134", "0.51396024", "0.5129485", "0.5129485", "0.5108645", "0.5106944", "0.51055914", "0.51047164", "0.50951946", "0.50833833", "0.5082594", "0.5061629", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.50561506", "0.5048469", "0.50366545", "0.5036355", "0.5030543", "0.50303406", "0.50241816", "0.5023421", "0.50221753", "0.50193965", "0.5014593", "0.5004285", "0.4989857", "0.49811277", "0.49798417", "0.49735457", "0.49583572", "0.49543384", "0.49540228", "0.49505153", "0.49370056", "0.4935863", "0.49350247", "0.4934836", "0.49227685", "0.4922306", "0.4922306", "0.4922306", "0.4922306", "0.4922306", "0.4922306", "0.4922306", "0.4922306", "0.4922306", "0.49155316", "0.4909736", "0.49071392" ]
0.0
-1
Send value with dot in Amount fields
def test_10(self): assert 'False' == Api.requestBlock('test-10')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _amount(amount, asset='HBD'):\n assert asset == 'HBD', 'unhandled asset %s' % asset\n return \"%.3f HBD\" % amount", "def amount_ui(self) -> str:\n return \"{:,.2f}\".format(self.amount)", "def obtain_amount(cls, amount_string):\n return float(string.replace(amount_string, ',', '.'))", "def __str__(self) -> str:\n return f'{self.amount}{self.currency}'", "def _format_instructed_amount_33B(self, val):\n instructed_amount = val.get('instructed_amount')\n currency = val.get('currency')\n if instructed_amount and currency:\n instructed_amount = apply_currency_precision(currency, abs(float(instructed_amount)))\n val = str(currency) + str(FSwiftMLUtils.float_to_swiftmt(str(instructed_amount)))\n return val", "def getamount(self):\n return self.__amount", "def amount(self):\r\n return self._data['amount']", "def format_price(value: Decimal, order: Order, request: HttpRequest) -> str:\n context = {\n \"request\": request,\n \"order\": order,\n \"admin\": True,\n }\n return app_settings.SALESMAN_PRICE_FORMATTER(value, context=context)", "def format_amount(self) -> str:\n if self.amount_debit != '':\n return self.amount_debit.replace('-', '')\n return self.amount_credit.replace('-', '')", "def set_vat_amount(self, vat_amount):\n self.set_value_into_input_field(self.vat_amount_textbox_locator, vat_amount, True)", "def amount(self):\n\n\t\telement = Element(driver=self.driver,\n\t\t explicit_wait_time=self.explicit_wait_time,\n\t\t locator=BillPayPageLocator.AMOUNT_INPUT)\n\t\treturn element.element_value", "def display_price(self):\n return '$ '+str(self.price)", "def adapt_decimal(value: t.Any) -> str:\n return str(value)", "def _exchange_amount(amount, rate):\n return '%.2f' % round(float(amount) * float(rate), 2)", "def get_amount(self):\n\t\tif self.amount is not None:\n\t\t\treturn self.amount\n\t\treturn abort(400, {\"message\" : \"please provide the amount to process\"})", "def update_total(self):\n self.objects[self.ids.AMOUNT].setText(\"Total Spend: \\xA3%.2f\" % (self.owner.total_price() / 100))", "def format_value(self, value: float) -> str:\r\n ...", "def format_value(self, value: float) -> str:\r\n ...", "def getAmount(self):\n return self.amount", "def getAmount2(*args):", "def set_adjustment_charge_amount(self, amount):\n self.script_executor(\"var element = document.getElementById('%s'); element.style.display = 'block';\" % self.charge_amount_textbox_id)\n self.set_value_into_input_field(self.adjustment_charge_amount_textbox_locator, amount)", "def set_amount(self, amount):\n self.amount = amount", "def _set_instructed_amount_33B(self, val):\n self.swift_obj.CurrencyInstructedAmount = val\n self.swift_obj.CurrencyInstructedAmount.swiftTag = \"33B\"", "def clean_amount(self):\n if self.payer_channel == 2: # ignore balance check if not using wallet\n return self.cleaned_data['amount']\n else:\n pay_amount = self.cleaned_data.get('amount')*100\n payer_wallet = Wallet.objects.filter(wallet_id=self.cleaned_data.get('payer_method')).first()\n if payer_wallet is None:\n raise forms.ValidationError(\n self.error_messages['payer wallet unavailable'],\n code='payer wallet unavailable'\n )\n else:\n payer_balance = payer_wallet.balance\n if pay_amount > payer_balance:\n raise forms.ValidationError(\n self.error_messages['no_enough_balance'],\n code='no_enough_balance'\n )\n else:\n return self.cleaned_data['amount']", "def test_convert_amounts(self):\n pass", "def _format_contract_number_partyA_21N(self, val):\n return val", "def getTransactionAmount(self,message):\n amount = 
re.findall(Analyzer.rgxAmount,message.lower())\n return amount[0].capitalize()", "def eur(value):\n float(value)\n return f\"€{value:,.2f}\"", "def usd(value):\r\n return f\"${Decimal(value):,.2f}\"", "def update_account_value(msg):\n if msg is not None and msg.tag == 'TotalCashValue':\n print(\"Account %s, cash: %s %s\" % (msg.account, msg.value, msg.currency))", "def getAmount1(*args):", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def test_isolate_amount_returns_nested_amount(self):\n response = { 'data': { 'amount': '70.2' } }\n amount = isolate_amount(response)\n self.assertEqual(amount, 70.2)", "def set_total_amount_with_taxes(self, amount):\n self.set_value_into_input_field(self.total_amount_with_taxes_textbox_locator, amount, True)", "def format_amount(amount: int | str | float) -> str:\r\n # Remove .0 by casting to int\r\n if float(amount) % 1 == 0:\r\n amount = int(float(amount))\r\n\r\n # Adding prefix + for positive number and 0\r\n if not str(amount).startswith('+') and float(amount) >= 0:\r\n amount = str(f'+{amount}')\r\n\r\n # return as string\r\n return str(amount)", "def getAmount(self):\n return self.base.get(\"amount\", [])", "def currency_to_protocol(amount):\n if type(amount) == float:\n amount = \"%.8f\" % amount\n\n return int(amount.replace(\".\", '')) # avoiding float math", "def render_money(amount: Money, message: str = \"\") -> str:\n\n return f\"{message} {amount.amount} {amount.currency}\"", "def get_tx_amount():\n return float(input(\"Enter Transaction Amount: \"))", "def amount(self):\n return self.__amount", "def amount(self):\n return self.__amount", "def update_total_price():\n tk_total_price.set('Total: {0:>6}'.format(str(total_price)))\n print(total_price)", "def get_total_display(self):\n total = self.total\n return '%.2f\\N{euro sign}' % total", "def _format_details_of_charges_71A(self, val):\n return val", "def amount_on_field(self):\n\n query = \"\"\" SELECT SUM(field_entry.value::DOUBLE PRECISION)\n FROM crowdataapp_documentsetfieldentry field_entry\n INNER JOIN crowdataapp_documentsetformentry form_entry ON form_entry.id = field_entry.entry_id\n INNER JOIN crowdataapp_document document ON document.id = form_entry.document_id\n WHERE document.document_set_id = %d\n AND field_entry.verified = TRUE\n AND field_entry.field_id = %d\"\"\" % ( self.id, self.tosum_field.id)\n\n cursor = connection.cursor()\n cursor.execute(query)\n\n amount = cursor.fetchall()[0][0]\n\n return amount", "def clean_value(self, value):\n return float(value.replace('.', '').replace(',', '.'))", "def amount(self):\n return(self.order_master.amount)", "def format_usd(my_price):\n return f\"${my_price:,.2f}\"", "def _format_senders_reference_20(self, val):\n if val:\n sett_obj = acm.FSettlement[str(val)]\n val = \"%s-%s-%s-%s\" % (get_settlement_reference_prefix(), str(val), str(get_message_version_number(sett_obj)), str(self.swift_message_type[2:5]))\n return val", "def amount(self):\n return self._amount", "def ParseAmount(am):\n\n ParseAmount.valid = True\n # filter\n am = re.sub('[^0-9,.-]','',am)\n # integers(any 
number).fraction(0..2) \n # find decimal point\n frac1 =len(am)-am.find('.')\n frac2 =len(am)-am.find(',')\n # No grouping & No fraction / decimal-point\n if (frac1 == frac2):\n am = '%s.00'% am\n # xxx,xxx,xxx.xx comma-grouping, dot-decimal\n elif (frac1 < 4) and (frac1 > 0): \n am = am.replace(',','')\n # xxx.xxx.xxx,xx dot-grouping, comma-decimal\n elif (frac2 < 4) and (frac2 > 0):\n am = am.replace('.','')\n am = am.replace(',','.') # harmonize decimal-point\n # grouping & No fraction / decimal-point\n else:\n am = am.replace(',','')\n am = am.replace('.','')\n am = '%s.00'% am\n # check validity result\n if (len(am) - am.find('.')) != 3:\n ParseAmount.valid = False\n return am", "def _amount_line(self, cr, uid, ids, prop, unknow_none, unknow_dict):\n res = {}\n tax_obj = self.pool.get('account.tax')\n cur_obj = self.pool.get('res.currency')\n for line in self.browse(cr, uid, ids):\n price = line.price_unit * (1-(line.discount or 0.0)/100.0)\n taxes = tax_obj.compute_all(cr, uid, line.invoice_line_tax_id, price, line.quantity, product=line.product_id, partner=line.invoice_id.partner_id)\n res[line.id] = taxes['total'] + line.variation_amount\n if line.invoice_id:\n cur = line.invoice_id.currency_id\n res[line.id] = cur_obj.round(cr, uid, cur, res[line.id])\n return res", "def set_price(self, request, pk):\n return Response('20$')", "def set_price(self, request, pk):\n return Response('20$')", "def onchange_amount(self, cr, uid, ids, amount, rate, partner_id, journal_id, currency_id, ttype, date, payment_rate_currency_id, company_id, context=None):\n if not context:\n context = {}\n default = super(account_voucher, self).onchange_amount(cr, uid, ids, amount, rate, partner_id, journal_id, currency_id, ttype, date, payment_rate_currency_id, company_id, context=context)\n if 'value' in default:\n amount = 'amount' in default['value'] and default['value']['amount'] or amount\n amount_in_word = amount_to_text(amount)\n default['value'].update({'amount_in_word':amount_in_word})\n if journal_id:\n allow_check_writing = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context).allow_check_writing\n default['value'].update({'allow_check':allow_check_writing})\n return default", "def text_transform(val):\n if CURRENCY == \"USD\":\n return \"$%d\" % val\n if CURRENCY == \"EUR\":\n return \"€%d\" % val\n if CURRENCY == \"GBP\":\n return \"£%d\" % val\n return \"%d\" % val", "def get_price(str_val):\n return float(str_val.replace('.', '').replace(',', '.'))", "def __CheckValue1(self, value, value2):\n if value[-1:] == '.':\n return value + '.'\n return value", "def test_normal_decimal_input(self):\r\n ws_leader = \"S. 
O'Neal (14.9)\"\r\n res = treat_input(ws_leader, type=\"float\")\r\n assert res == 14.9", "def monetary_amount_valid(record, field_name='price', min=1, max=10):\n monetary_amount = record[field_name]\n assert isinstance(monetary_amount, float)\n string_price = str(monetary_amount)\n decimal = string_price.split(\".\")[1]\n assert min <= monetary_amount <= max and len(decimal) <= 2", "def format_amount(amount):\n if not amount:\n return ''\n return \"{} {}\".format(format_currency(amount.number, amount.currency),\n amount.currency)", "def prepare_value(self, value):\n if value is None:\n return value\n value = value.replace(\" \", \"\").replace(\".\", \"\")\n if value:\n return \"%s.%s.%s.%s\" % (value[0:3], value[3:7], value[7:11], value[11:])\n return value", "def getUserCurrency():", "def order_promoter_cut_amount(obj):\n return '%s' % obj.order.promoter_cut_amount", "def raw(self, raw):\n self.uw.send('%s.val = %.4f' % (self.name, self.clipRawLimits(raw)))", "def find_send_amounts(input_text: str) -> float:\n regex = r'(?:^|\\s)(\\d*\\.?\\d+)(?=$|\\s)'\n matches = re.findall(regex, input_text, re.IGNORECASE)\n if len(matches) > 1:\n raise AmountAmbiguousException(\"amount_ambiguous\")\n elif len(matches) == 1:\n return float(matches[0].strip())\n raise AmountMissingException(\"amount_not_found\")", "def func(pct, allvals):\n return str(format(round(pct/100.*np.sum(allvals), 2),\".2f\")) + \"€\"", "def send(ctx, address, amount, denomination, use_unconfirmed, verbose):\n if denomination == '':\n confirmed = click.confirm(uxstring.UxString.default_price_denomination, default=True)\n if not confirmed:\n raise exceptions.Two1Error(uxstring.UxString.cancel_command)\n denomination = currency.Price.SAT\n price = currency.Price(amount, denomination)\n return _send(ctx.obj['wallet'], address, price.satoshis, verbose, use_unconfirmed)", "def price(self, value):\n self._price = Decimal(value)", "def local_price(amount, currency):\n amt = convert(amount, currency)\n sym = symbol(currency)\n return f'{sym}{amt}'", "def __init__(self, *args, **kwargs):\n kwargs['max_digits'] = kwargs.get('max_digits', 12)\n kwargs['decimal_places'] = kwargs.get('decimal_places', 2)\n super(MoneyField, self).__init__(*args, **kwargs)", "def send_decimal_value(self, value: int, length: int = 24):\n self.send_message(b'\\x01' +\n value.to_bytes(4, 'big') +\n length.to_bytes(2, 'big'))", "def format_tuition(self, data):\n d = u'$%.2f' % data\n return d.replace('.00','')", "def type_amount(self, amount):\n\n\t\twith allure.step(\"Type amount\"):\n\t\t\telement = Element(driver=self.driver,\n\t\t\t                  explicit_wait_time=self.explicit_wait_time,\n\t\t\t                  locator=BillPayPageLocator.AMOUNT_INPUT)\n\t\t\telement.write(amount)\n\t\t\treturn None", "def change_price(self, value): \n value = self.price", "def set_charge_amount(self, charge_amount):\n self.set_value_into_input_field(self.charge_amount_textbox_locator, charge_amount, True)", "def clean_razorpay_response(response: dict):\n response['amount'] = Decimal(response['amount']) / 100", "def format_as_usd(value):\n return f\"${value:,.2f}\"", "def _format_value(self, v):\n return \"%.4g\" % v", "def _fix_balance(self, balance):\n\n return float(balance.replace(',', '.').replace(' ', ''))", "def clean(amount):\n # Return empty input immediately.\n if not amount:\n return amount\n\n if re.search(r'[\\. 
][0-9]{3},[0-9]{1,2}$', amount):\n # Assume amount is in 1.123,12 or 1 123,12 format (Dutch).\n return amount.replace('.', '').replace(' ', '').replace(',', '.')\n\n if re.search(r'[, ][0-9]{3}\.[0-9]{1,2}$', amount):\n # Assume amount is in 1,123.12 format (English).\n return amount.replace(',', '').replace(' ', '')\n\n if re.search(r'[0-9](,[0-9]{1,2}$)', amount):\n # Assume amount is in 123,12 or in 123,1 format (Dutch).\n return amount.replace(',', '.')\n\n # Format of amount is not recognized. Return amount.\n return amount", "def test_default_w_decimals(self):\n self.assertEqual(currency(188.00), \"$188.00\")", "def exchange(currency_from,currency_to,amount_from):\n c_f=currency_from\n c_t=currency_to\n a_f=amount_from\n doc = urlopen('http://cs1110.cs.cornell.edu/2016fa/a1server.php?from='+c_f+'&to='+c_t+'&amt='+a_f)\n docstr = doc.read()\n doc.close()\n jstr = docstr.decode('ascii')\n list_total=list(jstr)\n list_number=[]\n begin_character=len(list(a_f))\n for item in list_total:\n if item.isdigit()==True or item=='.':\n list_number=list_number+[item]\n answerlist=list_number[begin_character:]\n answer=''\n for item in answerlist:\n answer=answer+item\n amount_to=float(answer)\n return amount_to", "def to_usd(my_price):\n return f\"${my_price:,.2f}\"", "def get_money(self):\n return self.money", "def get_inbound_statement_details_vat_amount(self):\n return self.get_text_from_element(self.inbound_statements_details_vat_amount_locator, True)", "def ask(self, value):\n if value is not None:\n self._ask = Decimal(value)", "def net_amount(self, net_amount):\n\n self._net_amount = net_amount", "def get_formated_price(\n amount: Decimal,\n precision: int = DEFAULT_DECIMAL_PLACES\n) -> str:\n return \"{:0.0{}f}\".format(amount, precision)", "def safe_format_amount(commodity, amount):\n if commodity is None:\n return str(amount)\n return commodity.format_amount(amount)", "def _strip_currency(amount):\n if amount[0] == '$':\n amount = amount[1:]\n if amount.find(\".\") != -1:\n return amount.replace(\".\", \"\")\n return amount", "def make_money(self):\n print(f'I\\'m making money buy Crypto currencies trading')", "def get_amount_line(self, txt_line, amount_exempt):\n ali_max = self.get_max_aliquot(txt_line)\n exempt = 0\n\n if ali_max == int(txt_line.tax_wh_iva_id.tax_id.amount * 100):\n exempt = amount_exempt\n total = (txt_line.tax_wh_iva_id.base + txt_line.tax_wh_iva_id.amount +\n exempt)\n return total, exempt" ]
[ "0.61124015", "0.59985334", "0.5889612", "0.58480203", "0.5810647", "0.5727278", "0.5675556", "0.5655621", "0.56539106", "0.56217325", "0.56098205", "0.5542181", "0.5542173", "0.5525029", "0.5482661", "0.5476994", "0.5462194", "0.5462194", "0.54560274", "0.5453152", "0.5451577", "0.54329044", "0.5416327", "0.53763545", "0.5371522", "0.5370698", "0.536749", "0.5364447", "0.5356375", "0.5355799", "0.5348012", "0.5338845", "0.5338845", "0.5338845", "0.5338845", "0.5338845", "0.5338845", "0.5338845", "0.5338845", "0.5338845", "0.53379136", "0.5335152", "0.53336805", "0.5332315", "0.5329464", "0.5322276", "0.53201795", "0.53151035", "0.53151035", "0.52984107", "0.5288199", "0.5284361", "0.5277702", "0.52510273", "0.5229484", "0.5228814", "0.5216022", "0.5198663", "0.51775604", "0.51700515", "0.5169976", "0.5169976", "0.51615363", "0.5155905", "0.5147054", "0.5143842", "0.51401454", "0.512601", "0.51144046", "0.5099372", "0.5092118", "0.5090302", "0.5089513", "0.5089283", "0.5058292", "0.50565773", "0.5050575", "0.5046233", "0.50437737", "0.50204384", "0.5017506", "0.5015562", "0.5009171", "0.50069296", "0.5000444", "0.49928218", "0.499132", "0.4989577", "0.49877787", "0.4985724", "0.4982243", "0.49820915", "0.4978385", "0.4966816", "0.4962737", "0.49614653", "0.49554726", "0.4954724", "0.49508643", "0.49493736", "0.49349737" ]
0.0
-1
Send different values in Amount fields
def test_11(self): assert 'False' == Api.requestBlock('test-11')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getAmount1(*args):", "def onchange_amount(self, cr, uid, ids, amount, rate, partner_id, journal_id, currency_id, ttype, date, payment_rate_currency_id, company_id, context=None):\n if not context:\n context = {}\n default = super(account_voucher, self).onchange_amount(cr, uid, ids, amount, rate, partner_id, journal_id, currency_id, ttype, date, payment_rate_currency_id, company_id, context=context)\n if 'value' in default:\n amount = 'amount' in default['value'] and default['value']['amount'] or amount\n amount_in_word = amount_to_text(amount)\n default['value'].update({'amount_in_word':amount_in_word})\n if journal_id:\n allow_check_writing = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context).allow_check_writing\n default['value'].update({'allow_check':allow_check_writing})\n return default", "def getAmount2(*args):", "def _amount_all(self, cr, uid, ids,field_name, arg, context={}):\n res={}\n for record in self.browse(cr, uid, ids, context=context):\n res[record.id] = { 'amount_untaxed': 0.0, 'amount_tax': 0.0, 'amount_total': 0.0}\n amount_untaxed = 0.0\n amount_tax = 0.0\n amount_total = 0.0\n\t if not record.allowances_lines_after and record.allowances_lines_before:\n \tfor line in record.allowances_lines_before:\n \tamount_untaxed += line.amount_untaxed\n \tamount_tax += line.amount_tax\n \tamount_total += line.amount_total\n \tres[record.id]['amount_untaxed'] = amount_untaxed \n \tres[record.id]['amount_tax'] = amount_tax \n \tres[record.id]['amount_total'] = amount_total \n\n\t elif record.allowances_lines_after and record.allowances_lines_before :\n \tfor line in record.allowances_lines_after:\n \tamount_untaxed += line.amount_untaxed\n \tamount_tax += line.amount_tax\n \tamount_total += line.amount_total\n \tres[record.id]['amount_untaxed'] = amount_untaxed \n \tres[record.id]['amount_tax'] = amount_tax \n \tres[record.id]['amount_total'] = amount_total \n return res", "def getamount(self):\n return self.__amount", "def test_convert_amounts(self):\n pass", "def _amount_all(self):\n for order in self:\n order.update({\n 'net_rate': order.basic_rate + order.extra_rate\n })", "def set_amount(self, amount):\n self.amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def _amount_all(self, cr, uid, ids,field_name, arg, context={}):\n res={}\n for record in self.browse(cr, uid, ids, context=context):\n val = 0.0\n for line in record.enrich_lines:\n if line.state == 'done' :\n val += line.cost\n res[record.id] = {\n 'paid_amount':val,\n 'residual_amount':record.amount - val,\n }\n return res", "def _compute_amount_fields(self, amount, src_currency, company_currency):\n amount_currency = False\n currency_id = False\n date = self.env.context.get('date') or fields.Date.today()\n company = self.env.context.get('company_id')\n company = self.env['res.company'].browse(company) if company else self.env.user.company_id\n if src_currency and src_currency != company_currency:\n amount_currency = amount\n amount = src_currency._convert(amount, company_currency, company, date)\n currency_id = src_currency.id\n 
debit = amount > 0 and amount or 0.0\n credit = amount < 0 and -amount or 0.0\n return debit, credit, amount_currency, currency_id", "def _amount_all_wrapper(self, cr, uid, ids, field_name, arg, context=None):\n return self._amount_all(cr, uid, ids, field_name, arg, context=context)", "def _amount_line(self, cr, uid, ids, prop, unknow_none, unknow_dict):\n res = {}\n tax_obj = self.pool.get('account.tax')\n cur_obj = self.pool.get('res.currency')\n for line in self.browse(cr, uid, ids):\n price = line.price_unit * (1-(line.discount or 0.0)/100.0)\n taxes = tax_obj.compute_all(cr, uid, line.invoice_line_tax_id, price, line.quantity, product=line.product_id, partner=line.invoice_id.partner_id)\n res[line.id] = taxes['total'] + line.variation_amount\n if line.invoice_id:\n cur = line.invoice_id.currency_id\n res[line.id] = cur_obj.round(cr, uid, cur, res[line.id])\n return res", "def get_amount(self):\n\t\tif self.amount is not None:\n\t\t\treturn self.amount\n\t\treturn abort(400, {\"message\" : \"please provide the amount to process\"})", "def fromAmountHandler(self):\n\n self.last_clicked = \"amount\"\n self.updateUI()\n self.last_clicked = \"\"", "def getAmount(self):\n return self.amount", "def _get_amount_value(\n self, cr, uid, ids, ifrs_line=None, period_info=None,\n fiscalyear=None, exchange_date=None, currency_wizard=None,\n number_month=None, target_move=None, pdx=None, undefined=None,\n two=None, one_per=False, bag=None, context=None):\n\n context = context and dict(context) or {}\n # TODO: Current Company's Currency shall be used: the one on wizard\n from_currency_id = ifrs_line.ifrs_id.company_id.currency_id.id\n to_currency_id = currency_wizard\n\n if number_month:\n if two:\n context = {\n 'period_from': number_month, 'period_to': number_month}\n else:\n period_id = period_info[number_month][1]\n context = {'period_from': period_id, 'period_to': period_id}\n else:\n context = {'whole_fy': True}\n\n # NOTE: This feature is not yet been implemented\n # context['partner_detail'] = pdx\n context['fiscalyear'] = fiscalyear\n context['state'] = target_move\n\n if ifrs_line.type == 'detail':\n res = self._get_sum_detail(\n cr, uid, ifrs_line.id, number_month,\n context=context)\n elif ifrs_line.type == 'total':\n res = self._get_grand_total(\n cr, uid, ifrs_line.id, number_month,\n one_per=one_per, bag=bag, context=context)\n elif ifrs_line.type == 'constant':\n res = self._get_constant(cr, uid, ifrs_line.id, number_month,\n context=context)\n else:\n res = 0.0\n\n if ifrs_line.type == 'detail':\n res = self.exchange(\n cr, uid, ids, res, to_currency_id, from_currency_id,\n exchange_date, context=context)\n return res", "def __init__(self, *args, **kwargs):\n kwargs['max_digits'] = kwargs.get('max_digits', 12)\n kwargs['decimal_places'] = kwargs.get('decimal_places', 2)\n super(MoneyField, self).__init__(*args, **kwargs)", "def getAmount(self):\n return self.base.get(\"amount\", [])", "def set_adjustment_charge_amount(self, amount):\n self.script_executor(\"var element = document.getElementById('%s'); element.style.display = 'block';\" % self.charge_amount_textbox_id)\n self.set_value_into_input_field(self.adjustment_charge_amount_textbox_locator, amount)", "def amount_on_field(self):\n\n query = \"\"\" SELECT SUM(field_entry.value::DOUBLE PRECISION)\n FROM crowdataapp_documentsetfieldentry field_entry\n INNER JOIN crowdataapp_documentsetformentry form_entry ON form_entry.id = field_entry.entry_id\n INNER JOIN crowdataapp_document document ON document.id = form_entry.document_id\n 
WHERE document.document_set_id = %d\n AND field_entry.verified = TRUE\n AND field_entry.field_id = %d\"\"\" % ( self.id, self.tosum_field.id)\n\n cursor = connection.cursor()\n cursor.execute(query)\n\n amount = cursor.fetchall()[0][0]\n\n return amount", "def _compute_amount(self):\n for line in self:\n line.update({\n 'price_subtotal': line.price_unit * line.quantity,\n })", "def on_change_renew(self, cr, uid, ids, enrich_id, context=None):\n enrich = self.browse(cr, uid, enrich_id, context=context)\n \tamount=enrich.amount\n\n \treturn {\n \t'value': {\n 'amount': amount,\n }\n }", "def write(self, cr, uid, ids, vals, context=None):\n #for the case of the solidarity box request\n if context:\n if 'default_type' in context and context['default_type'] == 'solidarity':\n if 'enrich_category' in vals:\n vals['amount']=self.pool.get('enrich.category').read(cr, uid, vals['enrich_category'], ['amount'], context=context)['amount']\n return super(payment_enrich, self).write(cr, uid, ids, vals, context)", "def amount(self):\r\n return self._data['amount']", "def save(self, *args, **kwargs):\n self.total = self.quantity * self.price\n super(DeliveryDetail, self).save(*args, **kwargs)", "def deposit(self, cr, uid, ids, amount, context=None):\n record = self.browse(cr, uid, ids, context=context)[0]\n current_amount = record.current_amount\n deposit_amount = record.deposit_amount\n record.write({'current_amount':current_amount + amount,\n 'deposit_amount':deposit_amount + amount })\n return True", "def skim_amount(amount, fees):\n fee = amount * fees.var + fees.fix\n vat = fee * Fees.VAT\n fee += vat\n fee = fee.round_up()\n vat = vat.round_up()\n return amount - fee, fee, vat", "def _amount(amount, asset='HBD'):\n assert asset == 'HBD', 'unhandled asset %s' % asset\n return \"%.3f HBD\" % amount", "def type_amount(self, amount):\n\n\t\twith allure.step(\"Type amount\"):\n\t\t\telement = Element(driver=self.driver,\n\t\t\t explicit_wait_time=self.explicit_wait_time,\n\t\t\t locator=BillPayPageLocator.AMOUNT_INPUT)\n\t\t\telement.write(amount)\n\t\t\treturn None", "def test_amount_value(self):\n dict_with_value = self.info_list.get_value_info()\n self.assertEqual(dict_with_value['amount'], 26)", "def clean_amount(self):\n if self.payer_channel == 2: # ignore balance check if not using wallet\n return self.cleaned_data['amount']\n else:\n pay_amount = self.cleaned_data.get('amount')*100\n payer_wallet = Wallet.objects.filter(wallet_id=self.cleaned_data.get('payer_method')).first()\n if payer_wallet is None:\n raise forms.ValidationError(\n self.error_messages['payer wallet unavailable'],\n code='payer wallet unavailable'\n )\n else:\n payer_balance = payer_wallet.balance\n if pay_amount > payer_balance:\n raise forms.ValidationError(\n self.error_messages['no_enough_balance'],\n code='no_enough_balance'\n )\n else:\n return self.cleaned_data['amount']", "def transferfunds(self):", "def set_vat_amount(self, vat_amount):\n self.set_value_into_input_field(self.vat_amount_textbox_locator, vat_amount, True)", "def _set_instructed_amount_33B(self, val):\n self.swift_obj.CurrencyInstructedAmount = val\n self.swift_obj.CurrencyInstructedAmount.swiftTag = \"33B\"", "def transfer_amount(self, conn, data_subtract, data_add):\n sql_subtract = 'UPDATE card SET balance = balance - ? WHERE number = ?;'\n sql_add = 'UPDATE card SET balance = balance + ? 
WHERE number = ?;'\n\n c = conn.cursor()\n c.execute(sql_subtract, data_subtract)\n conn.commit()\n\n c = conn.cursor()\n c.execute(sql_add, data_add)\n conn.commit()\n\n # print(f\"amount {data_add[0]} was added to account {data_add[1]}\")\n print(\"Success!\")\n self.menus()", "def set_total_amount_with_taxes(self, amount):\n self.set_value_into_input_field(self.total_amount_with_taxes_textbox_locator, amount, True)", "def get_amount_line(self, txt_line, amount_exempt):\n ali_max = self.get_max_aliquot(txt_line)\n exempt = 0\n\n if ali_max == int(txt_line.tax_wh_iva_id.tax_id.amount * 100):\n exempt = amount_exempt\n total = (txt_line.tax_wh_iva_id.base + txt_line.tax_wh_iva_id.amount +\n exempt)\n return total, exempt", "def amount(self):\n\n\t\telement = Element(driver=self.driver,\n\t\t explicit_wait_time=self.explicit_wait_time,\n\t\t locator=BillPayPageLocator.AMOUNT_INPUT)\n\t\treturn element.element_value", "def short(self, amount):", "def __str__(self) -> str:\n return f'{self.amount}{self.currency}'", "def add(self, amount):\n self.amount += amount", "def amount(self):\n return self.__amount", "def amount(self):\n return self.__amount", "def stealthUpdate(self,ability,amount):\n \n if ability == 'str':\n self.str += amount\n elif ability == 'dex':\n self.dex += amount \n elif ability == 'con':\n self.con += amount\n elif ability == 'int':\n self.int += amount\n elif ability == 'wis':\n self.wis += amount\n elif ability == 'cha':\n self.cha += amount\n elif ability == 'hp':\n self.hp += amount", "def get_transaction_value():\n #Get the user input, tranform it from a string to afloat and store it\n tx_recipient=input('Enter the recipient of the transaction: ')\n tx_amount = float(input('your transaction amount please: '))\n return tx_recipient, tx_amount", "def _amount_all(self):\r\n for order in self:\r\n amount_untaxed = amount_tax = amount_discount = timbre = 0.0\r\n for line in order.order_line:\r\n amount_untaxed += line.price_subtotal\r\n if line.product_id.timbre_fiscal:\r\n amount_tax += line.price_tax - 0.60\r\n timbre = 0.60\r\n else :\r\n amount_tax += line.price_tax\r\n amount_discount += (line.product_uom_qty * line.price_unit * line.discount)/100\r\n order.update({\r\n 'amount_untaxed': order.pricelist_id.currency_id.round(amount_untaxed),\r\n 'amount_tax': order.pricelist_id.currency_id.round(amount_tax),\r\n 'amount_discount': order.pricelist_id.currency_id.round(amount_discount),\r\n 'price_total_no_discount': amount_untaxed + amount_discount,\r\n 'timbre': timbre,\r\n 'amount_total': amount_untaxed + amount_tax + timbre,\r\n })", "def long(self, amount):", "def _amount_all(self):\n for order in self:\n amount_untaxed = 0.0\n for line in order.order_items_ids:\n amount_untaxed += line.price_subtotal\n order.update({\n 'amount_untaxed': amount_untaxed,\n })", "def get_tx_amount():\n return float(input(\"Enter Transaction Amount: \"))", "def update_total(self):\n self.objects[self.ids.AMOUNT].setText(\"Total Spend: \\xA3%.2f\" % (self.owner.total_price() / 100))", "def amount(self):\n return(self.order_master.amount)", "def __init__(self, amount: int, currency: str):\n self._amount = amount\n self._currency = currency", "def _amount_all(self):\n for order in self:\n amount_untaxed = amount_tax = 0.0\n order_amount_total = 0.0\n for line in order.order_line:\n amount_untaxed += line.price_subtotal\n amount_tax += line.price_tax\n self_amount_total = amount_untaxed + amount_tax\n if not order.discount_fixed_percent:\n order_amount_total = self_amount_total\n if 
order.discount_fixed_percent == 'Percent':\n order_amount_total = self_amount_total * (1 - (order.discount or 0.0) / 100.0)\n if order.discount_fixed_percent == 'Fixed':\n order_amount_total = self_amount_total - order.discount_value\n order.update({\n 'amount_untaxed': order.pricelist_id.currency_id.round(amount_untaxed),\n 'amount_tax': order.pricelist_id.currency_id.round(amount_tax),\n 'amount_before_disc': amount_untaxed + amount_tax,\n 'amount_total': order_amount_total,\n })", "def __init__(self,\n amount=None,\n currency=None,\n **kwargs\n\n ):\n self.amount = amount\n self.currency = currency", "def somme_encaissee(self) -> Numeric:\n return query_sum(\n self.offres().filter(paye=True),\n \"prix\",\n output_field=models.DecimalField(),\n )", "def _amount_all(self):\n res = {}\n ut_obj = self.env['l10n.ut']\n for iwdl_brw in self.browse(self.ids):\n # Using a clousure to make this call shorter\n f_xc = ut_obj.sxc(\n iwdl_brw.invoice_id.company_id.currency_id.id,\n iwdl_brw.invoice_id.currency_id.id,\n iwdl_brw.islr_wh_doc_id.date_uid)\n\n res[iwdl_brw.id] = {\n 'amount': (iwdl_brw.base_amount * (iwdl_brw.retencion_islr / 100.0)) or 0.0,\n 'currency_amount': 0.0,\n 'currency_base_amount': 0.0,\n }\n for xml_brw in iwdl_brw.xml_ids:\n res[iwdl_brw.id]['amount'] = xml_brw.wh\n res[iwdl_brw.id]['currency_amount'] = f_xc(\n res[iwdl_brw.id]['amount'])\n res[iwdl_brw.id]['currency_base_amount'] = f_xc(\n iwdl_brw.base_amount)", "def _format_instructed_amount_33B(self, val):\n instructed_amount = val.get('instructed_amount')\n currency = val.get('currency')\n if instructed_amount and currency:\n instructed_amount = apply_currency_precision(currency, abs(float(instructed_amount)))\n val = str(currency) + str(FSwiftMLUtils.float_to_swiftmt(str(instructed_amount)))\n return val", "def withdraw(self, currency, amount, address):\n pass", "def __init__(self):\n self.account_balance = 0\n self.amount = 0", "def _total_price(self, cr, uid, ids, field_name, arg, context={}):\n res = {}\n for record in self.browse(cr, uid, ids, context=context):\n val = 0.0\n for line in record.item_ids:\n val += line.price\n res[record.id] = val \n return res", "def get_transaction_value():\n # Get the user input, transform it from a string to a float and store it\n tx_recipient = input('Enter the recipient of the transaction: ')\n tx_amount = float(input('Enter the transaction amount, please: '))\n return tx_recipient, tx_amount", "def delegate(amount, to_account, account):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n if not account:\n account = mph.config[\"default_account\"]\n if not unlock_wallet(stm):\n return\n acc = Account(account, morphene_instance=stm)\n try:\n amount = float(amount)\n except:\n amount = Amount(str(amount), morphene_instance=stm)\n if amount.symbol == mph.morph_symbol:\n amount = float(amount)\n\n tx = acc.delegate_vesting_shares(to_account, amount)\n tx = json.dumps(tx, indent=4)\n print(tx)", "def amount_ui(self) -> str:\n return \"{:,.2f}\".format(self.amount)", "def set_charge_amount(self, charge_amount):\n self.set_value_into_input_field(self.charge_amount_textbox_locator, charge_amount, True)", "def send_to_airtelmoney(self, transaction_id, phone_number, amount: float, ):\n return self.send_mobile_money(transaction_id, phone_number, amount, channel=\"airtelmoney\")", "def prepare_transfer(self, transfer_amount, from_account, to_account):\n BaseElement(self.driver, locators.TRANSFER_MONEY_BUTTON).click()\n from_account_dropdown = 
BaseElement(self.driver, locators.FROM_ACCOUNT_DROP_DOWN)\n to_account_dropdown = BaseElement(self.driver, locators.TO_ACCOUNT_DROP_DOWN)\n from_account_dropdown.wait_until_displayed()\n to_account_dropdown.wait_until_displayed()\n from_account_dropdown.select_dropdown_value(from_account)\n to_account_dropdown.select_dropdown_value(to_account)\n TextElement(self.driver, locators.AMOUNT_INPUT).set_text(str(transfer_amount))", "def update_account_value(msg):\n if msg is not None and msg.tag == 'TotalCashValue':\n print(\"Account %s, cash: %s %s\" % (msg.account, msg.value, msg.currency))", "def _compute_amount(self):\n raise NotImplementedError()", "def save(self, *args, **kwargs):\n self.order_total = self.membership.price\n if not self.order_number:\n self.order_number = self._generate_order_number()\n super().save(*args, **kwargs)", "def format_amount(self) -> str:\n if self.amount_debit != '':\n return self.amount_debit.replace('-', '')\n return self.amount_credit.replace('-', '')", "def transfer_money(request):\n source = Account.objects.get(pk=int(request.POST.get('source-id', False)))\n destination = Account.objects.get(pk=int(request.POST.get('destination-id', False)))\n amount = float(request.POST.get('amount', False))\n enough_cash = source.available_cash >= amount\n if enough_cash:\n source.available_cash -= amount\n source.save()\n destination.available_cash += amount\n destination.save()\n messages.success(request, 'OK 200: Transfer successfully executed.')\n else:\n messages.error(request, f'Error 400: Tried to transfer {amount} from {source.name}, but only had {source.available_cash} available.')\n \n transaction = Transaction(description=f\"Transfer from {source.name} to {destination.name}.\", success=enough_cash, cash_amount=amount, source_account=source, \n destination_account=destination)\n transaction.save()\n\n return redirect('overview')", "def compute_amount_fields(self, amount, src_currency, company_currency, invoice_currency=False):\n amount_currency = False\n currency_id = False\n if src_currency and src_currency != company_currency:\n amount_currency = amount\n amount = src_currency.with_context(self._context).compute(amount, company_currency)\n currency_id = src_currency.id\n debit = amount > 0 and amount or 0.0\n credit = amount < 0 and -amount or 0.0\n if invoice_currency and invoice_currency != company_currency and not amount_currency:\n amount_currency = src_currency.with_context(self._context).compute(amount, invoice_currency)\n currency_id = invoice_currency.id\n return debit, credit, amount_currency, currency_id", "def addMoney(self, deposit_amount):\r\n self.balance_amt = self.balance_amt + deposit_amount", "def amount(self):\n return self._amount", "def _post(self, *args, **kwargs):\n\n res = super(TransactionIDFileParser, self)._post(*args, **kwargs)\n self.transfer_amount = 0.0\n self.refund_amount = 0.0\n self.commission_amount = 0.0\n rows = []\n\n for row in self.result_row_list:\n rows.append(row)\n if row['amount'] >= 0.0:\n row[\"credit\"] = row[\"amount\"]\n self.transfer_amount += row[\"amount\"]\n else:\n row[\"debit\"] = -row[\"amount\"]\n self.refund_amount += -row[\"amount\"]\n if row.get(\"commission_amount\"):\n self.commission_amount += row[\"commission_amount\"]\n del row[\"commission_amount\"]\n self.result_row_list = rows\n return res", "def amount(self, amount):\n if self.local_vars_configuration.client_side_validation and amount is None: # noqa: E501\n raise ValueError(\"Invalid value for `amount`, must not be `None`\") # noqa: 
E501\n\n self._amount = amount", "def send_to_mpesa(self, transaction_id, phone_number, amount: float, ):\n return self.send_mobile_money(transaction_id, phone_number, amount, channel=\"mpesa\")", "def pay(self, amt: float):\n self._money += amt", "def send_to_elipa(self, transaction_id, phone_number, amount: float, ):\n return self.send_mobile_money(transaction_id, phone_number, amount, channel=\"elipa\")", "def onchange_partner_id_ratification(self, cr, uid, ids, partner_id, journal_id, ttype, price, context={}):\n ratification_journal = self.pool.get('res.company').browse(cr, uid, uid, context=context).ratification_journal_id\n default = {'value':{}}\n context.update({'type':'purchase'})\n default['value']['journal_id'] = ratification_journal.id and ratification_journal.id or self._get_journal(cr, uid, context=context)\n if partner_id and ttype == 'ratification':\n default['value']['account_id'] = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context).property_account_payable.id\n amount = 'amount' in default['value'] and default['value']['amount'] or price\n currency_format = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_format\n amount_in_word = currency_format=='ar' and amount_to_text_ar(amount, currency_format) or amount_to_text(amount)\n\n default['value'].update({'amount_in_word':amount_in_word})\n if journal_id:#TODO:\n allow_check_writing = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context).allow_check_writing\n default['value'].update({'allow_check':allow_check_writing}) \n return default", "def _exchange_amount(amount, rate):\n return '%.2f' % round(float(amount) * float(rate), 2)", "def set_price(self, request, pk):\n return Response('20$')", "def set_price(self, request, pk):\n return Response('20$')", "def onchange_quantity_sum(self,cr,uid,ids,lines,qty,context=None):\n if context is None:\n context = {}\n total = 0\n res = {'value':{}}\n for line in lines:\n total = total + round(line[2]['quantity'],4)\n diff = round(qty - total,4)\n if diff < 0 :\n diff = 0 \n res = {'value':{'qty_total':total,'qty_res':diff}}\n return res", "def post(self, amount, other_account, description, self_memo=\"\", other_memo=\"\", datetime=None):\r\n\r\n #Note: debits are always positive, credits are always negative. They should be negated before displaying\r\n #(expense and liability?) 
accounts\r\n tx = self._new_transaction()\r\n\r\n if datetime:\r\n tx.t_stamp = datetime\r\n #else now()\r\n\r\n tx.description = description\r\n tx.save()\r\n\r\n a1 = self._make_ae(self._DEBIT_IN_DB()*amount, self_memo, tx)\r\n a1.save()\r\n a2 = other_account._make_ae(-self._DEBIT_IN_DB()*amount, other_memo, tx)\r\n a2.save()\r\n\r\n return (a1,a2)", "def report(self):\n print(f\"Money: {self.CURRENCY}{self.profit}\")", "def __init__(self, amount):\n if amount < 0 or amount > 1:\n raise ValueError()\n self.amount = amount", "def getUserCurrency():", "def validate_payment_amount(\n self,\n value: Text,\n dispatcher: CollectingDispatcher,\n tracker: Tracker,\n domain: Dict[Text, Any],\n ) -> Dict[Text, Any]:\n\n credit_card = tracker.get_slot(\"credit_card\")\n cc_balance = tracker.get_slot(\"credit_card_balance\")\n account_balance = float(tracker.get_slot(\"account_balance\"))\n try:\n entity = get_entity_details(\n tracker, \"amount-of-money\"\n ) or get_entity_details(tracker, \"number\")\n amount_currency = parse_duckling_currency(entity)\n if not amount_currency:\n raise (TypeError)\n if account_balance < float(amount_currency.get(\"amount_of_money\")):\n dispatcher.utter_message(template=\"utter_insufficient_funds\")\n return {\"payment_amount\": None}\n return amount_currency\n except (TypeError, AttributeError):\n pass\n if value and value.lower() in cc_balance.get(credit_card.lower()):\n key = value.lower()\n amount = cc_balance.get(credit_card.lower()).get(key)\n amount_type = f\" (your {key})\"\n\n if account_balance < float(amount):\n dispatcher.utter_message(template=\"utter_insufficient_funds\")\n return {\"payment_amount\": None}\n return {\n \"payment_amount\": f\"{amount:.2f}\",\n \"payment_amount_type\": amount_type,\n \"currency\": \"$\",\n }\n\n else:\n dispatcher.utter_message(template=\"utter_no_payment_amount\")\n return {\"payment_amount\": None}", "def username_password_to_amount(posted_data) -> tuple:\n username = posted_data[USERNAME]\n password = posted_data[PASSWORD]\n money = posted_data[AMOUNT]\n to = posted_data[TO]\n return username, password, to, money", "def amount(self):\n return self.subtotal + self.tax_subtotal + self.shipping", "def net_amount(self, net_amount):\n\n self._net_amount = net_amount" ]
[ "0.6432208", "0.6382008", "0.63806385", "0.63244236", "0.62620044", "0.6146637", "0.6121447", "0.6117151", "0.6043075", "0.6043075", "0.6043075", "0.6043075", "0.6043075", "0.6043075", "0.6043075", "0.6043075", "0.6043075", "0.6030417", "0.59542346", "0.5954213", "0.59393656", "0.59172356", "0.57893604", "0.5775964", "0.57725656", "0.5768231", "0.5754966", "0.5754729", "0.575372", "0.57528555", "0.57231873", "0.57076335", "0.570203", "0.56914157", "0.56693196", "0.56612086", "0.5656488", "0.5652144", "0.5636408", "0.56271315", "0.55968255", "0.5582283", "0.55666363", "0.5564858", "0.55638224", "0.5545907", "0.5538124", "0.5534506", "0.5527516", "0.55173844", "0.5517321", "0.5517321", "0.550739", "0.5505797", "0.5494114", "0.5490159", "0.5465856", "0.5444878", "0.5444501", "0.5442696", "0.5438902", "0.543798", "0.5419315", "0.54001594", "0.53960586", "0.5386833", "0.5361002", "0.5347291", "0.53470105", "0.53432775", "0.5314467", "0.53131163", "0.53129977", "0.5311295", "0.5295968", "0.5293631", "0.52857864", "0.52781385", "0.5274044", "0.5265534", "0.52612454", "0.5250444", "0.5245889", "0.5243895", "0.52367437", "0.52075124", "0.5204121", "0.5193596", "0.5186762", "0.5181627", "0.5178685", "0.5178685", "0.5176055", "0.51751584", "0.5157793", "0.51572144", "0.51521724", "0.5151295", "0.5147919", "0.51337224", "0.5129741" ]
0.0
-1
Send null value in PAN fields
def test_12(self): assert 'False' == Api.requestBlock('test-12')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_blank_value_19(field):\n if field.null:\n return None\n else:\n return ''", "def _get_blank_value_18(field):\n if field.null:\n return None\n else:\n return field.value_to_string(None)", "def noneType(value):\r\n return ''", "def nullValueToNan(self) -> None:\n self.cpp.nullValueToNan()", "def none_to_empty(data):\n return data if data is not None else ''", "def fillna_method(request: Any) -> Any:\n return request.param", "def fillna_method(request: Any) -> Any:\n return request.param", "def silent_none(value):\n if value is None:\n return ''\n return value", "def test_format_phone_none(self):\n number1 = None\n self.assertEqual(format_phone(number1), None)", "def _decode_none(value):\n return value", "def test_serialize_none(self):\n self.assertEqual(serialize(None), 'null')", "def test_null_as_null_indicator(self):\n self.custom_null_indicator_template('null')", "def _set_None(self):\n\n self.description = None\n self.func = None", "def none(self):", "def test_add_none_field(self):\n user_id = get_rand_string()\n data = get_rand_string()\n id = get_rand_string()\n\n doc = {}\n doc[\"user_id\"] = user_id\n doc[\"data\"] = data\n doc[\"id\"] = id\n doc[\"num\"] = None\n\n self.conn.add(**doc)", "def notna(self):\n return super().notna()", "def add_nil_values(self, coverage_id=None, value=None, reason=None):", "def add_nil_values(self, coverage_id=None, value=None, reason=None):", "def add_nil_values(self, coverage_id=None, value=None, reason=None):", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def 
null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def _nullify(self, value):\n if not str(value).strip():\n return None\n else:\n return value", "def __str__(self):\r\n # If the original value is None, represent this as 'NULL'\r\n if self.original is None:\r\n return 'NULL'\r\n return str(self.original)", "def allowNoneInternalLabel(self):\n return self.isAllowedInternalLabel(None)", "def allowNoneInternalLabel(self):\n return self.isAllowedInternalLabel(None)", "def na(fluid,network,propname,**params):\n value = -1\n network.set_pore_data(phase=fluid,prop=propname,data=value)", "def _deserialize_null(self, *args):\n return None", "def leaf_NoneType(self, value, depth, available):\n return \"null\", False", "def testNoneValue(self):\n objectID = uuid4()\n user = createUser(u'username', u'password', u'User',\n u'user@example.com')\n namespace = createNamespace(user, u'name')\n tag = createTag(user, namespace, u'tag')\n self.store.add(TagValue(user.id, tag.id, objectID, None))", "def test_none(self):\n self.assertEqual(self.obj.to_json_string(None), '[]')", "def convert_nil(self, v, t):\n assert len(v) == 0\n return None", "def null_enabled(self):\n return False", "def nulltest():", "def form_InputNoneValue(request):\n schema = schemaish.Structure()\n schema.add('inputStrip', schemaish.String(default=''))\n\n form = formish.Form(schema, 'form')\n form['inputStrip'].widget = formish.Input(strip=True, none_value='BANG')\n return form", "def encode_null_term(self, input):\n return input.encode() + b'\\x00'", "def test_value_to_string(self):\r\n obj = self.rp\r\n field = self.rp._meta.get_field_by_name('body')[0]\r\n self.assertNotEqual(field.value_to_string(obj), u'') # expected\r\n self.assertEqual(field.value_to_string(None), u'') # edge case\r", "def NULL(self, t):\n t.value = None\n return t", "def null() -> SetupVal:\n return NullVal()", "def enable_null_tracking(*args, **kwargs): # real signature unknown\n pass", "def get_none1(self):\n pass", "def format_field(self, value, format_spec):\n if value is None:\n return format(value)\n else:\n return super(NoneFormatter, self).format_field(value, format_spec)\n if value is None:\n return format(value)\n else: raise e", "def test_default_null_indicator(self):\n self.custom_null_indicator_template()", "def is_null(space, w_obj):\n return space.wrap(w_obj.tp == space.tp_null)", "def _encode_nullable(data_type, obj, alias_validators, old_style, for_msgpack):\n if obj is not None:\n return _json_compat_obj_encode_helper(\n data_type.validator, obj, alias_validators, old_style, for_msgpack)\n else:\n return None", "def get_is_null_label(self):\n return pgettext_lazy('listfilter AbstractDateTime', 'Has no value')", "def test_field_none_nullable(self):\n node_dict = {\n 'host_name': 'abc'\n }\n try:\n Node(**node_dict)\n except Exception as e:\n self.assertEqual(type(e), ValueError)", "def allowNoneEgressLabel(self):\n return 
self.isAllowedEgressLabel(None)", "def allowNoneEgressLabel(self):\n return self.isAllowedEgressLabel(None)", "def setNone(self):\n self.setValue([])", "def name(self):\n return 'Null'", "def nullValueToZero(self) -> None:\n self.cpp.nullValueToZero()", "def allowNoneIngressLabel(self):\n return self.isAllowedIngressLabel(None)", "def allowNoneIngressLabel(self):\n return self.isAllowedIngressLabel(None)", "def noneToString(text):\n if text in (None, \"\"):\n return \"None\"\n else:\n return str(text)", "def get_prep_value(self, value):\n if (value is UNKNOWN) or (value is ''):\n # If Django tries to save an empty string, send the db None (NULL).\n return None\n else:\n # Otherwise, just pass the value.\n return value", "def nics_none(self, nics_none):\n\n self._nics_none = nics_none", "def null(self):\n val = self.read(4)\n if val != b'null':\n self.on_parser_error(\"null token expected\")\n return null", "def test_empty_value(self):\n avp_val = avp.AVP(0)\n self.assertEqual(avp_val.value, None)\n self.assertEqual(avp_val.payload, None)\n\n # We can then set its value\n avp_val.value = b''\n self.assertEqual(avp_val.value, b'')\n self.assertEqual(avp_val.payload, b'')\n\n # And unset it again\n avp_val.value = None\n self.assertEqual(avp_val.value, None)\n self.assertEqual(avp_val.payload, None)", "def changenonetoNone(s):\r\n if s=='None':\r\n return None\r\n else:\r\n return s", "def field(self):\n return None", "def allow_null_values(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_null_values\")", "def test_mask_secret_null():\n assert utils.mask_secrets(\"\", None) == \"\"", "def allow_null_values(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"allow_null_values\")", "def _set_None(self):\r\n\r\n self.loss_list = None\r\n self.meshsol_list = None\r\n self.loss_index = None\r\n self.logger_name = None\r\n self.axes_dict = None\r\n self.Pstator = None\r\n self.Protor = None\r\n self.Pmagnet = None\r\n self.Pprox = None\r\n self.Pjoule = None\r\n self.coeff_dict = None", "def null_value_handler(datatype, value, null_format):\n if DataType.Name(datatype) == \"STRING\":\n if NullValues.STRING == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DOUBLE\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"FLOAT\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT32\":\n if NullValues.INT32 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT64\":\n if NullValues.INT64 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DURATION\":\n if NullValues.DURATION.equals(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"TIMESTAMP\":\n if NullValues.TIMESTAMP.equals(value):\n return null_format\n return value\n else:\n return value", "def null_value_handler(datatype, value, null_format):\n if DataType.Name(datatype) == \"STRING\":\n if NullValues.STRING == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DOUBLE\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"FLOAT\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT32\":\n if NullValues.INT32 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT64\":\n if NullValues.INT64 == value:\n return null_format\n return value\n elif 
DataType.Name(datatype) == \"DURATION\":\n if NullValues.DURATION.equals(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"TIMESTAMP\":\n if NullValues.TIMESTAMP.equals(value):\n return null_format\n return value\n else:\n return value", "def define_null(rastlist, NoData_Value, Quiet=False):\n\n rastlist = core.enf_rastlist(rastlist)\n\n # iterate through each file in the filelist and set nodata values\n for rastname in rastlist:\n\n arcpy.SetRasterProperties_management(rastname,data_type=\"#\",statistics=\"#\",\n stats_file=\"#\",nodata=\"1 \"+str(NoData_Value))\n \n print(\"Set nulls in {0}\".format(rastname)) \n return", "def get_blank(record, field_name, reason=\" in this case.\"):\n val = recordval(record, field_name)\n if val == \"\":\n return \"\"\n else:\n parser_error(\"field \"+field_name+\" must be blank\"+reason)\n return val", "def __str__(self):\n #{{{ Nicely print values\n text = 'Null values for databases: %s' % self.dbcentral.list()\n\n for value in self.null_vals.keys():\n text += \"\\t%s: %s\" % (value,self.null_vals[value])\n\n return text", "def for_json(self):\n if self.value_is_null:\n return None\n return str(self)", "def test_none(self):\n self.assertEqual(b\"\", self.successResultOf(to_xml(None)))", "def guiField(self, value):\n return None", "def test_undefined_as_null_indicator(self):\n self.custom_null_indicator_template('undefined')", "def blank(self):\n pass", "def missing_information(self, info, field):\n raise NoData" ]
[ "0.6484191", "0.6448958", "0.59863204", "0.5944134", "0.5895153", "0.5776917", "0.5776917", "0.57559484", "0.5753347", "0.5635493", "0.5596744", "0.5581456", "0.5563318", "0.5559931", "0.553137", "0.55202323", "0.5512284", "0.5512284", "0.5512284", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54971886", "0.54929733", "0.54672855", "0.5461712", "0.5461712", "0.5460644", "0.54448426", "0.54296005", "0.54258615", "0.5415614", "0.5405332", "0.54048276", "0.5397816", "0.5375866", "0.53737813", "0.537278", "0.53706545", "0.5320917", "0.53199834", "0.53198916", "0.5319175", "0.5305506", "0.5304801", "0.5304408", "0.5294494", "0.52827334", "0.5268019", "0.5268019", "0.52648693", "0.5263151", "0.5239119", "0.5238133", "0.5238133", "0.5224296", "0.5222325", "0.52126527", "0.5210492", "0.5199837", "0.5186676", "0.51840097", "0.5173658", "0.51728266", "0.5165577", "0.51497173", "0.51405245", "0.51405245", "0.5129694", "0.51289284", "0.51124746", "0.51022685", "0.50973904", "0.50890845", "0.508461", "0.5081061", "0.50804883" ]
0.0
-1
Send special characters in PAN fields
def test_13(self): assert 'False' == Api.requestBlock('test-13')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_value_special_chars(self):\n raw = [\n 0x48,\n 0x65,\n 0x79,\n 0x21,\n 0x3F,\n 0x24,\n 0x20,\n 0xC4,\n 0xD6,\n 0xDC,\n 0xE4,\n 0xF6,\n 0xFC,\n 0xDF,\n ]\n string = \"Hey!?$ ร„ร–รœรครถรผรŸ\"\n self.assertEqual(DPTString.to_knx(string), raw)\n self.assertEqual(DPTString.from_knx(raw), string)", "def test_20_phonenumbers_UnicodeDecodeError(self):\n number_phone = self.samples[2]\n with self.assertRaises(osv.except_osv):\n self.pn._symbol_set_char(number_phone)", "def _hidden_in_unicode(self, txt):", "def test_specialchar(self):\n form_data = self.form_data('vNzwXpzKJyTshvHsuULn')\n form = self.form(data=form_data, user=self.u)\n self.assertFalse(form.is_valid())", "def test_post_special_characters(self):\n self.is_authenticated()\n response = self.post_special_characters()\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.data[\"error\"], \"You cannot post special characters\")", "def encode(self, text):", "def jpn(string):\n # type: (str) -> str\n\n try:\n string = unicode(string, \"utf-8\")\n string = string.encode(\"cp932\")\n return string\n\n except Exception:\n return string", "def display_content(com,message):\n #message = message.encode('utf-8')\n #message = message.decode('ascii', 'ignore')\n safeMsg = filter(lambda x: x in string.printable, message)\n safeMsg = safeMsg.replace('\\n', ' ')\n print \"ALPHA: \", safeMsg\n try:\n #com = serial.Serial(config.devalpha, 9600, timeout=3)\n #com.close()\n #com.open()\n comstr = config.alpha['display'] + safeMsg + config.alpha['eot']\n com.write(comstr)\n #com.close()\n except serial.SerialException as e:\n logging.warning(\"Serial exception: \"+str(e))", "def send_to_outputfield(self, message):\n\n try:\n # First strip characters outside of range\n # that cannot be handled by tkinter output field\n char_list = ''\n for x in range(len(message)):\n if ord(message[x]) in range(65536):\n char_list += message[x]\n message = char_list\n except Exception as e:\n logging.error(str(e))\n logging.exception(\"Exception : \")\n try:\n self.output.insert(END, message + \"\\n\")\n except Exception as e:\n logging.error(str(e))\n logging.exception(\"Exception : \")", "def sendOTP(code):\n # Modify the code here to change from print to any output \n print(\"Your OTP is \" + code + \". 
Kindly do not share it with anyone\")", "def non_secret_char(c):\n return c", "def secret_char(c):\n return \"\\\\raisebox{{0.07ex}}{{{}}}\".format(c)", "def get_address(self):\r\n return \"iDigi\"", "def sendkey_escape(string):\r\n return re.sub(r'([+^%~{}\\[\\]()])', r'{\\1}', string)", "def Print(self,text = \"\"):\n self.Bus.Write_String(self.Address,0x00, text)", "def send_pan(self, value=63, ch=None):\n self.send_control_change(PAN, value, ch=ch)", "def test_contains_nonprintable_characters(self):\n result = attributeAsLDIF(b\"key\", b\"val\\xFFue\")\n self.assertEqual(result, b\"key:: %s\\n\" % encode(b\"val\\xFFue\"))", "def test_unsuccessful_post_answer_with_special_characters(self):\n self.is_authenticated(self.user1)\n response = self.post_answer_with_special_character()\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def viewAbstract(self):\n #Encode text as utf-8", "def encode_text():\n print(f\"{YELLOW}[{MIDDLE_DOT}]{RESET} Enter message to encode: \", end=\"\")\n message = input()\n encoded = LEFT_TO_RIGHT_MARK\n for message_char in message:\n code = '{0}{1}'.format('0' * padding, int(str(to_base(\n ord(message_char), len(zero_space_symbols)))))\n code = code[len(code) - padding:]\n for code_char in code:\n index = int(code_char)\n encoded = encoded + zero_space_symbols[index]\n\n encoded += RIGHT_TO_LEFT_MARK\n\n pyperclip.copy(encoded)\n print(f\"{GREEN}[+]{RESET} Encoded message copied to clipboard. {GREEN}[+]{RESET}\")", "def escape_character_in_string(self, a, text):\n logging.debug(\"in escape character \" + text)\n #self.just_read_char()\n self.read_char()\n self.produce(STRING, text)", "def test_sendUnicodeCommand(self):\n self.p.sendCommand(\"CMD\", (\"param\\u00b9\", \"param\\u00b2\"))\n self.check(b\"CMD param\\xc2\\xb9 param\\xc2\\xb2\\r\\n\")", "def test_special_characters_business_name(self):\n user = \"chairman\"\n msg = self.business_item_class.create_business(\"Ma@en_deleo\", user, \"soft*%ware\", \"nairo@&\")\n print(msg)\n self.assertEqual(msg, {\"message\":\"Business name should not contain special characters\"})", "def writechar(self, char: int, /) -> None:", "def _apply_character_maskings(self):\n for permutation in self.permutations:\n for char_symbol in self.characters.keys():\n for i in permutation.find_all(\"character-link\", ref=char_symbol): \n i.string.replace_with(self.characters[char_symbol])\n\n self.plain_text = \" \".join([permuation.description.text for permuation in self.permutations])\n self.reapply_plain_text_editing()", "def dummy_junction13():\n return 'junction:chr1:176-299:+'", "def dummy_junction12():\n return \"junction:chr1:176-224:+\"", "def dummy_junction14():\n return \"junction:chr1:176-324:+\"", "def get21_str(in_dict):\n return \"\"\"BEGIN:VCARD\nVERSION:2.1\nN;ENCODING=QUOTED-PRINTABLE;CHARSET=UTF-8:;%s;;;\nTEL;VOICE;CELL:%s\nEND:VCARD\n\"\"\"%(quopri.encodestring(in_dict[\"name\"]), in_dict[\"tel\"])", "def characters(self, data):\n pass", "def dummy_junction24():\n return 'junction:chr1:251-399:+'", "def output(self, txt, addr=None):\n (passwd, text) = txt.split(\" \", 1)\n text = text.replace(\"\\00\", \"\")\n if passwd == self._cfg.password:\n for bot in fleet:\n bot.announce(text)", "def display_text(self, text):\n self.write_to_serial(':DISP:TEXT \\'' + text + '\\'')", "def mask_disc_markers(self, text: str) -> str:\n punctuations = \".?!;:-()'\\\"[]\"\n for elem in punctuations:\n text = text.replace(elem, \" \" + elem + \" \")\n text = \" \" + text + \" \"\n for dm in self.dms:\n 
text.replace(\" \" + dm + \" \", \" <mask> \" * len(dm.split()))\n return text", "def test_str(self):\n self.assertEqual(\n \"\\N{SNOWMAN}\",\n bytes_to_str(\"\\N{SNOWMAN}\"),\n )", "def replace_chars(field, esc_chars, rep_ch):\n res_field = \"P\"\n if field is not None:\n res_field = re.sub(esc_chars, rep_ch, field).upper()\n # res_field = \"\".join([rep_ch if ch in esc_chars else ch for ch in field.strip()])\n return res_field", "def gateway(arg):\n\tassert isinstance(arg, str)\n\treturn r\"(?P<%s>[\\w_\\-@\\' \\.]+)\" % (arg,)", "def _transform_by_type(self, text):\n\t\treturn '*' * len(text) if self._model.inputType == 'password' else text", "def test_40_phonenumbers_too_long(self):\n number_phone = self.samples[4]\n with self.assertRaises(osv.except_osv):\n self.pn._symbol_set_char(number_phone)", "def sendPDU(self, pdu):\n # this operation does not verify the contents of the DU\n self.send(pdu.getBufferString())", "def test_submithint_escape(self):\r\n mock_module = CHModuleFactory.create()\r\n json_in = {'answer': '29.0', 'hint': '<script> alert(\"Trololo\"); </script>'}\r\n mock_module.submit_hint(json_in)\r\n self.assertTrue(mock_module.hints['29.0']['0'][0] == u'&lt;script&gt; alert(&quot;Trololo&quot;); &lt;/script&gt;')", "def and_command(self):\n self.write(\"@SP\\nAM=M-1\\nD=M\\nA=A-1\\nM=M&D\\n\")", "def replace_special_chars(self, word):\n try:\n if (self.lang==\"tr\"):\n word = re.sub(u\"\\^db\", u\"+db\", word)\n word = re.sub(u\"\\^\", u\"ยฌ\", word)\n word = re.sub(u\"\\$\", u\"ยฃ\", word)\n except UnicodeDecodeError:\n word = ''\n return word", "def createUnicodeString(self, address: ghidra.program.model.address.Address) -> ghidra.program.model.listing.Data:\n ...", "def cryptate(self):\r\n\r\n intab1 = \"abcdefghijklomnopqrstuvwxyz\"\r\n outtab1 = \"?2p=o)7i(u9/y&t3%rยค5e#w1q!>)\"\r\n# Fetching the writing in textbox\r\n s = self.textbox.toPlainText()\r\n a = s.lower()\r\n# The crypting process, replaces letters in intab1 with outtab1\r\n crypted = (a.translate({ord(x): y for (x, y) in zip(intab1, outtab1)}))\r\n# Clear the textbox\r\n self.textbox.clear()\r\n# Write the crypted text within textbox\r\n self.textbox.setPlainText(crypted)", "def write( self, ucode ):\n\t\tassert isinstance( ucode, unicode ), \"the code to compute in barcode must be a unicode string\" \n\t\t\n\t\t# code barre message must starts and finish with *\n\t\tucode = u'*%s*' % ucode.strip()\n\t\t\n\t\tself.owner.write_bytes( self._bc_start )\n\t\t# Transfert every computed esc sequence into the document \n\t\tfor bytes_seq in self.code_to_sequences( ucode ):\n\t\t\tself.owner.write_bytes( bytes_seq )\n\t\t\tself.owner.write_bytes( eval( '_ns', globals(), self.__dict__ ) ) # add a ns after each caracter sequence\n\t\tself.owner.write_bytes( self._bc_end )", "def tah(pole, cislo_policka, symbol):\n list_pole[cislo_policka] = symbol \n pole = \"\".join(list_pole)\n print(pole)\n return pole", "def get_char_echo(self) -> str:\n ...", "def test_special_characters(self):\n testString = sanitize('[-;]\\`{\\}')\n self.assertEqual(testString, '_________')", "def OnTextCtrlPasswordText(self, event):\r\n\t\tself._password = event.GetString()", "def _encode_supplement(self):", "def _encode_supplement(self):", "def test_ascii_to_phred64(self):\r\n self.assertEqual(ascii_to_phred64('@'), 0)\r\n self.assertEqual(ascii_to_phred64('^'), 30)", "def w(s):\r\n gv[\"epsf\"].write(s + \"\\n\")", "def test_taxa_with_special_characters(self):\r\n fd, taxonomy_fp = mkstemp()\r\n close(fd)\r\n f = open(taxonomy_fp, 
\"w\")\r\n f.write(rdp_id_to_taxonomy_special_chars)\r\n f.close()\r\n self._paths_to_clean_up.append(taxonomy_fp)\r\n\r\n app = RdpTaxonAssigner({\r\n 'id_to_taxonomy_fp': taxonomy_fp,\r\n 'reference_sequences_fp': self.reference_seqs_file.name,\r\n })\r\n res = app(self.tmp_seq_filepath)\r\n obs_lineage, obs_confidence = res['X67228 some description']\r\n\r\n self.assertEqual(obs_lineage, (\r\n \"Bacteria;Proteobacteria;Alphaproteobacteria;Rhizobiales<What;\"\r\n \"Rhizobiaceae&Huh?;Rhizobium\"))\r\n self.assertEqual(obs_confidence, 1.0)", "def test_special_chars(self):\r\n # afaik, location.check_list prevents $ in all fields\r\n org = 'foo.org.edu'\r\n course = 'bar.course-4'\r\n name = 'baz.run_4-3'\r\n location = Location(org, course, name, 'course', name)\r\n prob_locator = loc_mapper().translate_location(\r\n location,\r\n add_entry_if_missing=True\r\n )\r\n reverted_location = loc_mapper().translate_locator_to_location(prob_locator)\r\n self.assertEqual(location, reverted_location)", "def test_phred_to_ascii64(self):\r\n self.assertEqual(phred_to_ascii64(0), '@')\r\n self.assertEqual(phred_to_ascii64(30), '^')", "def __unicode__(self):\n d = ((2, \".\"), (6, \".\"), (10, \"/\"), (15, \"-\"))\n s = list(map(str, self.cnpj))\n \n for i, v in d:\n s.insert(i, v)\n \n r = ''.join(s)\n \n return r", "def sendPDU(self, pdu):\n # this operation does not verify the contents of the PDU\n self.send(pdu.getBufferString())", "def test_specialchar(self):\n form_data = {\n 'username': 'testuser',\n 'password1': 'vNzwXpzKJyTshvHsuULn',\n 'password2': 'vNzwXpzKJyTshvHsuULn'\n }\n form = StrictUserCreationForm(data=form_data)\n self.assertFalse(form.is_valid())", "def escapeEncode(s: unicode) -> unicode:\n ...", "async def badman(self, ctx):\n await ctx.message.edit(content=\"ฬฟฬฟ ฬฟฬฟ ฬฟฬฟ ฬฟ'ฬฟ'\\ฬตอ‡ฬฟฬฟ\\ะท= ( โ–€ อœอžส–โ–€) =ฮต/ฬตอ‡ฬฟฬฟ/โ€™ฬฟโ€™ฬฟ ฬฟ ฬฟฬฟ ฬฟฬฟ ฬฟฬฟ\")", "def phred_to_ascii(p):\n return chr(p+33)", "def test_00_phonenumbers_formatting_en_US(self):\n number_phone = self.samples[0]\n res = self.pn._symbol_set_char(number_phone)\n self.assertEqual(res, '+19545551234', 'e164 phone formatting failed')\n res = self.pn._symbol_get(number_phone)\n self.assertEqual(res, '+1 954-555-1234', 'International phone formatting failed')", "def jid_escape(nodeId):\n if nodeId is None:\n return\n\n newNode = nodeId\n\n newNode = newNode.replace(\"\\\\\", '\\\\5c')\n newNode = newNode.replace(' ', \"\\\\20\")\n newNode = newNode.replace('\"', '\\\\22')\n \n newNode = newNode.replace(\"&\", '\\\\26')\n newNode = newNode.replace(\"'\", '\\\\27')\n newNode = newNode.replace(\"/\", '\\\\2f')\n newNode = newNode.replace(\":\", '\\\\3a')\n newNode = newNode.replace(\"<\", '\\\\3c')\n newNode = newNode.replace(\">\", '\\\\3e')\n newNode = newNode.replace(\"@\", '\\\\40')\n return newNode", "def send(self, s):\n self.port.write(bytes(s, 'latin-1'))\n sys.stdout.write(s)", "def message(self, text):\n\n if( rpi_device ):\n self.clear()\n for char in text:\n if char == '\\n' or char == '^':\n self.cmd(0xC0) # new line\n else:\n self.cmd(ord(char),True)", "def label(mi_, ma_):\n\treturn \"caractรจres Unicode des points de code {} ร  {}\".format(mi_, ma_)", "def _preprocess(self, sent: str) -> str:\n sent = sent.replace(\" \", \"โ–\")\n return \" \".join([c for c in sent])", "def padded_area_code(phone_number):\r\n area_code = grab_area_code(phone_number)\r\n return area_code + \"*******\"", "def name(self):\n return self.tr('NATCAPES')", "def test_keyboard_characters(self):\n pass", "def 
test_contains_special_characters(self):\n for c in b\"\\0\", b\"\\n\", b\"\\r\":\n\n value = b\"foo\" + c + b\"bar\"\n result = attributeAsLDIF(b\"key\", value)\n self.assertEqual(result, b\"key:: %s\\n\" % encode(value))", "def removeSpecialChars(self) -> None:\n self.text = re.sub('[^a-zA-z0-9\\n\\.\\s]', '', self.text)", "def _escape(msg):\n reserved = bytearray('\\x7E\\x7D\\x11\\x13'.encode())\n escaped = bytearray()\n escaped.append(msg[0])\n\n for byte in msg[1:]:\n\n if byte in reserved:\n escaped.append(0x7D)\n escaped.append(byte ^ 0x20)\n else:\n escaped.append(byte)\n\n return escaped", "def barcode_message( self, ucode ):\n\t\tassert isinstance( ucode, unicode ), \"the code to compute in barcode must be a unicode string\" \n\n\t\tfor uchar in ucode:\n\t\t\tif not( uchar in self._char39 ):\n\t\t\t\traise Barcode39Error( '%s char is not listed in Barcode39 characters [0..9,A..Z,space,9,-,.,$,/,+,%]' )\n\t\n\t\treturn u'*%s*' % ucode.strip()", "def rendermsg(self,msg):\n return ' '.join(['%02x'%ord(x) for x in msg])", "def get_telnet_jigstr(self):\n assert(False) #No implementation", "def replace_nonprintables(string):\n\tnew_string = \"\"\n\tmodified = 0\n\tfor c in string:\n\t\to = ord(c)\n\t\tif (o <= 31):\n\t\t\tnew_string += \"^\" + chr(ord('@') + o)\n\t\t\tmodified += 1\n\t\telif (o == 127):\n\t\t\tnew_string += \"^?\"\n\t\t\tmodified += 1\n\t\telse:\n\t\t\tnew_string += c\n\tif modified and Config.Config().urlencoding_mode != \"fixbucket\":\n\t\twarning(\"%d non-printable characters replaced in: %s\" % (modified, new_string))\n\treturn new_string", "def register_all(self):\n # TODO complete this list\n # register special symbols\n self.register(u'\\n\\n', u' \\\\par', encode=False)\n self.register(u'\\n\\n', u'\\\\par', encode=False)\n self.register(u' ', u'\\\\ ', encode=False)\n self.register(u'\\N{EM SPACE}', u'\\\\quad')\n self.register(u'\\N{THIN SPACE}', u' ', decode=False)\n self.register(u'%', u'\\\\%')\n self.register(u'\\N{EN DASH}', u'--')\n self.register(u'\\N{EN DASH}', u'\\\\textendash')\n self.register(u'\\N{EM DASH}', u'---')\n self.register(u'\\N{EM DASH}', u'\\\\textemdash')\n self.register(u'\\N{REPLACEMENT CHARACTER}', u\"????\", decode=False)\n self.register(u'\\N{LEFT SINGLE QUOTATION MARK}', u'`', decode=False)\n self.register(u'\\N{RIGHT SINGLE QUOTATION MARK}', u\"'\", decode=False)\n self.register(u'\\N{LEFT DOUBLE QUOTATION MARK}', u'``')\n self.register(u'\\N{RIGHT DOUBLE QUOTATION MARK}', u\"''\")\n self.register(u'\\N{DOUBLE LOW-9 QUOTATION MARK}', u\",,\")\n self.register(u'\\N{DOUBLE LOW-9 QUOTATION MARK}', u'\\\\glqq',\n encode=False)\n self.register(u'\\N{LEFT-POINTING DOUBLE ANGLE QUOTATION MARK}',\n u'\\\\guillemotleft')\n self.register(u'\\N{RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK}',\n u'\\\\guillemotright')\n self.register(u'\\N{MODIFIER LETTER PRIME}', u\"'\", decode=False)\n self.register(u'\\N{MODIFIER LETTER DOUBLE PRIME}', u\"''\", decode=False)\n self.register(u'\\N{MODIFIER LETTER TURNED COMMA}', u'`', decode=False)\n self.register(u'\\N{MODIFIER LETTER APOSTROPHE}', u\"'\", decode=False)\n self.register(u'\\N{MODIFIER LETTER REVERSED COMMA}', u'`',\n decode=False)\n self.register(u'\\N{DAGGER}', u'\\\\dag')\n self.register(u'\\N{DOUBLE DAGGER}', u'\\\\ddag')\n\n self.register(u'\\\\', u'\\\\textbackslash', encode=False)\n self.register(u'\\\\', u'\\\\backslash', mode='math', encode=False)\n\n self.register(u'\\N{TILDE OPERATOR}', u'\\\\sim', mode='math')\n self.register(u'\\N{MODIFIER LETTER LOW TILDE}',\n 
u'\\\\texttildelow', package='textcomp')\n self.register(u'\\N{SMALL TILDE}', u'\\\\~{}')\n self.register(u'~', u'\\\\textasciitilde')\n\n self.register(u'\\N{BULLET}', u'\\\\bullet', mode='math')\n self.register(u'\\N{BULLET}', u'\\\\textbullet', package='textcomp')\n self.register(u'\\N{ASTERISK OPERATOR}', u'\\\\ast', mode='math')\n\n self.register(u'\\N{NUMBER SIGN}', u'\\\\#')\n self.register(u'\\N{LOW LINE}', u'\\\\_')\n self.register(u'\\N{AMPERSAND}', u'\\\\&')\n self.register(u'\\N{NO-BREAK SPACE}', u'~')\n self.register(u'\\N{INVERTED EXCLAMATION MARK}', u'!`')\n self.register(u'\\N{CENT SIGN}', u'\\\\not{c}')\n\n self.register(u'\\N{POUND SIGN}', u'\\\\pounds')\n self.register(u'\\N{POUND SIGN}', u'\\\\textsterling', package='textcomp')\n self.register(u'\\N{YEN SIGN}', u'\\\\yen')\n self.register(u'\\N{YEN SIGN}', u'\\\\textyen', package='textcomp')\n\n self.register(u'\\N{SECTION SIGN}', u'\\\\S')\n self.register(u'\\N{DIAERESIS}', u'\\\\\"{}')\n self.register(u'\\N{NOT SIGN}', u'\\\\neg')\n self.register(u'\\N{HYPHEN}', u'-', decode=False)\n self.register(u'\\N{SOFT HYPHEN}', u'\\\\-')\n self.register(u'\\N{MACRON}', u'\\\\={}')\n\n self.register(u'\\N{DEGREE SIGN}', u'^\\\\circ', mode='math')\n self.register(u'\\N{DEGREE SIGN}', u'\\\\textdegree', package='textcomp')\n\n self.register(u'\\N{MINUS SIGN}', u'-', mode='math')\n self.register(u'\\N{PLUS-MINUS SIGN}', u'\\\\pm', mode='math')\n self.register(u'\\N{PLUS-MINUS SIGN}', u'\\\\textpm', package='textcomp')\n\n self.register(u'\\N{SUPERSCRIPT TWO}', u'^2', mode='math')\n self.register(\n u'\\N{SUPERSCRIPT TWO}',\n u'\\\\texttwosuperior',\n package='textcomp')\n\n self.register(u'\\N{SUPERSCRIPT THREE}', u'^3', mode='math')\n self.register(\n u'\\N{SUPERSCRIPT THREE}',\n u'\\\\textthreesuperior',\n package='textcomp')\n\n self.register(u'\\N{ACUTE ACCENT}', u\"\\\\'{}\")\n\n self.register(u'\\N{MICRO SIGN}', u'\\\\mu', mode='math')\n self.register(u'\\N{MICRO SIGN}', u'\\\\micro', package='gensymu')\n\n self.register(u'\\N{PILCROW SIGN}', u'\\\\P')\n\n self.register(u'\\N{MIDDLE DOT}', u'\\\\cdot', mode='math')\n self.register(\n u'\\N{MIDDLE DOT}',\n u'\\\\textperiodcentered',\n package='textcomp')\n\n self.register(u'\\N{CEDILLA}', u'\\\\c{}')\n\n self.register(u'\\N{SUPERSCRIPT ONE}', u'^1', mode='math')\n self.register(\n u'\\N{SUPERSCRIPT ONE}',\n u'\\\\textonesuperior',\n package='textcomp')\n\n self.register(u'\\N{INVERTED QUESTION MARK}', u'?`')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH GRAVE}', u'\\\\`A')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH CIRCUMFLEX}', u'\\\\^A')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH TILDE}', u'\\\\~A')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH DIAERESIS}', u'\\\\\"A')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH RING ABOVE}', u'\\\\AA')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH RING ABOVE}', u'\\\\r A',\n encode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER AE}', u'\\\\AE')\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH CEDILLA}', u'\\\\c C')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH GRAVE}', u'\\\\`E')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH ACUTE}', u\"\\\\'E\")\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH CIRCUMFLEX}', u'\\\\^E')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH DIAERESIS}', u'\\\\\"E')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH GRAVE}', u'\\\\`I')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH CIRCUMFLEX}', u'\\\\^I')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH DIAERESIS}', 
u'\\\\\"I')\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH TILDE}', u'\\\\~N')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH GRAVE}', u'\\\\`O')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH ACUTE}', u\"\\\\'O\")\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH CIRCUMFLEX}', u'\\\\^O')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH TILDE}', u'\\\\~O')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH DIAERESIS}', u'\\\\\"O')\n self.register(u'\\N{MULTIPLICATION SIGN}', u'\\\\times', mode='math')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH STROKE}', u'\\\\O')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH GRAVE}', u'\\\\`U')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH ACUTE}', u\"\\\\'U\")\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH CIRCUMFLEX}', u'\\\\^U')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH DIAERESIS}', u'\\\\\"U')\n self.register(u'\\N{LATIN CAPITAL LETTER Y WITH ACUTE}', u\"\\\\'Y\")\n self.register(u'\\N{LATIN SMALL LETTER SHARP S}', u'\\\\ss')\n self.register(u'\\N{LATIN SMALL LETTER A WITH GRAVE}', u'\\\\`a')\n self.register(u'\\N{LATIN SMALL LETTER A WITH ACUTE}', u\"\\\\'a\")\n self.register(u'\\N{LATIN SMALL LETTER A WITH CIRCUMFLEX}', u'\\\\^a')\n self.register(u'\\N{LATIN SMALL LETTER A WITH TILDE}', u'\\\\~a')\n self.register(u'\\N{LATIN SMALL LETTER A WITH DIAERESIS}', u'\\\\\"a')\n self.register(u'\\N{LATIN SMALL LETTER A WITH RING ABOVE}', u'\\\\aa')\n self.register(u'\\N{LATIN SMALL LETTER A WITH RING ABOVE}', u'\\\\r a',\n encode=False)\n self.register(u'\\N{LATIN SMALL LETTER AE}', u'\\\\ae')\n self.register(u'\\N{LATIN SMALL LETTER C WITH CEDILLA}', u'\\\\c c')\n self.register(u'\\N{LATIN SMALL LETTER E WITH GRAVE}', u'\\\\`e')\n self.register(u'\\N{LATIN SMALL LETTER E WITH ACUTE}', u\"\\\\'e\")\n self.register(u'\\N{LATIN SMALL LETTER E WITH CIRCUMFLEX}', u'\\\\^e')\n self.register(u'\\N{LATIN SMALL LETTER E WITH DIAERESIS}', u'\\\\\"e')\n self.register(u'\\N{LATIN SMALL LETTER I WITH GRAVE}', u'\\\\`\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH GRAVE}', u'\\\\`i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH ACUTE}', u\"\\\\'\\\\i\")\n self.register(u'\\N{LATIN SMALL LETTER I WITH ACUTE}', u\"\\\\'i\")\n self.register(u'\\N{LATIN SMALL LETTER I WITH CIRCUMFLEX}', u'\\\\^\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH CIRCUMFLEX}', u'\\\\^i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH DIAERESIS}', u'\\\\\"\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH DIAERESIS}', u'\\\\\"i')\n self.register(u'\\N{LATIN SMALL LETTER N WITH TILDE}', u'\\\\~n')\n self.register(u'\\N{LATIN SMALL LETTER O WITH GRAVE}', u'\\\\`o')\n self.register(u'\\N{LATIN SMALL LETTER O WITH ACUTE}', u\"\\\\'o\")\n self.register(u'\\N{LATIN SMALL LETTER O WITH CIRCUMFLEX}', u'\\\\^o')\n self.register(u'\\N{LATIN SMALL LETTER O WITH TILDE}', u'\\\\~o')\n self.register(u'\\N{LATIN SMALL LETTER O WITH DIAERESIS}', u'\\\\\"o')\n self.register(u'\\N{DIVISION SIGN}', u'\\\\div', mode='math')\n self.register(u'\\N{LATIN SMALL LETTER O WITH STROKE}', u'\\\\o')\n self.register(u'\\N{LATIN SMALL LETTER U WITH GRAVE}', u'\\\\`u')\n self.register(u'\\N{LATIN SMALL LETTER U WITH ACUTE}', u\"\\\\'u\")\n self.register(u'\\N{LATIN SMALL LETTER U WITH CIRCUMFLEX}', u'\\\\^u')\n self.register(u'\\N{LATIN SMALL LETTER U WITH DIAERESIS}', u'\\\\\"u')\n self.register(u'\\N{LATIN SMALL LETTER Y WITH ACUTE}', u\"\\\\'y\")\n self.register(u'\\N{LATIN SMALL LETTER Y WITH DIAERESIS}', u'\\\\\"y')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH 
MACRON}', u'\\\\=A')\n self.register(u'\\N{LATIN SMALL LETTER A WITH MACRON}', u'\\\\=a')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH BREVE}', u'\\\\u A')\n self.register(u'\\N{LATIN SMALL LETTER A WITH BREVE}', u'\\\\u a')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH OGONEK}', u'\\\\k A')\n self.register(u'\\N{LATIN SMALL LETTER A WITH OGONEK}', u'\\\\k a')\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH ACUTE}', u\"\\\\'C\")\n self.register(u'\\N{LATIN SMALL LETTER C WITH ACUTE}', u\"\\\\'c\")\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH CIRCUMFLEX}', u'\\\\^C')\n self.register(u'\\N{LATIN SMALL LETTER C WITH CIRCUMFLEX}', u'\\\\^c')\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH DOT ABOVE}', u'\\\\.C')\n self.register(u'\\N{LATIN SMALL LETTER C WITH DOT ABOVE}', u'\\\\.c')\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH CARON}', u'\\\\v C')\n self.register(u'\\N{LATIN SMALL LETTER C WITH CARON}', u'\\\\v c')\n self.register(u'\\N{LATIN CAPITAL LETTER D WITH CARON}', u'\\\\v D')\n self.register(u'\\N{LATIN SMALL LETTER D WITH CARON}', u'\\\\v d')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH MACRON}', u'\\\\=E')\n self.register(u'\\N{LATIN SMALL LETTER E WITH MACRON}', u'\\\\=e')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH BREVE}', u'\\\\u E')\n self.register(u'\\N{LATIN SMALL LETTER E WITH BREVE}', u'\\\\u e')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH DOT ABOVE}', u'\\\\.E')\n self.register(u'\\N{LATIN SMALL LETTER E WITH DOT ABOVE}', u'\\\\.e')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH OGONEK}', u'\\\\k E')\n self.register(u'\\N{LATIN SMALL LETTER E WITH OGONEK}', u'\\\\k e')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH CARON}', u'\\\\v E')\n self.register(u'\\N{LATIN SMALL LETTER E WITH CARON}', u'\\\\v e')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH CIRCUMFLEX}', u'\\\\^G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH CIRCUMFLEX}', u'\\\\^g')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH BREVE}', u'\\\\u G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH BREVE}', u'\\\\u g')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH DOT ABOVE}', u'\\\\.G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH DOT ABOVE}', u'\\\\.g')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH CEDILLA}', u'\\\\c G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH CEDILLA}', u'\\\\c g')\n self.register(u'\\N{LATIN CAPITAL LETTER H WITH CIRCUMFLEX}', u'\\\\^H')\n self.register(u'\\N{LATIN SMALL LETTER H WITH CIRCUMFLEX}', u'\\\\^h')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH TILDE}', u'\\\\~I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH TILDE}', u'\\\\~\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH TILDE}', u'\\\\~i')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH MACRON}', u'\\\\=I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH MACRON}', u'\\\\=\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH MACRON}', u'\\\\=i')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH BREVE}', u'\\\\u I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH BREVE}', u'\\\\u\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH BREVE}', u'\\\\u i')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH OGONEK}', u'\\\\k I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH OGONEK}', u'\\\\k i')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH DOT ABOVE}', u'\\\\.I')\n self.register(u'\\N{LATIN SMALL LETTER DOTLESS I}', u'\\\\i')\n self.register(u'\\N{LATIN CAPITAL LIGATURE IJ}', u'IJ', decode=False)\n self.register(u'\\N{LATIN SMALL LIGATURE 
IJ}', u'ij', decode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER J WITH CIRCUMFLEX}', u'\\\\^J')\n self.register(u'\\N{LATIN SMALL LETTER J WITH CIRCUMFLEX}', u'\\\\^\\\\j')\n self.register(u'\\N{LATIN SMALL LETTER J WITH CIRCUMFLEX}', u'\\\\^j')\n self.register(u'\\N{LATIN CAPITAL LETTER K WITH CEDILLA}', u'\\\\c K')\n self.register(u'\\N{LATIN SMALL LETTER K WITH CEDILLA}', u'\\\\c k')\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH ACUTE}', u\"\\\\'L\")\n self.register(u'\\N{LATIN SMALL LETTER L WITH ACUTE}', u\"\\\\'l\")\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH CEDILLA}', u'\\\\c L')\n self.register(u'\\N{LATIN SMALL LETTER L WITH CEDILLA}', u'\\\\c l')\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH CARON}', u'\\\\v L')\n self.register(u'\\N{LATIN SMALL LETTER L WITH CARON}', u'\\\\v l')\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH STROKE}', u'\\\\L')\n self.register(u'\\N{LATIN SMALL LETTER L WITH STROKE}', u'\\\\l')\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH ACUTE}', u\"\\\\'N\")\n self.register(u'\\N{LATIN SMALL LETTER N WITH ACUTE}', u\"\\\\'n\")\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH CEDILLA}', u'\\\\c N')\n self.register(u'\\N{LATIN SMALL LETTER N WITH CEDILLA}', u'\\\\c n')\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH CARON}', u'\\\\v N')\n self.register(u'\\N{LATIN SMALL LETTER N WITH CARON}', u'\\\\v n')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH MACRON}', u'\\\\=O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH MACRON}', u'\\\\=o')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH BREVE}', u'\\\\u O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH BREVE}', u'\\\\u o')\n self.register(\n u'\\N{LATIN CAPITAL LETTER O WITH DOUBLE ACUTE}',\n u'\\\\H O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH DOUBLE ACUTE}', u'\\\\H o')\n self.register(u'\\N{LATIN CAPITAL LIGATURE OE}', u'\\\\OE')\n self.register(u'\\N{LATIN SMALL LIGATURE OE}', u'\\\\oe')\n self.register(u'\\N{LATIN CAPITAL LETTER R WITH ACUTE}', u\"\\\\'R\")\n self.register(u'\\N{LATIN SMALL LETTER R WITH ACUTE}', u\"\\\\'r\")\n self.register(u'\\N{LATIN CAPITAL LETTER R WITH CEDILLA}', u'\\\\c R')\n self.register(u'\\N{LATIN SMALL LETTER R WITH CEDILLA}', u'\\\\c r')\n self.register(u'\\N{LATIN CAPITAL LETTER R WITH CARON}', u'\\\\v R')\n self.register(u'\\N{LATIN SMALL LETTER R WITH CARON}', u'\\\\v r')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH ACUTE}', u\"\\\\'S\")\n self.register(u'\\N{LATIN SMALL LETTER S WITH ACUTE}', u\"\\\\'s\")\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH CIRCUMFLEX}', u'\\\\^S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH CIRCUMFLEX}', u'\\\\^s')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH CEDILLA}', u'\\\\c S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH CEDILLA}', u'\\\\c s')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH CARON}', u'\\\\v S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH CARON}', u'\\\\v s')\n self.register(u'\\N{LATIN CAPITAL LETTER T WITH CEDILLA}', u'\\\\c T')\n self.register(u'\\N{LATIN SMALL LETTER T WITH CEDILLA}', u'\\\\c t')\n self.register(u'\\N{LATIN CAPITAL LETTER T WITH CARON}', u'\\\\v T')\n self.register(u'\\N{LATIN SMALL LETTER T WITH CARON}', u'\\\\v t')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH TILDE}', u'\\\\~U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH TILDE}', u'\\\\~u')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH MACRON}', u'\\\\=U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH MACRON}', u'\\\\=u')\n self.register(u'\\N{LATIN CAPITAL LETTER U 
WITH BREVE}', u'\\\\u U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH BREVE}', u'\\\\u u')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH RING ABOVE}', u'\\\\r U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH RING ABOVE}', u'\\\\r u')\n self.register(\n u'\\N{LATIN CAPITAL LETTER U WITH DOUBLE ACUTE}',\n u'\\\\H U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH DOUBLE ACUTE}', u'\\\\H u')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH OGONEK}', u'\\\\k U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH OGONEK}', u'\\\\k u')\n self.register(u'\\N{LATIN CAPITAL LETTER W WITH CIRCUMFLEX}', u'\\\\^W')\n self.register(u'\\N{LATIN SMALL LETTER W WITH CIRCUMFLEX}', u'\\\\^w')\n self.register(u'\\N{LATIN CAPITAL LETTER Y WITH CIRCUMFLEX}', u'\\\\^Y')\n self.register(u'\\N{LATIN SMALL LETTER Y WITH CIRCUMFLEX}', u'\\\\^y')\n self.register(u'\\N{LATIN CAPITAL LETTER Y WITH DIAERESIS}', u'\\\\\"Y')\n self.register(u'\\N{LATIN CAPITAL LETTER Z WITH ACUTE}', u\"\\\\'Z\")\n self.register(u'\\N{LATIN SMALL LETTER Z WITH ACUTE}', u\"\\\\'z\")\n self.register(u'\\N{LATIN CAPITAL LETTER Z WITH DOT ABOVE}', u'\\\\.Z')\n self.register(u'\\N{LATIN SMALL LETTER Z WITH DOT ABOVE}', u'\\\\.z')\n self.register(u'\\N{LATIN CAPITAL LETTER Z WITH CARON}', u'\\\\v Z')\n self.register(u'\\N{LATIN SMALL LETTER Z WITH CARON}', u'\\\\v z')\n self.register(u'\\N{LATIN CAPITAL LETTER DZ WITH CARON}', u'D\\\\v Z')\n self.register(\n u'\\N{LATIN CAPITAL LETTER D WITH SMALL LETTER Z WITH CARON}',\n u'D\\\\v z')\n self.register(u'\\N{LATIN SMALL LETTER DZ WITH CARON}', u'd\\\\v z')\n self.register(u'\\N{LATIN CAPITAL LETTER LJ}', u'LJ', decode=False)\n self.register(\n u'\\N{LATIN CAPITAL LETTER L WITH SMALL LETTER J}',\n u'Lj',\n decode=False)\n self.register(u'\\N{LATIN SMALL LETTER LJ}', u'lj', decode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER NJ}', u'NJ', decode=False)\n self.register(\n u'\\N{LATIN CAPITAL LETTER N WITH SMALL LETTER J}',\n u'Nj',\n decode=False)\n self.register(u'\\N{LATIN SMALL LETTER NJ}', u'nj', decode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH CARON}', u'\\\\v A')\n self.register(u'\\N{LATIN SMALL LETTER A WITH CARON}', u'\\\\v a')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH CARON}', u'\\\\v I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH CARON}', u'\\\\v\\\\i')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH CARON}', u'\\\\v O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH CARON}', u'\\\\v o')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH CARON}', u'\\\\v U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH CARON}', u'\\\\v u')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH CARON}', u'\\\\v G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH CARON}', u'\\\\v g')\n self.register(u'\\N{LATIN CAPITAL LETTER K WITH CARON}', u'\\\\v K')\n self.register(u'\\N{LATIN SMALL LETTER K WITH CARON}', u'\\\\v k')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH OGONEK}', u'\\\\k O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH OGONEK}', u'\\\\k o')\n self.register(u'\\N{LATIN SMALL LETTER J WITH CARON}', u'\\\\v\\\\j')\n self.register(u'\\N{LATIN CAPITAL LETTER DZ}', u'DZ', decode=False)\n self.register(\n u'\\N{LATIN CAPITAL LETTER D WITH SMALL LETTER Z}',\n u'Dz',\n decode=False)\n self.register(u'\\N{LATIN SMALL LETTER DZ}', u'dz', decode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH ACUTE}', u\"\\\\'G\")\n self.register(u'\\N{LATIN SMALL LETTER G WITH ACUTE}', u\"\\\\'g\")\n self.register(u'\\N{LATIN CAPITAL LETTER AE WITH ACUTE}', 
u\"\\\\'\\\\AE\")\n self.register(u'\\N{LATIN SMALL LETTER AE WITH ACUTE}', u\"\\\\'\\\\ae\")\n self.register(\n u'\\N{LATIN CAPITAL LETTER O WITH STROKE AND ACUTE}',\n u\"\\\\'\\\\O\")\n self.register(\n u'\\N{LATIN SMALL LETTER O WITH STROKE AND ACUTE}',\n u\"\\\\'\\\\o\")\n self.register(u'\\N{LATIN CAPITAL LETTER ETH}', u'\\\\DH')\n self.register(u'\\N{LATIN SMALL LETTER ETH}', u'\\\\dh')\n self.register(u'\\N{LATIN CAPITAL LETTER THORN}', u'\\\\TH')\n self.register(u'\\N{LATIN SMALL LETTER THORN}', u'\\\\th')\n self.register(u'\\N{LATIN CAPITAL LETTER D WITH STROKE}', u'\\\\DJ')\n self.register(u'\\N{LATIN SMALL LETTER D WITH STROKE}', u'\\\\dj')\n self.register(u'\\N{LATIN CAPITAL LETTER D WITH DOT BELOW}', u'\\\\d D')\n self.register(u'\\N{LATIN SMALL LETTER D WITH DOT BELOW}', u'\\\\d d')\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH DOT BELOW}', u'\\\\d L')\n self.register(u'\\N{LATIN SMALL LETTER L WITH DOT BELOW}', u'\\\\d l')\n self.register(u'\\N{LATIN CAPITAL LETTER M WITH DOT BELOW}', u'\\\\d M')\n self.register(u'\\N{LATIN SMALL LETTER M WITH DOT BELOW}', u'\\\\d m')\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH DOT BELOW}', u'\\\\d N')\n self.register(u'\\N{LATIN SMALL LETTER N WITH DOT BELOW}', u'\\\\d n')\n self.register(u'\\N{LATIN CAPITAL LETTER R WITH DOT BELOW}', u'\\\\d R')\n self.register(u'\\N{LATIN SMALL LETTER R WITH DOT BELOW}', u'\\\\d r')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH DOT BELOW}', u'\\\\d S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH DOT BELOW}', u'\\\\d s')\n self.register(u'\\N{LATIN CAPITAL LETTER T WITH DOT BELOW}', u'\\\\d T')\n self.register(u'\\N{LATIN SMALL LETTER T WITH DOT BELOW}', u'\\\\d t')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH COMMA BELOW}',\n u'\\\\textcommabelow S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH COMMA BELOW}',\n u'\\\\textcommabelow s')\n self.register(u'\\N{LATIN CAPITAL LETTER T WITH COMMA BELOW}',\n u'\\\\textcommabelow T')\n self.register(u'\\N{LATIN SMALL LETTER T WITH COMMA BELOW}',\n u'\\\\textcommabelow t')\n self.register(u'\\N{PARTIAL DIFFERENTIAL}', u'\\\\partial', mode='math')\n self.register(u'\\N{N-ARY PRODUCT}', u'\\\\prod', mode='math')\n self.register(u'\\N{N-ARY SUMMATION}', u'\\\\sum', mode='math')\n self.register(u'\\N{SQUARE ROOT}', u'\\\\surd', mode='math')\n self.register(u'\\N{INFINITY}', u'\\\\infty', mode='math')\n self.register(u'\\N{INTEGRAL}', u'\\\\int', mode='math')\n self.register(u'\\N{INTERSECTION}', u'\\\\cap', mode='math')\n self.register(u'\\N{UNION}', u'\\\\cup', mode='math')\n self.register(u'\\N{RIGHTWARDS ARROW}', u'\\\\rightarrow', mode='math')\n self.register(\n u'\\N{RIGHTWARDS DOUBLE ARROW}',\n u'\\\\Rightarrow',\n mode='math')\n self.register(u'\\N{LEFTWARDS ARROW}', u'\\\\leftarrow', mode='math')\n self.register(\n u'\\N{LEFTWARDS DOUBLE ARROW}',\n u'\\\\Leftarrow',\n mode='math')\n self.register(u'\\N{LOGICAL OR}', u'\\\\vee', mode='math')\n self.register(u'\\N{LOGICAL AND}', u'\\\\wedge', mode='math')\n self.register(u'\\N{ALMOST EQUAL TO}', u'\\\\approx', mode='math')\n self.register(u'\\N{NOT EQUAL TO}', u'\\\\neq', mode='math')\n self.register(u'\\N{LESS-THAN OR EQUAL TO}', u'\\\\leq', mode='math')\n self.register(u'\\N{GREATER-THAN OR EQUAL TO}', u'\\\\geq', mode='math')\n self.register(u'\\N{MODIFIER LETTER CIRCUMFLEX ACCENT}', u'\\\\^{}')\n self.register(u'\\N{CARON}', u'\\\\v{}')\n self.register(u'\\N{BREVE}', u'\\\\u{}')\n self.register(u'\\N{DOT ABOVE}', u'\\\\.{}')\n self.register(u'\\N{RING ABOVE}', u'\\\\r{}')\n 
self.register(u'\\N{OGONEK}', u'\\\\k{}')\n self.register(u'\\N{DOUBLE ACUTE ACCENT}', u'\\\\H{}')\n self.register(u'\\N{LATIN SMALL LIGATURE FI}', u'fi', decode=False)\n self.register(u'\\N{LATIN SMALL LIGATURE FL}', u'fl', decode=False)\n self.register(u'\\N{LATIN SMALL LIGATURE FF}', u'ff', decode=False)\n\n self.register(u'\\N{GREEK SMALL LETTER ALPHA}', u'\\\\alpha', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER BETA}', u'\\\\beta', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER GAMMA}', u'\\\\gamma', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER DELTA}', u'\\\\delta', mode='math')\n self.register(\n u'\\N{GREEK SMALL LETTER EPSILON}',\n u'\\\\epsilon',\n mode='math')\n self.register(u'\\N{GREEK SMALL LETTER ZETA}', u'\\\\zeta', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER ETA}', u'\\\\eta', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER THETA}', u'\\\\theta', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER THETA}', u'\\\\texttheta',\n package='textgreek', encode=False)\n self.register(u'\\N{GREEK SMALL LETTER IOTA}', u'\\\\iota', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER KAPPA}', u'\\\\kappa', mode='math')\n self.register(\n u'\\N{GREEK SMALL LETTER LAMDA}',\n u'\\\\lambda',\n mode='math') # LAMDA not LAMBDA\n self.register(u'\\N{GREEK SMALL LETTER MU}', u'\\\\mu', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER NU}', u'\\\\nu', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER XI}', u'\\\\xi', mode='math')\n self.register(\n u'\\N{GREEK SMALL LETTER OMICRON}',\n u'\\\\omicron',\n mode='math')\n self.register(u'\\N{GREEK SMALL LETTER PI}', u'\\\\pi', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER RHO}', u'\\\\rho', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER SIGMA}', u'\\\\sigma', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER TAU}', u'\\\\tau', mode='math')\n self.register(\n u'\\N{GREEK SMALL LETTER UPSILON}',\n u'\\\\upsilon',\n mode='math')\n self.register(u'\\N{GREEK SMALL LETTER PHI}', u'\\\\phi', mode='math')\n self.register(u'\\N{GREEK PHI SYMBOL}', u'\\\\varphi', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER CHI}', u'\\\\chi', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER PSI}', u'\\\\psi', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER OMEGA}', u'\\\\omega', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER ALPHA}',\n u'\\\\Alpha',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER BETA}', u'\\\\Beta', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER GAMMA}',\n u'\\\\Gamma',\n mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER DELTA}',\n u'\\\\Delta',\n mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER EPSILON}',\n u'\\\\Epsilon',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER ZETA}', u'\\\\Zeta', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER ETA}', u'\\\\Eta', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER THETA}',\n u'\\\\Theta',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER IOTA}', u'\\\\Iota', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER KAPPA}',\n u'\\\\Kappa',\n mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER LAMDA}',\n u'\\\\Lambda',\n mode='math') # LAMDA not LAMBDA\n self.register(u'\\N{GREEK CAPITAL LETTER MU}', u'\\\\Mu', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER NU}', u'\\\\Nu', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER XI}', u'\\\\Xi', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER OMICRON}',\n 
u'\\\\Omicron',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER PI}', u'\\\\Pi', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER RHO}', u'\\\\Rho', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER SIGMA}',\n u'\\\\Sigma',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER TAU}', u'\\\\Tau', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER UPSILON}',\n u'\\\\Upsilon',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER PHI}', u'\\\\Phi', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER CHI}', u'\\\\Chi', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER PSI}', u'\\\\Psi', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER OMEGA}',\n u'\\\\Omega',\n mode='math')\n self.register(u'\\N{COPYRIGHT SIGN}', u'\\\\copyright')\n self.register(u'\\N{COPYRIGHT SIGN}', u'\\\\textcopyright')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH ACUTE}', u\"\\\\'A\")\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH ACUTE}', u\"\\\\'I\")\n self.register(u'\\N{HORIZONTAL ELLIPSIS}', u'\\\\ldots')\n self.register(u'\\N{TRADE MARK SIGN}', u'^{TM}', mode='math')\n self.register(\n u'\\N{TRADE MARK SIGN}',\n u'\\\\texttrademark',\n package='textcomp')\n self.register(\n u'\\N{REGISTERED SIGN}',\n u'\\\\textregistered',\n package='textcomp')\n # \\=O and \\=o will be translated into Ō and ō before we can\n # match the full latex string... so decoding disabled for now\n self.register(u'Ǭ', text_type(r'\\textogonekcentered{\\=O}'),\n decode=False)\n self.register(u'ǭ', text_type(r'\\textogonekcentered{\\=o}'),\n decode=False)\n self.register(u'ℕ', text_type(r'\\mathbb{N}'), mode='math')\n self.register(u'ℕ', text_type(r'\\mathbb N'), mode='math', decode=False)\n self.register(u'ℤ', text_type(r'\\mathbb{Z}'), mode='math')\n self.register(u'ℤ', text_type(r'\\mathbb Z'), mode='math', decode=False)\n self.register(u'ℚ', text_type(r'\\mathbb{Q}'), mode='math')\n self.register(u'ℚ', text_type(r'\\mathbb Q'), mode='math', decode=False)\n self.register(u'ℝ', text_type(r'\\mathbb{R}'), mode='math')\n self.register(u'ℝ', text_type(r'\\mathbb R'), mode='math', decode=False)\n self.register(u'ℂ', text_type(r'\\mathbb{C}'), mode='math')\n self.register(u'ℂ', text_type(r'\\mathbb C'), mode='math', decode=False)", "def password(self) -> str:", "def state_text(self, byte):\n c = chr(byte)\n if byte == telnet_IAC:\n self.next_fn = self.state_cmd\n self.telnet_cmd = []\n elif c in telnet_printable:\n self.inbuffer += c\n if self.termious:\n self.termious_hack(byte)", "def test_alnum(self, address):\n t=address.replace(\" \", \"\").isalnum()\n assert t, \"it only accept digits and letters\"", "def answer(self, layer):\n self.active = True\n return \"Teretulemast mängu HANGMAN! \\n Ülesanne on ülilihtne, sina pakud tähti ja mina ütlen, \" \\\n \"et seda tähte \" \\\n \"minu mõeldud sõnas ei ole. Okei, nali, kindlasti suudad mõne tähe ka ära arvata. Aga \" \\\n \"alustame. Paku täht või kohe õige \" \\\n \"sõna. Mängu saab kinni panna kirjutades sõna 'aitab'. 
Edu!\"", "def enlabel(mi_, ma_):\n\treturn \"Unicode characters from {} to {} codepoints\".format(mi_, ma_)", "def dummy_junction23():\n return 'junction:chr1:251-299:+'", "def clean_note_prefix(self):\n data = self.cleaned_data[\"note_prefix\"]\n return data.encode(\"ascii\") if data != \"\" else data", "def passive(self, polite=False, positive=False, kanji=False):\n if self.group == 'ichidan':\n base = self.kanji if kanji else self.kana\n base = re.sub('ใ‚‹$', 'ใ‚‰', base)\n else:\n nai_form = self._nai(kanji=kanji)\n base = re.sub('ใชใ„$', '', nai_form)\n if polite:\n suffix = 'ใ‚Œใพใ™' if positive else 'ใ‚Œใพใ›ใ‚“'\n else:\n suffix = 'ใ‚Œใ‚‹' if positive else 'ใ‚Œใชใ„'\n return base + suffix", "def process_address(text):\n return sanitize(text[9:])", "def caesar_cipher_encode(n: int, text: str, p: str) -> str:\n lookup_table = str.maketrans(p, p[n:] + p[:n])\n\n return text.translate(lookup_table)", "def _pinyin(self, rest):\n # Fix if sentence contains some english '.tr yacinๅคช็‰›ไบ†'\n rest = filter(lambda x: not self.isascii(x), rest.decode('utf8'))\n def reduce_reading((char, readings)):\n \"\"\"If a character has multiple cjklib readings, use the fine-tuning\n dict from pinyin toolkit and CEDICT as a backup.\"\"\"\n if len(readings) == 1:\n return readings[0]\n else:\n try:\n return self.pinyin_toolkit_lookup[char]\n except KeyError:\n return self._dict_reading_lookup(char)\n\n readings = [self.char_lookup.getReadingForCharacter(x, 'Pinyin') for x in rest]\n res = u' '.join(map(reduce_reading, zip(rest, readings)))\n return res.encode('utf8')", "def forbidden_latex_chars():\n\n tex_char = ['\\\\', '{', '}', '&', '[', ']', '^', '~']\n chars = ', '.join(['\"{char}\"'.format(char=char) for char in tex_char])\n message = _(u\"Urmฤƒtoarele caractere sunt interzise ศ™i trebuie scoase : {chars}.\".format(chars=chars))\n return tex_char, message", "def GUI_Write_Encoder_Values(self):\n for i in range(3):\n self.encoder_text[i].set(\"%8s microns\"%str(self.read_pos[i]))\n return", "def preprocess_msg(self):\n self.tmp_msg = self.tmp_msg.lower()\n cleared = ''\n for ch in self.tmp_msg:\n if ch in string.ascii_lowercase:\n cleared += ch\n\n c = ''\n for ch in cleared:\n c += '{:02d}'.format(ord(ch) - 97)\n if len(c) % 4 != 0:\n c += '99'\n self.tmp_msg = c\n\n super().preprocess_msg()", "def sanitaze(field):\n return re.sub('[^0-9a-zA-Z]+', '-', str(field))", "def __init__(self, format_char=\"I\"):\r\n\t\tself.format = ENDIANNESS + format_char", "def clean_text_from_private_unicode(line):\n line = re.sub(r\"([\\uE000-\\uF8FF]|\\uD83C[\\uDF00-\\uDFFF]|\\uD83D[\\uDC00-\\uDDFF])\", \" \", line)\n return line", "def clean_text_from_private_unicode(line):\n line = re.sub(r\"([\\uE000-\\uF8FF]|\\uD83C[\\uDF00-\\uDFFF]|\\uD83D[\\uDC00-\\uDDFF])\", \" \", line)\n return line", "def pad_encoded_text(self, encoded_text):\n\n\t\textra_padding = 8 - len(encoded_text) % 8#calculmaos cuanto falta por agregar\n\t\tfor i in range(extra_padding):\n\t\t\tencoded_text += \"0\"\n\n\t\tpadded_info = \"{0:08b}\".format(extra_padding)#le agregamos una informacion adicionar la cual utilizaremos despues al comprimir para saber cuantos 0 le agregamos y despues poder eliminarlos\n\t\tencoded_text = padded_info + encoded_text\n\t\treturn encoded_text", "def encode_parameters(self, text):\n return quote_plus(text, safe='=:&\"')", "def strip_other_charcter():\n pass" ]
[ "0.5532932", "0.55261433", "0.5418894", "0.5382547", "0.537889", "0.5318266", "0.529304", "0.52344793", "0.51329744", "0.51327187", "0.5014577", "0.49969777", "0.49740544", "0.49352726", "0.49252382", "0.48906183", "0.48848042", "0.48798963", "0.48445666", "0.48431173", "0.48275122", "0.48089203", "0.48010188", "0.4800972", "0.47935492", "0.4787642", "0.47784796", "0.47673178", "0.47576228", "0.47557026", "0.47545803", "0.47408637", "0.4736951", "0.47314388", "0.47301927", "0.47223848", "0.47044", "0.47033674", "0.47017938", "0.46931985", "0.46916077", "0.46870005", "0.46855122", "0.46850672", "0.46832505", "0.4681938", "0.46804234", "0.4675015", "0.46664414", "0.46662286", "0.4665214", "0.4665214", "0.46640813", "0.4663027", "0.46620378", "0.4657953", "0.46556085", "0.4650218", "0.46477306", "0.46431583", "0.46385962", "0.4638097", "0.46350265", "0.46311092", "0.4627639", "0.46253502", "0.46223193", "0.4621661", "0.46169624", "0.46072075", "0.45983154", "0.4595431", "0.45953733", "0.45943597", "0.45924887", "0.45886716", "0.4587518", "0.4584489", "0.45842364", "0.45778745", "0.4571276", "0.45711836", "0.45583695", "0.45581475", "0.45505732", "0.454327", "0.4532793", "0.45308357", "0.45245823", "0.45221832", "0.4519175", "0.45142266", "0.45073426", "0.45028245", "0.4502171", "0.45009542", "0.44948387", "0.44948387", "0.4490574", "0.44876233", "0.448129" ]
0.0
-1
Send valid value with dot in PAN fields
def test_14(self): assert 'False' == Api.requestBlock('test-14')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def prepare_value(self, value):\n if value is None:\n return value\n value = value.replace(\" \", \"\").replace(\".\", \"\")\n if value:\n return \"%s.%s.%s.%s\" % (value[0:3], value[3:7], value[7:11], value[11:])\n return value", "def __CheckValue1(self, value, value2):\n if value[-1:] == '.':\n return value + '.'\n return value", "def _validate_senders_correspondent_53D(self, val):\n return val", "def _validate_senders_correspondent_53A(self, val):\n return val", "def _format_contract_number_partyA_21N(self, val):\n return val", "def _check_with_cp_no_format(self, field, value):\n if not self._is_valid_cp_format(value):\n self._error(field, \"Invalid cellphone number format.\")", "def _format_senders_correspondent_53D(self, val):\n account = val.get('ACCOUNT')\n name = val.get('NAME')\n address = val.get('ADDRESS')\n if name and address:\n name = FSwiftWriterUtils.split_text_and_prefix(name, 35)\n address = FSwiftWriterUtils.split_text_and_prefix(address, 35)\n val = FSwiftWriterUtils.allocate_space_for_name_address_without_constraint(name, address)\n if account:\n val = \"/\" + str(account) + \"\\n\" + str(val)\n return val", "def routepack(value):\n return str(value).replace(\"/\",\"!\")", "def _format_senders_correspondent_53A(self, val):\n senders_correspondent_account = val.get('ACCOUNT')\n senders_correspondent_bic = val.get('BIC')\n if senders_correspondent_bic:\n if senders_correspondent_account:\n val = \"/\" + str(senders_correspondent_account) + \"\\n\" + str(senders_correspondent_bic)\n else:\n val = str(senders_correspondent_bic)\n return val", "def _format_details_of_charges_71A(self, val):\n return val", "def getPreventNativeDigitsInField(self):\r\n try:\r\n return eval(self.phone.sx('(send (send (send (send primary-root-container get-focused) get-focused) get-focused) get-prevent-native-digits)', convertToString=True, doNotReport=True).title())\r\n except Exception:\r\n return True", "def _sanitize(opt, value):\n return value if not opt.secret else '*' * 4", "def validate_zone_label(value):\n if not re.match(r'^[a-z0-9][\\.\\-0-9a-z]*[\\.0-9a-z]$', value):\n msg = _(\"Labels must start and end with a letter or digit, \"\n \"and have as interior characters only letters, digits, and hyphen.\")\n raise ValidationError(msg)\n if not value.endswith('.'):\n msg = _(\"Use a fully expanded domain name ending with a dot.\")\n raise ValidationError(msg)\n if len(value) > 63:\n raise ValidationError(_(\"Labels must be 63 characters or less.\"))", "def name_only(value: str) -> str:\n m = re.match(r'\\d+\\.\\d+ (.+)', value)\n if m:\n return m.group(1)\n else:\n return value", "def _validate_details_of_charges_71A(self, val):\n return val", "def clean_phone(self):\n data = self.cleaned_data['phone']\n data = data.strip(' +').replace('-', '')\n if len(data) == 12:\n data = data[3:]\n\n return data", "def get_ip_dotted(self):\r\n return socket.inet_ntoa(struct.pack('>I', self.ip))", "def vyhodnot(pole):\n\tif \"xxx\" in pole:\n\t\treturn(\"x\")\n\telif \"ooo\" in pole:\n\t\treturn(\"o\")\n\telif \"-\" not in pole:\n\t\treturn(\"!\")\n\telse:\n\t\treturn(\"-\")", "def _validate_contract_number_partyA_21N(self, val):\n return val", "def _validate_senders_reference_20(self, val):\n validate_slash_and_double_slash(val, \"Senders Reference\") # .value()\n return val", "def replace_dot(data):\n data = re.sub(\"[.]\", \" . 
\", data)\n return data", "def test_specialchar(self):\n form_data = self.form_data('vNzwXpzKJyTshvHsuULn')\n form = self.form(data=form_data, user=self.u)\n self.assertFalse(form.is_valid())", "def fixing_annotation(key, n):\n if key + '.' + n not in self.propbank:\n key = key.replace('-', '_')\n return key + '.' + n", "def clean_latitude(self):\n lLatitude = self.cleaned_data['latitude']\n if lLatitude:\n lValue = lLatitude.strip()\n if lValue:\n lRegEx = re.compile(CO_ORD_REGEX)\n if lRegEx.match(lValue) == None:\n raise forms.ValidationError(\"Please enter the location in decimal notation, for example 53.768761 If it ends with N it's positive, if S, then it's negative.\")\n return lLatitude", "def clean_value(self, value):\n raise NotImplementedError(\n gettext('%s cannot be used for request data')\n % self.__class__.__name__)", "def replace_dots(data):\n data = re.sub(':', ' : ', data)\n return data", "def _clean_address(self, field):\n data = self.cleaned_data[field]\n if data != \"\" and not is_valid_address(data):\n raise ValidationError(\"Provided value is not a valid Algorand address!\")\n return data", "def setIgnoreDot(self, value):\n self.setBooleanOption(0, value)", "def __call__(self, value):\n if value is None:\n return value\n\n value = value.replace(\" \", \"\").replace(\".\", \"\")\n if not value.isdigit():\n raise ValidationError(_(\"AHV must contain numbers only\"))\n if len(value) != 13:\n raise ValidationError(_(\"AHV must be 13 numbers long.\"))\n\n if self.ahv_checksum(value[:-1]) != value[-1]:\n raise ValidationError(_(\"Not a valid AHV number.\"))", "def safe_addr(ip_addr):\n return '.'.join(ip_addr.split('.')[:2] + ['xxx', 'xxx'])", "def defangIPaddr(address):\n address_as_list = list(address)\n length_of_address = len(address_as_list)\n for i in range(length_of_address):\n if address_as_list[i] == \".\":\n address_as_list[i] = \"[.]\"\n return \"\".join(address_as_list)", "def test_format_phone_formatted(self):\n number1 = '809.555.1234'\n self.assertEqual(format_phone(number1), '(809) 555-1234')\n number2 = '(888) 555-3456'\n self.assertEqual(format_phone(number2), '(888) 555-3456')", "def __unicode__(self):\n d = ((2, \".\"), (6, \".\"), (10, \"/\"), (15, \"-\"))\n s = list(map(str, self.cnpj))\n \n for i, v in d:\n s.insert(i, v)\n \n r = ''.join(s)\n \n return r", "def test_40_phonenumbers_too_long(self):\n number_phone = self.samples[4]\n with self.assertRaises(osv.except_osv):\n self.pn._symbol_set_char(number_phone)", "def replace_three_dots(self):\n self.value = re.sub(self.patterns['replace_three_dots'], ' โ€ฆ', self.value)\n return self", "def _format_bank_operation_code_23B(self, val):\n return val", "def clean_postal_code(self):\n return self.cleaned_data['postal_code'].strip()", "def fmt(cls, n):\n return ''.join(c for c in n if c in cls.ALLOWED).lower()", "def test_20_phonenumbers_UnicodeDecodeError(self):\n number_phone = self.samples[2]\n with self.assertRaises(osv.except_osv):\n self.pn._symbol_set_char(number_phone)", "def validate(self, value):\n super(MACAddressField, self).validate(value)\n if value:\n try:\n value = EUI(str(value), dialect=mac_bare)\n return\n except (ValueError, TypeError, ValidationError):\n raise ValidationError(self.error_messages[\"invalid\"] % {\"value\": value})", "def clean_note_prefix(self):\n data = self.cleaned_data[\"note_prefix\"]\n return data.encode(\"ascii\") if data != \"\" else data", "def fill_dots(message):\r\n length = len(message)\r\n power = int(np.ceil(np.log2(length)))\r\n return message + (\".\" * 
(2**power - length))", "def _boundary_value(self) -> str:\n ...", "def str_prefix__(self):\n s = str(self.avp_code)\n if self.is_vendor:\n s+= \".v\"\n if self.is_mandatory:\n s+= \".m\"\n if self.is_protected:\n s+= \".p\"\n if self.vendor_id!=0:\n s+= \":\"+str(self.vendor_id)\n return s", "def __str__(self):\n\n\t\tif self.rawValue == None: return str()\n\n\t\tx = self.rawValue\n\n\t\tif not x.isdigit() or len(x) != 44 or len(set(x)) == 1:\n\t\t\treturn self.rawValue\n\n\t\treturn '{} {} {} {} {} {} {} {} {} {} {}'.format(x[:4], x[4:8], x[8:12], x[12:16], x[16:20], x[20:24], x[24:28], x[28:32], x[32:36], x[36:40], x[40:44])", "def _format_intermediary_institution_56C(self, val):\n if val:\n val = \"/\" + str(val)\n return val", "def format_ethernet(value, mask):\n value_ether = \":\".join(re.findall('..', \"{:012x}\".format(value)))\n if mask is None:\n return value_ether\n value_mask = \":\".join(re.findall('..', \"{:012x}\".format(mask)))\n return \"{}/{}\".format(value_ether, value_mask)", "def dot_name(number):\n\tif number > 0:\n\t\treturn \"P {}\".format(number)\n\telse:\n\t\treturn \"O {}\".format(-number)", "def test_format_phone_raw(self):\n number = '8095551234'\n self.assertEqual(format_phone(number), '(809) 555-1234')", "def mask_acct_no(column):\n return column.str.replace(r'\\d*\\*{3,}\\d*|\\d+(\\-\\d+){2,}', ' $ACCT_NO ')", "def clean_phone(self):\n phone = self.cleaned_data['phone']\n if phone.startswith('8') and len(phone) > 7:\n return phone.replace('8', '+7', 1)\n\n return phone", "def test_formatted_number(self):\n node = self.create_xml_patient({'Mobile_Number': '(33)-0001112222'})\n payload = self.create_payload([node])\n parse_patient(node, payload)\n patient = payload.patients.all()[0]\n self.assertEqual(patient.contact.phone, '+330001112222')", "def _validate_bank_operation_code_23B(self, val):\n return val", "def _setsenders_correspondent_53D(self, val):\n self.swift_obj.SendersCorrespondent_D = val\n self.swift_obj.SendersCorrespondent_D.swiftTag = \"53D\"", "def test_str(self):\n self.assertEqual(\n \"\\N{SNOWMAN}\",\n bytes_to_str(\"\\N{SNOWMAN}\"),\n )", "def dotted(self) -> str:\n return \".\".join(str(v) for v in self.value)", "def _write_pln_line(self, file, field, value):\n\n # Add whitespace to the field name.\n field_str = \"{:25}\".format(field)\n\n # Use an empty string for any None values and 'NaN' for any nan values.\n value_str = str(value)\n value_str = (\"\" if value_str == \"None\" else value_str)\n value_str = (\"NaN\" if value_str == \"nan\" else value_str)\n\n # Combine the field and value strings and write this to the file.\n file.write(\"\".join([field_str, value_str, \"\\n\"]))", "def fixNumber(sval):\n\n r, val = VALID_RE.match(sval.strip()).groups()\n parts = VALPARTS_RE.findall(val)\n dpart = parts.pop(-1)\n if parts:\n return (r or \"\") + \"\".join(parts) + \".\" + dpart\n return (r or \"\") + dpart", "def _format_accept_value(self, lang, priority=1.0):\r\n return \"{};q={}\".format(lang, priority)", "def raw(self, raw):\n self.uw.send('%s.val = %.4f' % (self.name, self.clipRawLimits(raw)))", "def clean_value(self, value):\n return value", "def get_prep_value(self, value):\n return str(value)", "def format_verification(verification: Optional[str]) -> str:\n if not verification:\n return \"null\"\n return verification.replace('\"', \"`\")", "def safe_value(name, value):\n if name.lower() in LOGGER_SENSITIVE_HEADERS:\n prefix_length = logger_settings.get('reveal_sensitive_prefix', 16)\n prefix_length = int(\n min(prefix_length, 
(len(value) ** 2) / 32, len(value) / 2)\n )\n redacted_value = value[0:prefix_length]\n return redacted_value + '...'\n return value", "def clean_receiver(self):\n data = self.cleaned_data[\"receiver\"]\n if not is_valid_address(data):\n raise ValidationError(\"Provided value is not a valid Algorand address!\")\n return data", "def clean_honeypot(self):\n value = self.cleaned_data[\"honeypot\"]\n if value:\n raise forms.ValidationError(self.fields[\"honeypot\"].label)\n return value", "def clean_honeypot(self):\n value = self.cleaned_data[\"honeypot\"]\n if value:\n raise forms.ValidationError(self.fields[\"honeypot\"].label)\n return value", "def _format_account_with_institution_57C(self, val):\n if val:\n val = \"/\" + str(val)\n return val", "def clean_fields(self, *args, **kwargs):\n if self.ipi_name:\n self.ipi_name = self.ipi_name.zfill(11)\n if self.ipi_base:\n self.ipi_base = self.ipi_base.replace(\".\", \"\").upper()\n self.ipi_base = re.sub(\n r\"(I).?(\\d{9}).?(\\d)\", r\"\\1-\\2-\\3\", self.ipi_base\n )\n return super().clean_fields(*args, **kwargs)", "def validate_pin_input(value):\n try:\n int(value)\n return f\"D{value}\"\n except ValueError:\n return value.upper()", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def escape_dot(s):\n\treturn s. \\\n\t\treplace(\"{\", \"\\\\{\").\\\n\t\treplace(\"}\", \"\\\\}\").\\\n\t\treplace(\"\\n\", \"\").\\\n\t\treplace(\"\\r\", \"\")", "def test_prompt_ask_var_period_invalid(self):\n self.expected['failed'] = True\n self.expected['msg'] = \"Invalid character in 'ask' parameter 'hello.world'.\"\n\n self.assertEquals(\n self.prompt._prompt(self.response, {\n \"ask\": \"hello.world\"\n }),\n self.expected\n )", "def validate_phone(self, value):\n pattern = re.compile(r'(^[+0-9]{1,3})*([0-9]{8,15}$)', re.IGNORECASE)\n value = value.replace(\" \", \"\")\n if pattern.match(value) is None:\n raise ValidationError(_('Please insert correct phone number.'))\n return value", "def test_value_special_chars(self):\n raw = [\n 0x48,\n 0x65,\n 0x79,\n 0x21,\n 0x3F,\n 0x24,\n 0x20,\n 0xC4,\n 0xD6,\n 0xDC,\n 0xE4,\n 0xF6,\n 0xFC,\n 0xDF,\n ]\n string = \"Hey!?$ ÄÖÜäöüß\"\n self.assertEqual(DPTString.to_knx(string), raw)\n self.assertEqual(DPTString.from_knx(raw), string)", "def safe_number(self):\n mask = '*' * (len(self.account_number) - 4)\n return '{0}{1}'.format(mask, self.account_number[-4:])", "def PN_callback(ds, data_element):\n if data_element.VR == \"PN\":\n data_element.value = 'Anonymous'", "def parse(self,value):\r\n\t\treturn str(value)", "def clean_pax(self):\n value = self.cleaned_data[\"pax\"]\n if value > 50:\n raise forms.ValidationError(_(\"Sicuro del numero di persone?\"))\n return value", "def _format_senders_reference_20(self, val):\n if val:\n sett_obj = acm.FSettlement[str(val)]\n val = \"%s-%s-%s-%s\" % (get_settlement_reference_prefix(), str(val), str(get_message_version_number(sett_obj)), str(self.swift_message_type[2:5]))\n return val", "def test_label_seconde(self):\n self.assertIsInstance(self.address.label_second, str)\n self.assertEqual(self.address.label_second, \"\")", "def _aa_host_name(self):\n self.is_option = True\n self.is_statement = False\n self.has_validator = True\n if not (self.value.startswith('\"') and self.value.endswith('\"')):\n self.value = '\"' + self.value + '\"'\n validate_name(self.value.strip('\"'))", "def test_00_phonenumbers_formatting_en_US(self):\n number_phone = self.samples[0]\n res = 
self.pn._symbol_set_char(number_phone)\n self.assertEqual(res, '+19545551234', 'e164 phone formatting failed')\n res = self.pn._symbol_get(number_phone)\n self.assertEqual(res, '+1 954-555-1234', 'International phone formatting failed')", "def test_ends_with_dollar_sign(self):\n try:\n field_name_validator('id$')\n except ValidationError:\n self.fail('Field name raised ValidationError unexpectedly')", "def __repr__(self):\n return f\"{self.vip}/{self.mask}\"", "def __unicode__(self):\n\n d = ((3, \".\"), (7, \".\"), (11, \"-\"))\n s = list(map(str, self.cpf))\n\n for i, v in d:\n s.insert(i, v)\n\n r = ''.join(s)\n\n return r", "def sanitize_dot(func):\n return str(func).replace(\"::\", \"\\\\\")", "def send_pan(self, value=63, ch=None):\n self.send_control_change(PAN, value, ch=ch)", "def clean_longitude(self):\n lLongitude = self.cleaned_data['longitude']\n if lLongitude:\n lValue = lLongitude.strip()\n if lValue:\n lRegEx = re.compile(CO_ORD_REGEX)\n if lRegEx.match(lValue) == None:\n raise forms.ValidationError(\"Please enter the location in decimal notation, for example -1.82182 If it ends with E it's positive, if W, then it's negative.\")\n return lLongitude", "def test_clean_ip(self):\n\n raw_ip = 'client=mail-ed1-f51.google.com[209.85.208.51]'\n result = clean_ip(raw_ip)\n self.assertEqual(result, '209.85.208.51')", "def format_field_with_flag(self, data):\n return data.strip() == '*'", "def routeunpack(value):\n return str(value).replace(\"!\",\"/\")", "def validate_phonenumber(self):\n special_chars = set(string.punctuation.replace('+', ''))\n for number in self.telefono:\n if number.isalpha() or number in special_chars:\n raise OspiteExc('Il campo numero di telefono non è valido')", "def format_value(content):\n try:\n content += \"\"\n except TypeError:\n content = \", \".join(content)\n return remove_break_lines_characters(\n content).strip().replace('^', PRESERVECIRC)", "def test_phone_too_short(self):\n phone = Report(\n contact_phone='202',\n )\n\n try:\n phone.full_clean()\n except ValidationError as err:\n phone_error_message = err.message_dict['contact_phone']\n self.assertTrue(phone_error_message == ['Enter a valid value.'])", "def escape_values(bfo):\n return 0" ]
[ "0.5938703", "0.56705576", "0.5377418", "0.53041655", "0.5300826", "0.52765805", "0.52583504", "0.5224343", "0.51931864", "0.5167671", "0.5139161", "0.51147896", "0.50964856", "0.5069219", "0.50668114", "0.50606614", "0.5044426", "0.50388306", "0.5020225", "0.49950588", "0.4994453", "0.49900594", "0.49857947", "0.4982777", "0.49571532", "0.49527395", "0.49357826", "0.49296334", "0.49271768", "0.4925881", "0.49201486", "0.4908804", "0.48950732", "0.4894046", "0.48940453", "0.4887991", "0.48854673", "0.48646194", "0.48609784", "0.48588452", "0.4848096", "0.48439664", "0.4830033", "0.48123994", "0.48118055", "0.48106295", "0.47731617", "0.47682875", "0.4768279", "0.47617993", "0.47526428", "0.47502172", "0.47472966", "0.4744604", "0.47413385", "0.47409156", "0.47302395", "0.4708263", "0.47067076", "0.47016317", "0.47007748", "0.469872", "0.46938133", "0.46935692", "0.46858895", "0.46832278", "0.46832278", "0.46776634", "0.46773544", "0.46744752", "0.46719676", "0.46719676", "0.46719676", "0.46719676", "0.46719676", "0.46719676", "0.46714854", "0.46713755", "0.46606117", "0.4658814", "0.46557084", "0.46525145", "0.4651076", "0.46475786", "0.46463257", "0.46435988", "0.46386015", "0.46385115", "0.46361354", "0.4632502", "0.4616301", "0.46065098", "0.46041706", "0.45988172", "0.459587", "0.45905104", "0.45890468", "0.45845082", "0.45835254", "0.45728323", "0.45728108" ]
0.0
-1
Send null value in EMonth fields
def test_15(self): assert 'False' == Api.requestBlock('test-15')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def month(self):\n return 0", "def month(self):\n return 0", "def _get_blank_value_19(field):\n if field.null:\n return None\n else:\n return ''", "def _fill_date(self):\n if not self.date['year']:\n self.date['year'] = self.DEFAULT_DATE['year']\n if not self.date['month']:\n self.date['month'] = self.DEFAULT_DATE['month']\n if not self.date['day']:\n self.date['day'] = self.DEFAULT_DATE['day']", "def addingNull(self, database):\r\n try:\r\n date = self.lineWidgets[\"FECHA\"].text()\r\n try:\r\n month = int(date.split(\"-\")[1])\r\n except ValueError:\r\n month = int(date.split(\"-\")[1][0])\r\n year = int(date.split(\"-\")[0])\r\n self.conn = connect(\"database.sqlite\")\r\n self.cur = self.conn.cursor()\r\n self.cur.execute(\r\n f'''INSERT INTO {database} (date, month_id, year, concept, \r\n value) VALUES(?, ?, ?, ?, ?)\r\n ''', (date, month, year, \"NADA\", 0))\r\n self.conn.commit()\r\n self.cur.close()\r\n except (ValueError, IndexError):\r\n QMessageBox.critical(\r\n self, \"ERROR\", '''Put the date in its correct form''')", "def set_Month(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Month', value)", "def get_month():\n return handle_invalid_inputs(question_3, months)", "def nullValueToNan(self) -> None:\n self.cpp.nullValueToNan()", "def _get_blank_value_18(field):\n if field.null:\n return None\n else:\n return field.value_to_string(None)", "def __nonzero__(self):\n return not (self.year is None and\n self.month is None and\n self.day is None)", "def month(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"month\")", "def form_InputDateNoneValue(request):\n schema = schemaish.Structure()\n schema.add('inputStrip', schemaish.Date(default=datetime.date(1900,1,1)))\n\n form = formish.Form(schema, 'form')\n form['inputStrip'].widget = formish.Input(empty=datetime.date(1900,1,1),roundtrip_empty=True)\n return form", "def get_is_null_label(self):\n return pgettext_lazy('listfilter AbstractDateTime', 'Has no value')", "def default_na_value(self):\n dkind = self.dtype.kind\n if dkind == \"M\":\n return np.datetime64(\"nat\", self.time_unit)\n else:\n raise TypeError(\n \"datetime column of {} has no NaN value\".format(self.dtype)\n )", "def effective_invoice_month(self) -> pulumi.Input['GoogleTypeDateArgs']:\n return pulumi.get(self, \"effective_invoice_month\")", "def decrement_month(self):\n month: int = int(self.month)\n month -= 1\n if month == 0:\n month == 12\n year: int = int(self.year)\n year -= 1\n self.year = str(year)\n self.month = str(month)\n if len(self.month) == 1:\n self.month = \"0\" + self.month", "def test_param_year_is_none(self):\n test_date = get_by_values(Ordinal.first, Weekday.Saturday, Month.May)\n self.assertEquals(date.today().year, test_date.year)", "def __month(self):\n return _VirtualColumn(\n df_name=self.thisptr[\"df_name_\"],\n operator=\"month\",\n operand1=self,\n operand2=None\n )", "def month(self):\n return self._months", "def month(self):\n return self.__month", "def nullValueToZero(self) -> None:\n self.cpp.nullValueToZero()", "def set_start_month(self, month):\n return self.form.set_value(\"output period \\\"month from\\\"\", MONTHS[month - 1])", "def date(self, date):\n self.value = date.strftime(\"%Y-%m-%d\") if date else \"\"", "def form_DateDifferentEmpty(request):\n schema = schemaish.Structure()\n schema.add('myDateField', schemaish.Date())\n form = formish.Form(schema, 'form')\n form['myDateField'].widget = formish.Input(empty=datetime.date.today())\n return form", "def aMonth(self):\n 
return self._amon", "def _parse_date_wo_default_month_day(self, field):\r\n # It's not trivial to replace dateutil b/c parsing timezones as Z, +03:30, -400 is hard in python\r\n # however, we don't want dateutil to default the month or day (but some tests at least expect\r\n # us to default year); so, we'll see if dateutil uses the defaults for these the hard way\r\n result = dateutil.parser.parse(field, default=self.PREVENT_DEFAULT_DAY_MON_SEED1)\r\n result_other = dateutil.parser.parse(field, default=self.PREVENT_DEFAULT_DAY_MON_SEED2)\r\n if result != result_other:\r\n log.warning(\"Field {0} is missing month or day\".format(self._name, field))\r\n return None\r\n if result.tzinfo is None:\r\n result = result.replace(tzinfo=UTC)\r\n return result", "def test_invalid_month_orig(self):\n year, month, error = clean_year_month(2014, 3, 13)\n self.assertEqual(year, 2014)\n self.assertEqual(month, timezone.localtime(timezone.now()).month)\n self.assertEqual(error, ERROR)", "def month_digit(self, year, month, day):\n if (month[0] == '0' and len(month) > 2 or\n day.isdigit() and day[0] == '0' and len(day) > 2):\n Input.change_display(self, self.entries[4],\n 'Remove preceding zeros')\n else:\n Input.day_of_months(self, year, month, day.lstrip('0'))", "def test_non_input(self):\n from sosbeacon.utils import format_datetime\n\n encoded = format_datetime(None)\n self.assertEqual('', encoded)", "def clearField(self):\n self.field.setDate(datetime.now().date())", "def blank_future_eta(request):\n today = datetime.datetime.today()\n today = today.date()\n\n orders = OrderDetail.objects.filter(eta__gt=today)\n for order in orders:\n order.eta = None\n order.save()\n\n return HttpResponse('ok', mimetype='text/plain')", "def setMonth(self, *args):\n return _libsbml.Date_setMonth(self, *args)", "def pMonth(self):\n return self._pmon", "def test_none_start(self):\n self.assertEquals(self.event.start, None)\n self.event.all_day = True\n self.assertEquals(self.event.start, None)", "def fillna_method(request: Any) -> Any:\n return request.param", "def fillna_method(request: Any) -> Any:\n return request.param", "def test_parse_none_time_of_day(self):\n\n res = sf_c.parse_time_of_day(None)\n self.assertIs(res, None)", "def day_of_month(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"day_of_month\")", "def test_valid_month(self):\n ar_month = self.ar[2009][11]\n self.assertTrue(isinstance(ar_month, awstats_reader.AwstatsMonth))", "def __init__(__self__, *,\n day_of_month: Optional[pulumi.Input[str]] = None,\n day_of_week: Optional[pulumi.Input['RefreshScheduleMapScheduleFrequencyPropertiesRefreshOnDayPropertiesDayOfWeek']] = None):\n if day_of_month is not None:\n pulumi.set(__self__, \"day_of_month\", day_of_month)\n if day_of_week is not None:\n pulumi.set(__self__, \"day_of_week\", day_of_week)", "def __init__(self, date_datetime_or_none=_marker,\n year=None, month=None, day=None):\n if date_datetime_or_none is not _marker:\n if isinstance(date_datetime_or_none, (datetime, date)):\n self.year = date_datetime_or_none.year\n self.month = date_datetime_or_none.month\n self.day = date_datetime_or_none.day\n elif date_datetime_or_none is None:\n self.year = self.month = self.day = None\n else:\n raise TypeError(\"Can't construct a NullableDate out of %s.\" % (\n date_datetime_or_none,))\n else:\n self.year = year\n self.month = month\n self.day = day", "def _get_complex_null_value(complex):\n return _COMPLEX_NULL_VALUE", "def month(self):\n return self._month", "def month(self):\n return 
self._month", "def null_value_handler(datatype, value, null_format):\n if DataType.Name(datatype) == \"STRING\":\n if NullValues.STRING == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DOUBLE\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"FLOAT\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT32\":\n if NullValues.INT32 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT64\":\n if NullValues.INT64 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DURATION\":\n if NullValues.DURATION.equals(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"TIMESTAMP\":\n if NullValues.TIMESTAMP.equals(value):\n return null_format\n return value\n else:\n return value", "def null_value_handler(datatype, value, null_format):\n if DataType.Name(datatype) == \"STRING\":\n if NullValues.STRING == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DOUBLE\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"FLOAT\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT32\":\n if NullValues.INT32 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT64\":\n if NullValues.INT64 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DURATION\":\n if NullValues.DURATION.equals(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"TIMESTAMP\":\n if NullValues.TIMESTAMP.equals(value):\n return null_format\n return value\n else:\n return value", "def get_is_not_null_label(self):\n return pgettext_lazy('listfilter AbstractDateTime', 'Has value')", "def set_month(self, month):\r\n\t\tmonths = ['Enero', 'Febrero', 'Marzo', 'Abril',\r\n\t\t\t\t 'Mayo', 'Junio', 'Julio', 'Agosto'\r\n\t\t\t\t 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre']\r\n\t\tfor i in range(12):\r\n\t\t\tif month == i: \r\n\t\t\t\treturn months[i-1]", "def test_none_handling(self):\r\n dt = DatetimeTest.create(test_id=0, created_at=None)", "def test_no_due_date(self):\r\n node = object()\r\n self.assertEqual(self.call_fut(node), None)", "def month(self, month):\n\n self._month = month", "def set_finish_month(self, month):\n return self.form.set_value(\"output period \\\"month to\\\"\", MONTHS[month - 1])", "def test_none_handling(self):\n dt = DatetimeTest.create(test_id=0, created_at=None)", "def set_null(self, /, *defaults: Any, **kwargs: Any) -> \"fn\":\n return self._mod.set_null(self._func, *defaults, **kwargs)", "def _validate(year, month, day):\n if day is not None and month is None:\n raise ValueError(\"Day without month\")\n if day is None:\n day = 1\n if month is None:\n month = 1\n if year is None:\n year = 2000\n # actual validation happens here\n datetime.date(year, month, day)", "def test_setter_no_value(self):\n root = netapp_api.NaElement('root')\n root['k'] = None\n self.assertIsNone(root.get_child_content('k'))", "def test_setter_no_value(self):\n root = netapp_api.NaElement('root')\n root['k'] = None\n self.assertIsNone(root.get_child_content('k'))", "def _deserialize_null(self, *args):\n return None", "def NULL(self, t):\n t.value = None\n return t", "def __init__(self, init_month, init_day, init_year):\n # add the necessary assignment statements below\n self.month = init_month\n self.day = init_day\n 
self.year = init_year", "def get_default():\n today = datetime.date.today()\n if today.month == 1:\n return YearMonth(today.year - 1, 12)\n return YearMonth(today.year, today.month - 1)", "def __init__(self, month, key):\n self.__parent__ = month\n self.__name__ = key\n self.blog = month.blog\n self.month = month\n self.year = month.__parent__\n self.number = int(key)\n self.date = datetime.date(self.year.number, self.month.number, self.number)", "def default(self, obj):\n if isinstance(obj, (dt.date, dt.datetime)):\n return obj.isoformat()", "def month(self) -> str:\r\n return self._month", "def news_for_month(self):\n\n raise NotImplementedError", "def test_parse_none_active_days(self):\n\n res = sf_c.parse_active_days(None)\n self.assertIs(res, None)", "def _set_month(self, month) -> bool:\n if self.set_start_month(month) is False:\n return False\n return self.set_finish_month(month)", "def get_month(x):\n return x[\"SALE DATE\"].month", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n set_fields_to_required(self, ['end_date'])", "def test_null_as_null_indicator(self):\n self.custom_null_indicator_template('null')", "def null_values(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OneDashboardPageWidgetBillboardNullValueArgs']]]]:\n return pulumi.get(self, \"null_values\")", "def setIndexMonth(self,index):\n self.indexMonth = index", "def field_to_csv_SchedulingDate(theDate, id):\n if theDate is None or (theDate.Day == 0 and theDate.Month == 0 and theDate.Year == 0):\n return DELETE_VALUE if id and id > 0 else None\n # this case is what the suds creates by default. return None instead of a delete value\n elif theDate.Day is None and theDate.Month is None and theDate.Year is None:\n return None\n return '{0!s}/{1!s}/{2!s}'.format(theDate.Month, theDate.Day, theDate.Year)", "def __getMonth(self,xml):\n\t\t#TODO: Monat, Jahr, SollStunden, Urlaub,ZeitdiffAkt, ZeitdiffVor, erweitert\n\t\tdayTypeMapping = {'Arbeitstag': DayType.work,\n\t\t\t\t\t\t'Wochenende': DayType.weekend,\n\t\t\t\t\t\t'Urlaub': DayType.vacation,\n\t\t\t\t\t\t'Feiertag': DayType.holiday,\n\t\t\t\t\t\t'Krankheit': DayType.illness,\n\t\t\t\t\t\t'Überstunden genommen': DayType.overtime_free,\n\t\t\t\t\t\t'Dienstreise': DayType.business_trip,\n\t\t\t\t\t\t'Freistellung': DayType.unpaid_free}\n\t\tworkdays = {}\n\t\tmonthNum = int(xml.find('Monat').text)\n\t\tyearNum = int(xml.find('Jahr').text)\n\t\tif xml.find('Erweitert').text == 'true':\n\t\t\textendedFormat = True\n\t\telse:\n\t\t\textendedFormat = False\n\t\tfor panday in xml.findall('Tag'):\n\t\t\t# parse\n\t\t\tnumday = int(panday.find('Datum').text)\n\t\t\tdaytype = panday.find('TagesTyp').text\n\t\t\tdescription = panday.find('Bemerkung').text\n\t\t\tmorning = panday.find('Vormittag').text\n\t\t\tafternoon = panday.find('Nachmittag').text\n\t\t\tif extendedFormat:\n\t\t\t\tthird = panday.find('Dritte').text\n\t\t\t\tfourth = panday.find('Vierte').text\n\t\t\telse:\n\t\t\t\tthird = None\n\t\t\t\tfourth = None\n\t\t\t# convert\n\t\t\tdaytype = dayTypeMapping[daytype]\n\t\t\tmorning = self. _parsePANTimeRange(morning)\n\t\t\tafternoon = self. _parsePANTimeRange(afternoon)\n\t\t\tthird = self. _parsePANTimeRange(third)\n\t\t\tfourth = self. 
_parsePANTimeRange(fourth)\t\t\t\n\t\t\ttimeblocks = [morning, afternoon, third, fourth]\n\t\t\ttimeblocks = list(filter(None, timeblocks))\n\t\t\t# save\n\t\t\tday = WorkDay(daytype, description, timeblocks)\n\t\t\tworkdays[numday] = day\n\t\tmonth = WorkMonth(yearNum,monthNum,workdays)\n\t\treturn month", "def setNone(self):\n self.setValue([])", "def _get_months(self, cr, uid, context):\n months=[(str(n),str(n)) for n in range(1,13)]\n return months", "def _default_value(self):\n return None", "def showPreviousMonth(self):\n pass", "def test_null_field(self):\r\n problem = self.get_item_from_modulestore(self.problem_usage_key, True)\r\n self.assertIsNotNone(problem.markdown)\r\n self.client.ajax_post(\r\n self.problem_update_url,\r\n data={'nullout': ['markdown']}\r\n )\r\n problem = self.get_item_from_modulestore(self.problem_usage_key, True)\r\n self.assertIsNone(problem.markdown)", "def none_to_empty(data):\n return data if data is not None else ''", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")" ]
[ "0.5866896", "0.5866896", "0.57713896", "0.5746307", "0.57161075", "0.56837165", "0.5625904", "0.55780524", "0.5558474", "0.54825336", "0.53664184", "0.532997", "0.5325653", "0.530205", "0.52342147", "0.5223373", "0.5202048", "0.51526874", "0.5086805", "0.5086271", "0.50522536", "0.5041742", "0.50123155", "0.50083774", "0.50059336", "0.49894232", "0.49756497", "0.49548796", "0.49456793", "0.49443004", "0.49439943", "0.49401116", "0.49290597", "0.49266466", "0.4917134", "0.4917134", "0.49091017", "0.48985937", "0.48969612", "0.48923644", "0.48825106", "0.4879143", "0.4876903", "0.4876903", "0.48634788", "0.48634788", "0.4860026", "0.4846129", "0.48430932", "0.4828422", "0.48254466", "0.4822408", "0.4811579", "0.48081103", "0.48043117", "0.4797029", "0.4797029", "0.4796717", "0.47943595", "0.47913983", "0.47833773", "0.4773158", "0.47655267", "0.47649068", "0.47542652", "0.47539076", "0.4747959", "0.4746592", "0.47459573", "0.47413078", "0.47339374", "0.4726623", "0.471572", "0.47007757", "0.47006065", "0.46960148", "0.4682554", "0.46773985", "0.46757478", "0.46736747", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813", "0.4670813" ]
0.0
-1
Send special characters in EMonth fields
def test_16(self): assert 'False' == Api.requestBlock('test-16')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _french_month(month):\n mois = \"janvfรฉvrmarsavr-mai-juinjuilaoรปtseptoct-nov-dรฉc-\"\n mois_loc = re.search(month.lower(), mois.lower())\n if mois_loc:\n mois_no = (mois_loc.start() + 4) / 4\n return \"0{}\".format(mois_no)", "def _monthName(self, month):\n if month is not None:\n month = self.tr(\"MONTH.\" + month.upper())\n return month", "def fix_single_digit_month(connection):\n _update_date_by_regexp(connection=connection,\n regexp=\"^[0-9]{1}/[0-9]{1,2}/[0-9]{4}$\",\n new_value=\"CONCAT('0', cav.attribute_value)\")", "def get_month_name(month_no):\n locale.setlocale(locale.LC_TIME, \"et-EE\")\n return calendar.month_name[month_no]", "def get_month():\n return handle_invalid_inputs(question_3, months)", "def shortDate(self, date):\n return u'%s %02i' % (date.pMonth(), date.day())", "def set_Month(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Month', value)", "def month(self) -> str:\r\n return self._month", "def month_digit(self, year, month, day):\n if (month[0] == '0' and len(month) > 2 or\n day.isdigit() and day[0] == '0' and len(day) > 2):\n Input.change_display(self, self.entries[4],\n 'Remove preceding zeros')\n else:\n Input.day_of_months(self, year, month, day.lstrip('0'))", "def monthly_schedule(self,month):\n response = requests.get(f'http://company.com/{self.lname}/{month}')\n if response.ok:\n return response.text\n else:\n return 'Bad Response!'", "def formatmonth(self, theyear, themonth, withyear=True):\n\n schedules = Calendar_Group.objects.filter(day__month=themonth)\n\n v = []\n a = v.append\n a('<div class=\"table-responsive\"><table class=\"table table-bordered\" cellpadding=\"0\" cellspacing=\"0\" class=\"month\">')\n a('\\n')\n a(self.formatmonthname(theyear, themonth, withyear=withyear))\n a('\\n')\n a(self.formatweekheader())\n a('\\n')\n for week in self.monthdays2calendar(theyear, themonth):\n a(self.formatweek(week, schedules))\n a('\\n')\n a('</table></div>')\n a('\\n')\n return ''.join(v)", "def wordmonth(self, month):\n monthname = [word for word in self.months if word.istitle()]\n Month = int(month) -1\n return monthname[Month]", "def formatmonth(self, theyear, themonth, withyear=True):\n\n events = Event.objects.filter(day__month=themonth)\n\n v = []\n a = v.append\n a('<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" class=\"month\">')\n a('\\n')\n a(self.formatmonthname(theyear, themonth, withyear=withyear))\n a('\\n')\n a(self.formatweekheader())\n a('\\n')\n for week in self.monthdays2calendar(theyear, themonth):\n a(self.formatweek(week, events))\n a('\\n')\n a('</table>')\n a('\\n')\n return ''.join(v)", "def test_date_accept_this_month(self):\n spi_search = \"find date this month\"\n inv_search = \"year:\" + datetime.datetime.strftime(datetime.datetime.today(), '%Y-%m')\n self._compare_searches(inv_search, spi_search)", "def mm(self):\n return '%02d' % self._month", "def year_month(cls,\n year: typing.Union[int, str],\n month: typing.Union[int, str])->str:\n yearstr: str\n if isinstance(year, int):\n yearstr = str(year)\n else:\n yearstr = year\n\n monthstr: str\n if isinstance(month, int):\n monthstr = str(month)\n else:\n monthstr = month\n if len(monthstr) == 1:\n monthstr = \"0\" + monthstr\n return cls.DATE_AND_TIMES_SIGIL + yearstr + \"-\" + monthstr + \"-01T00:00:00/10\"", "def clean_date(date):\n months = [\n \"JAN\",\n \"FEB\",\n \"MAR\",\n \"APR\",\n \"MAY\",\n \"JUN\",\n \"JUL\",\n \"AUG\",\n \"SEP\",\n \"OCT\",\n \"NOV\",\n \"DEC\",\n ]\n index = 0\n prefix = date[:7]\n result = \"\"\n 
data = date[7:].upper().strip()\n letter = word = False\n while index < len(data):\n if data[index] == \"0\" and not word:\n index = index + 1\n continue\n if data[index] == \" \":\n word = False\n else:\n word = True\n if data[index].isalpha:\n letter = True\n elif data[index].isdigit and letter:\n result = result + \" \"\n word = False\n result = result + data[index]\n index = index + 1\n\n for month in months:\n if month in result:\n match = re.search(r\"\" + month + \"\\w+\", result)\n if not match:\n logging.info(\n \"MONTH: {} RESULT: {} BUT MATCH NONE?\".format(month, result)\n )\n else:\n result = result.replace(str(match.group()), month)\n\n result = result.replace(\"ABOUT\", \"ABT\")\n result = result.replace(\"BEFORE\", \"BEF\")\n result = result.replace(\"AFTER\", \"AFT\")\n result = result.replace(\"BETWEEN\", \"BET\")\n result = result.replace(\"FROM\", \"\")\n result = result.replace(\"TO\", \"AND\")\n\n if \"AND\" in result and \"BET\" not in result:\n result = \"BET {0}\".format(result)\n\n if \"-\" in result:\n split = result.split(\"-\")\n if result[:1] == \"-\":\n result = \"BEF {0}\".format(split[1])\n elif result[-1:] == \"-\":\n result = \"AFT {0}\".format(split[0])\n elif len(split) == 2:\n result = \"BET {0} AND {1}\".format(split[0], split[1])\n\n while \" \" in result:\n result = result.replace(\" \", \" \")\n\n return \"{0}{1}\\n\".format(prefix, result)", "def monthname(self):\n return self.strftime(\"%B\")", "def set_month(self, month):\r\n\t\tmonths = ['Enero', 'Febrero', 'Marzo', 'Abril',\r\n\t\t\t\t 'Mayo', 'Junio', 'Julio', 'Agosto'\r\n\t\t\t\t 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre']\r\n\t\tfor i in range(12):\r\n\t\t\tif month == i: \r\n\t\t\t\treturn months[i-1]", "def set_start_month(self, month):\n return self.form.set_value(\"output period \\\"month from\\\"\", MONTHS[month - 1])", "def effective_invoice_month(self) -> pulumi.Input['GoogleTypeDateArgs']:\n return pulumi.get(self, \"effective_invoice_month\")", "def month(self):\n return 0", "def month(self):\n return 0", "def longMonthName(self, p_int, QDate_MonthNameType=None): # real signature unknown; restored from __doc__ with multiple overloads\r\n return QString", "def test_invalid_month_orig(self):\n year, month, error = clean_year_month(2014, 3, 13)\n self.assertEqual(year, 2014)\n self.assertEqual(month, timezone.localtime(timezone.now()).month)\n self.assertEqual(error, ERROR)", "def month_content():\n \n user_id = current_identity.id\n month = request.form.get(\"month\")\n year = request.form.get(\"year\")\n \n if not is_month(month, year, user_id):\n establish_month(month, year, user_id)\n\n dayContentDict = format_day_content(month, year, user_id)\n\n d = collections.defaultdict(dict)\n response = {\n \"status\": \"ok\",\n \"dayContent\" : d\n }\n \n if dayContentDict:\n response[\"dayContent\"] = dayContentDict\n \n return jsonify(response)", "def clean_unit(unit):\n return 'M' if unit.lower() == 'month' else unit[0].lower()", "def plastic_date():\n return 'Zun, 99 Zun 9999 99:61:61'", "def set_month(self, month):\n # if the sting value of month is correct then we transform it into an\n # integer\n if isinstance(month, str):\n if month in MONTH_STR:\n month_int = MONTH_STR.index(month) + 1\n else:\n raise ValueError(\"Weekday as a string can only take the value {}\".format(MONTH_STR))\n else:\n month_int = month\n\n # Check if month_int in good range\n if month_int not in range(1, 13):\n raise ValueError(\"Month value must be in range [1..12] but is {}\".format(month_int))\n\n 
# First we separate the tens and the digit\n tens, digit = divmod(int(month_int), 10)\n\n # Then we add them in a single int\n reg_value = (tens << 4) | digit\n\n # The we add it to the register\n self.__write_register(_REGISTER_MONTH, reg_value)", "def test_str(self):\n arm = self.ar[2009][11]\n self.assertEqual(str(arm), '<AwstatsMonth 2009-11>')", "def day_of_months(self, year, month, day):\n if month.isdigit() and int(month) < 13:\n if (int(month) in [1,3,5,7,8,10,12]):\n Input.condition(self, year, month, day, '31', '')\n elif (int(month) in [4,6,9,11]):\n Input.condition(self, year, month, day, '30', '')\n elif int(month) == 2:\n if (((int(year) % 4) == 0 and\n not (int(year) % 100) == 0)\n or (int(year) % 400) == 0):\n if int(year) == 1712 and int(day) == 30:\n \"\"\"Easter Egg.\"\"\"\n Input.condition(self, year, month, day, '30','')\n Input.special_case(self)\n else:\n Input.condition(self, year, month, day, '29',' ')\n else:\n Input.condition(self, year, month, day, '28', '29')\n else:\n Input.change_display(self, self.entries[4],\n 'Enter month between 1-12 or month name')", "def month():\n \n # get month entered by user - if no month entered default to current month\n month = request.args.get(\"month\", datetime.now().strftime(\"%Y-%m\"))\n \n # get budget data for month as a dictionary\n data = budget_data(month)\n \n return json.dumps(data)", "def cleanup_text(sent):\n monthStrings = list(calendar.month_name)[1:] + list(calendar.month_abbr)[1:]\n monthPattern = '|'.join(monthStrings)\n sent = re.sub(r'\\s+', ' ', str(sent)).strip()\n sent = re.sub(r'\\/+', '', sent)\n sent = re.sub(r'U.S.', 'United States', sent)\n sent = re.sub(r'CORRECTED-', '', sent)\n sent = re.sub(r'^(\\W?[A-Z\\s\\d]+\\b-?)', '', sent)\n sent = re.sub(r'^ ?\\W ', '', sent)\n sent = re.sub(r'(\\s*-+\\s*[A-Za-z]+)$', '', sent)\n sent = re.sub(r\"(\\'+[A-Z1-9]+\\'*)$\", '', sent)\n sent = re.sub(r\"[$'|]+\", '', sent)\n sent = re.sub(r'({}) \\d+'.format(monthPattern), '', sent)\n\n sent = sent.lower().strip()\n\n return sent", "def format_date(self, data):\n return '%s/%s' % (data.month, data.day)", "def create_yearmonth_link(d,fieldname):\n title = smart_unicode(d.strftime('%Y %B'))\n param_dict = { \n '%s__year' % fieldname: str(d.year), \n '%s__month' % fieldname: str(d.month), \n }\n return title,param_dict", "def yymm(self) -> str:\n if self.is_old_style:\n numeric_part = self.split('/', 1)[1]\n yy = numeric_part[0:2]\n mm = numeric_part[2:4]\n else:\n yy = self[:2]\n mm = self[2:4]\n return f'{yy}{mm}'", "def get_month(self, indate):\n return indate.strftime(\"%B\") + \"-\" + indate.strftime(\"%Y\")", "def month(self) -> int:\n if self.is_old_style:\n return int(self.split('/', 1)[1][2:4])\n return int(self[2:4])", "def aMonth(self):\n return self._amon", "def setMonth(self, *args):\n return _libsbml.Date_setMonth(self, *args)", "def str_day_month(s):\n # TODO: Fix the -06:00 time zone offset\n if s:\n d = convert_from_iso(s)\n return datetime.datetime.strftime(d, \"%B %d|%A\").strip(\"0\")\n else:\n # Couldn't parse, return original.\n return s", "def get_date(self,yearlimits=[1500,2020]):\n\t\thead = self.raw_text()[:300] \t \t \n\t\tparser = Regexdate(head) \t \t\t\n\t\tyear = parser.find_year(yearlimits)\t\t\n\t\tmonth = parser.find_month()\n\t\tday = parser.find_day()\n\t\tif day and year != \"\":\n\t\t\treturn year + \"-\" + month + \"-\" + day\t\n\t\tif year:\n\t\t\treturn year\n\t\treturn \"\"", "def month(self, month):\n\n self._month = month", "def get_month_name(month_of_year):\n return 
VALID_MONTHS[month_of_year - 1].title()", "def _build_efem_month(self,list_efem_month):\n\n def add_value_dict(key, dict_data, msj=None):\n if key in dict_data.keys():\n dict_data[key] = dict_data[key] + [msj]\n else:\n dict_data[key] = [msj]\n return dict_data\n\n result_data = dict()\n for efem in list_efem_month:\n datetime_efem = efem.date_efem\n result_data = add_value_dict(str(datetime_efem.day), result_data, efem.msj_efem)\n return result_data", "def get_month(x):\n return x[\"SALE DATE\"].month", "def shortMonthName(self, p_int, QDate_MonthNameType=None): # real signature unknown; restored from __doc__ with multiple overloads\r\n return QString", "def set_finish_month(self, month):\n return self.form.set_value(\"output period \\\"month to\\\"\", MONTHS[month - 1])", "def refine_date(c):\n return strip_some_punct(c)", "def MonthYearFieldWidget(field, request):\n return z3c.form.widget.FieldWidget(field, MonthYearWidget(request))", "def pMonth(self):\n return self._pmon", "def problem3_3(month, day, year):\r\n \r\n months = (\"January\", \"February\", \"March\",\"April\",\"May\",\"June\",\"July\",\\\r\n \"August\",\"September\",\"October\",\"November\",\"December\")\r\n month = month - 1 \r\n Month_prin = months[month]\r\n Date_print = Month_prin + \" \" + str(day) + \",\" + \" \" +str(year)\r\n print(Date_print)", "def uCSIsEnclosedCJKLettersandMonths(code):\n ret = libxml2mod.xmlUCSIsEnclosedCJKLettersandMonths(code)\n return ret", "def special_case(self):\n Input.clear_display(self, self.entries[4])\n self.entries[4].insert(INSERT, '1712/02/30 was a real date in Sweden')\n self.entries[4].configure(state='readonly')", "def decrement_month(self):\n month: int = int(self.month)\n month -= 1\n if month == 0:\n month == 12\n year: int = int(self.year)\n year -= 1\n self.year = str(year)\n self.month = str(month)\n if len(self.month) == 1:\n self.month = \"0\" + self.month", "def get_month(string): \n return int(string[15:17])", "def test_valid_month(self):\n ar_month = self.ar[2009][11]\n self.assertTrue(isinstance(ar_month, awstats_reader.AwstatsMonth))", "def get_ekadashi_name(paksha, lmonth):\n if paksha == 'shukla':\n if lmonth == int(lmonth):\n return '%s-EkAdazI' % NAMES['SHUKLA_EKADASHI_NAMES']['hk'][lmonth]\n else:\n # adhika mAsam\n return '%s-EkAdazI' % NAMES['SHUKLA_EKADASHI_NAMES']['hk'][13]\n elif paksha == 'krishna':\n if lmonth == int(lmonth):\n return '%s-EkAdazI' % NAMES['KRISHNA_EKADASHI_NAMES']['hk'][lmonth]\n else:\n # adhika mAsam\n return '%s-EkAdazI' % NAMES['KRISHNA_EKADASHI_NAMES']['hk'][13]", "def month_adj():\n\n user_id = current_identity.id\n dayDate = request.form.get(\"dayDate\")\n newVal = request.form.get(\"newVal\")\n elemName = request.form.get(\"ElemName\")\n\n day = parse_day(dayDate)\n month = parse_month(dayDate)\n year = parse_year(dayDate)\n\n commit_adj_to_db(user_id, day, month, year, newVal, elemName)\n\n response = {\"status\" : \"ok\"}\n\n return jsonify(response)", "def month(self, month: str):\n return get_from_list(self.months, \"month\", month)", "def dateB(self):\r\n self.date = self.cal.selectedDate()\r\n self.lineEditWidgets[\"CUMPLEAÑOS\"].setText(\r\n self.date.toString(\"yyyy-MM-dd\"))", "def get_month(url):\n month = \" \".join(re.findall(\"[a-zA-Z]+\", url))\n return month", "def Month(self):\n return self._fmon", "def convert_month(match):\n day, month, year = tuple(match.group().split())\n list_full_months = [\"janvier\", \"février\", \"mars\", \"avril\", \"mai\", \"juin\", \"juillet\", \"août\", \"septembre\", 
\"octobre\",\n \"novembre\", \"dรฉcembre\"]\n list_trunc_month = [\"janv.\", \"fรฉvr.\", \"mars\", \"avr.\", \"mai\", \"juin\", \"juill.\", \"aoรปt\", \"sept.\", \"oct.\",\n \"nov.\", \"dรฉc.\"]\n if month in list_full_months:\n month_number = list_full_months.index(month) + 1\n elif month in list_trunc_month:\n month_number = list_trunc_month.index(month) + 1\n if day in [\"1ยฐ\", \"1er\"]:\n day = 1\n return \"%s%02d%02d\" % (year, month_number, int(day))", "def year_month_day(cls,\n year: typing.Union[int, str],\n month: typing.Union[int, str],\n day: typing.Union[int, str],\n )->str:\n yearstr: str\n if isinstance(year, int):\n yearstr = str(year)\n else:\n yearstr = year\n\n monthstr: str\n if isinstance(month, int):\n monthstr = str(month)\n else:\n monthstr = month\n if len(monthstr) == 1:\n monthstr = \"0\" + monthstr\n\n daystr: str\n if isinstance(day, int):\n daystr = str(day)\n else:\n daystr = day\n if len(daystr) == 1:\n daystr = \"0\" + daystr\n\n return cls.DATE_AND_TIMES_SIGIL + yearstr + \"-\" + monthstr + \"-\" + daystr + \"T00:00:00/11\"", "def test_custom_date_entry():\n date = datetime.date(2017, 1, 31)\n target = util.get_current_entry('./', date=date)\n assert target.endswith('January-2017/Jan-31.md')", "def __str__(self):\n return '{y}-{m:0>2}-{d:0>2}'.format(y=self.year, m=self.month, d=self.day)", "def __getMonth(self,xml):\n\t\t#TODO: Monat, Jahr, SollStunden, Urlaub,ZeitdiffAkt, ZeitdiffVor, erweitert\n\t\tdayTypeMapping = {'Arbeitstag': DayType.work,\n\t\t\t\t\t\t'Wochenende': DayType.weekend,\n\t\t\t\t\t\t'Urlaub': DayType.vacation,\n\t\t\t\t\t\t'Feiertag': DayType.holiday,\n\t\t\t\t\t\t'Krankheit': DayType.illness,\n\t\t\t\t\t\t'รœberstunden genommen': DayType.overtime_free,\n\t\t\t\t\t\t'Dienstreise': DayType.business_trip,\n\t\t\t\t\t\t'Freistellung': DayType.unpaid_free}\n\t\tworkdays = {}\n\t\tmonthNum = int(xml.find('Monat').text)\n\t\tyearNum = int(xml.find('Jahr').text)\n\t\tif xml.find('Erweitert').text == 'true':\n\t\t\textendedFormat = True\n\t\telse:\n\t\t\textendedFormat = False\n\t\tfor panday in xml.findall('Tag'):\n\t\t\t# parse\n\t\t\tnumday = int(panday.find('Datum').text)\n\t\t\tdaytype = panday.find('TagesTyp').text\n\t\t\tdescription = panday.find('Bemerkung').text\n\t\t\tmorning = panday.find('Vormittag').text\n\t\t\tafternoon = panday.find('Nachmittag').text\n\t\t\tif extendedFormat:\n\t\t\t\tthird = panday.find('Dritte').text\n\t\t\t\tfourth = panday.find('Vierte').text\n\t\t\telse:\n\t\t\t\tthird = None\n\t\t\t\tfourth = None\n\t\t\t# convert\n\t\t\tdaytype = dayTypeMapping[daytype]\n\t\t\tmorning = self. _parsePANTimeRange(morning)\n\t\t\tafternoon = self. _parsePANTimeRange(afternoon)\n\t\t\tthird = self. _parsePANTimeRange(third)\n\t\t\tfourth = self. 
_parsePANTimeRange(fourth)\t\t\t\n\t\t\ttimeblocks = [morning, afternoon, third, fourth]\n\t\t\ttimeblocks = list(filter(None, timeblocks))\n\t\t\t# save\n\t\t\tday = WorkDay(daytype, description, timeblocks)\n\t\t\tworkdays[numday] = day\n\t\tmonth = WorkMonth(yearNum,monthNum,workdays)\n\t\treturn month", "def month(self):\n return self.__month", "def get_month(self, as_str=False):\n\n # First we get the first 8 bits stored in the month register\n month_bcd = self.__read_register(_REGISTER_MONTH)\n\n # Then we extract the digits and the tens\n tens = (month_bcd & 0x10) >> 4 # 0x10 = 0b00010000\n digit = (month_bcd & 0x0F) # 0x0F = 0b00001111\n\n month = 10 * (tens) + digit\n\n if as_str is True: # if we want the month's name\n month = MONTH_STR[month - 1]\n\n return month", "def test_date_by_yr_mo_day_wholemonth_and_suffix(self):\n spi_search = \"find date 1976-04-01 and t dog\"\n inv_search = 'year:1976-04 and title:dog'\n self._compare_searches(inv_search, spi_search)", "def test_invalid_date_format(self):\n date_field = 'expiry_date'\n self.batch_data['expiry_date'] = date_field\n resp = self.query_with_token(\n self.access_token, batch_info_query.format(**self.batch_data))\n self.assertIn(\n 'invalid literal',\n resp['errors'][0]['message'])", "def test_last_millenium(self):\n term, rmd = util.parse_date(\"old paper 9505\")\n ym = util.parse_date_partial(term)\n self.assertEqual(ym, \"1995-05\")\n self.assertEqual(rmd, \"old paper\", \"Should have a remainder\")", "def string_date(mnthDay, year):\n return(mnthDay + '/' + str(year))", "def date(self, date):\n self.value = date.strftime(\"%Y-%m-%d\") if date else \"\"", "def formatMonthName(self, theyear, themonth, withyear=True,\n withlinks=True):\n if withyear:\n title = '%s %s' % (month_name[themonth], theyear)\n else:\n title = '%s' % month_name[themonth]\n\n if withlinks:\n prev_month = themonth - 1\n prev_month_year = theyear\n next_month = themonth + 1\n next_month_year = theyear\n if prev_month == 0:\n prev_month = 12\n prev_month_year -= 1\n elif next_month == 13:\n next_month = 1\n next_month_year += 1\n\n title = ('<a href=\"?year=%(py)d&month=%(m)d\">'\n '&lt;&lt;</a>%(s)s'\n '<a href=\"?year=%(pmy)d&month=%(pm)d\">'\n '&lt;</a>%(s)s'\n '%(title)s'\n '%(s)s<a href=\"?year=%(nmy)d&month=%(nm)d\">'\n '&gt;</a>'\n '%(s)s<a href=\"?year=%(ny)d&month=%(m)d\">'\n '&gt;&gt;</a>'\n % {'m': themonth, 'py': theyear-1,\n 's': '&nbsp;&nbsp;', 'pm': prev_month,\n 'pmy': prev_month_year, 'title': title,\n 'nm': next_month, 'nmy': next_month_year,\n 'ny': theyear+1 })\n\n return '<tr><th colspan=\"7\" class=\"month\">%s</th></tr>' % title", "def _get_months(self, cr, uid, context):\n months=[(str(n),str(n)) for n in range(1,13)]\n return months", "def test_monthly_report_error(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n res = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.expense)\n self.assertEqual(res.status_code, 201)\n rv = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token), data=\n {'name': 'soda', 'amount': 200, 'date_of_expense': '10-01-2021'})\n month = 4567\n res = self.client().get(f'/monthly_report?month={month}', headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(res.status_code, 400)\n results = json.loads(res.data)\n self.assertEqual(results['message'], f'The date {month} does not match the format MM-YYYY')", "def __month(self):\n 
return _VirtualColumn(\n df_name=self.thisptr[\"df_name_\"],\n operator=\"month\",\n operand1=self,\n operand2=None\n )", "def message(self, year, month, day):\n\n weekday = ['Sunday', 'Monday', 'Tuesday',\n 'Wednesday', 'Thursday', 'Friday', 'Saturday']\n a = int((14 - int(month))/12)\n y = int(year) - a\n m = int(month) + (12*a) -2\n d = (int(day) + y + int(y/4) - int(y/100) +\n int(y/400) + int((31*m)/12)) % 7\n x = weekday[d]\n name = Output.wordmonth(self, month)\n Day = Output.ordinal(self, day)\n weekday_txt = name + ' ' + Day + ', ' + year\n\n StrYear = int(strftime('%Y'))\n StrMonth = int(strftime('%m'))\n StrDay = int(strftime('%d'))\n DisplayTime = Output.JulianDN(self, year, month, day)\n CurrentDate = Output.JulianDN(self, StrYear, StrMonth, StrDay)\n\n if DisplayTime == CurrentDate:\n text = Output.final_output(self, '','Today is a ', x)\n elif DisplayTime == CurrentDate + 1:\n text = Output.final_output(self, '','Tomorrow will be a ', x)\n elif DisplayTime == CurrentDate - 1:\n text = Output.final_output(self, '','Yesterday was a ', x)\n elif DisplayTime > CurrentDate:\n text = Output.final_output(self, weekday_txt,' will be a ', x)\n elif DisplayTime < CurrentDate:\n text = Output.final_output(self, weekday_txt,' was a ', x)\n return text", "def parse_date(td):\n\tresYear = float(td.days)/364.0 # get the number of years including the the numbers after the dot\n\tresMonth = int((resYear - int(resYear))*364/30) # get the number of months, by multiply the number after the dot by 364 and divide by 30.\n\tresYear = int(resYear)\n\treturn str(resYear) + \"y\" + str(resMonth) + \"m\"", "def formatEditText(self, storedText):\n format = globalref.options.strData('EditDateFormat', True)\n try:\n return (GenDate(storedText).dateStr(format), True)\n except GenDateError:\n return (storedText, not storedText)", "def formatMonth(self, theyear, themonth, withyear=True):\n html = ('<table class=\"month\">\\n%s\\n%s\\n'\n % (self.formatMonthName(theyear, themonth,\n withyear=withyear), self.formatWeekHeader()))\n weeks = self.monthdatescalendar(theyear, themonth)\n for week in weeks:\n html += self.formatWeek(themonth, week, len(weeks)) + '\\n'\n return html + '</table>\\n'", "def _get_month(num) -> str:\n return dict(\n [\n (1, \"JAN\"), (2, \"FEB\"), (3, \"MAR\"), (4, \"APR\"), (5, \"MAY\"), (6, \"JUN\"),\n (7, \"JUL\"), (8, \"AUG\"), (9, \"SEP\"), (10, \"OCT\"), (11, \"NOV\"), (12, \"DEC\")\n ]\n )[num]", "def test_date_partial_only(self):\n term, rmd = util.parse_date(\"1902\")\n ym = util.parse_date_partial(term)\n self.assertEqual(ym, \"2019-02\")\n self.assertEqual(rmd, \"\", \"Should have no remainder\")", "def march(string):\n None", "def get_month_name(month):\n datetime_object = datetime.datetime.strptime(str(month), \"%m\")\n month_name = str(datetime_object.strftime(\"%b\"))\n return month_name", "def month(self):\n return self._months", "def __unicode__(self):\n return unicode(self.GetCalendarString())", "def __unicode__(self):\n return unicode(self.GetCalendarString())", "def get_date_display(self, context):\n return '{year}/{month}'.format(year=self.get_year(),\n month=self.get_month().zfill(2))", "def toisomonth(month):\n mes = {'JAN': '01', 'FEV': '02', 'FEB': '02', 'MAR': '03',\n 'ABR': '04', 'APR': '04', 'MAI': '05', 'MAY': '05',\n 'JUN': '06', 'JUL': '07', 'AGO': '08', 'AUG': '08',\n 'SET': '09', 'SEP': '09', 'OUT': '10', 'OCT': '10',\n 'NOV': '11', 'DEZ': '12', 'DEC': '12'}\n\n try:\n return mes[month.upper()[:3]]\n except KeyError:\n return month", "def 
normalise_date(text):\n text = text.replace('/', '-')\n text = text.replace(':', '-')\n text = text.replace('.', '-')\n text = text.replace('@', '-')\n return text", "def month(self):\n return self._month", "def month(self):\n return self._month", "def convert_month(string): \n datetime_object = datetime.datetime.strptime(string, \"%B\")\n\n month_number = datetime_object.month\n\n return month_number", "def showNextMonth(self):\n pass", "def print_month_header(month):\n print(\"Month #\" + str(month))\n print(\"Sun Mon Tue Wed Thu Fri Sat\")", "def parse_monthly_dates(self, x, pattern, ext, rename=None):\n datestring = self.scrub_string(x, pattern, ext, rename=rename)\n return datetime.datetime.strptime(datestring, '%Y%m')", "def named_month(pMonthNumber):\n return datetime.date(1900, pMonthNumber, 1).strftime('%B')", "def setIndexMonth(self,index):\n self.indexMonth = index" ]
[ "0.5688062", "0.5482961", "0.548005", "0.5468909", "0.54230297", "0.5412855", "0.530084", "0.52983487", "0.52577066", "0.51923347", "0.5177958", "0.5175429", "0.5157469", "0.5145739", "0.51417935", "0.5081514", "0.5069298", "0.5042676", "0.5035212", "0.5016211", "0.5014319", "0.49913096", "0.49913096", "0.4990017", "0.49862343", "0.49812415", "0.49738485", "0.49704617", "0.49689215", "0.49645206", "0.49459323", "0.49411714", "0.49399406", "0.4937535", "0.49313617", "0.49286443", "0.4926649", "0.49238613", "0.49131867", "0.49087435", "0.48800817", "0.48642057", "0.4860487", "0.48370793", "0.4835046", "0.4829126", "0.4823694", "0.48008418", "0.48008347", "0.4788407", "0.47800636", "0.47744069", "0.47612038", "0.47579274", "0.47573435", "0.47556025", "0.47407427", "0.473886", "0.47364447", "0.47336328", "0.47255656", "0.47244498", "0.47238377", "0.47228754", "0.47194016", "0.47042826", "0.4699491", "0.46885654", "0.46838942", "0.46677694", "0.46620795", "0.46547002", "0.46127373", "0.46104437", "0.4608351", "0.4596405", "0.45930424", "0.45878336", "0.45855725", "0.45841044", "0.4582156", "0.45809734", "0.45806125", "0.45748454", "0.45699957", "0.4566774", "0.4564423", "0.45620504", "0.45570534", "0.45570534", "0.45547298", "0.4549044", "0.4547845", "0.4537625", "0.4537625", "0.4536359", "0.4533738", "0.45277023", "0.4525633", "0.452551", "0.4522839" ]
0.0
-1
Send boundary values EMonth field (EMonth= 12)
def test_19(self): assert 'True' == Api.requestBlock('test-19')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_Month(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Month', value)", "def effective_invoice_month(self) -> pulumi.Input['GoogleTypeDateArgs']:\n return pulumi.get(self, \"effective_invoice_month\")", "def _build_efem_month(self,list_efem_month):\n\n def add_value_dict(key, dict_data, msj=None):\n if key in dict_data.keys():\n dict_data[key] = dict_data[key] + [msj]\n else:\n dict_data[key] = [msj]\n return dict_data\n\n result_data = dict()\n for efem in list_efem_month:\n datetime_efem = efem.date_efem\n result_data = add_value_dict(str(datetime_efem.day), result_data, efem.msj_efem)\n return result_data", "def month_adj():\n\n user_id = current_identity.id\n dayDate = request.form.get(\"dayDate\")\n newVal = request.form.get(\"newVal\")\n elemName = request.form.get(\"ElemName\")\n\n day = parse_day(dayDate)\n month = parse_month(dayDate)\n year = parse_year(dayDate)\n\n commit_adj_to_db(user_id, day, month, year, newVal, elemName)\n\n response = {\"status\" : \"ok\"}\n\n return jsonify(response)", "def set_finish_month(self, month):\n return self.form.set_value(\"output period \\\"month to\\\"\", MONTHS[month - 1])", "def month(self):\n return 0", "def month(self):\n return 0", "def set_start_month(self, month):\n return self.form.set_value(\"output period \\\"month from\\\"\", MONTHS[month - 1])", "def month(self, month):\n\n self._month = month", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"MTM\", MTM_MINS_COST)\n self.bill.add_fixed_cost(MTM_MONTHLY_FEE)", "def setMonth(self, *args):\n return _libsbml.Date_setMonth(self, *args)", "def test_monthly_report_error(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n res = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.expense)\n self.assertEqual(res.status_code, 201)\n rv = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token), data=\n {'name': 'soda', 'amount': 200, 'date_of_expense': '10-01-2021'})\n month = 4567\n res = self.client().get(f'/monthly_report?month={month}', headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(res.status_code, 400)\n results = json.loads(res.data)\n self.assertEqual(results['message'], f'The date {month} does not match the format MM-YYYY')", "def monthly_schedule(self,month):\n response = requests.get(f'http://company.com/{self.lname}/{month}')\n if response.ok:\n return response.text\n else:\n return 'Bad Response!'", "def set_month(self, month):\r\n\t\tmonths = ['Enero', 'Febrero', 'Marzo', 'Abril',\r\n\t\t\t\t 'Mayo', 'Junio', 'Julio', 'Agosto'\r\n\t\t\t\t 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre']\r\n\t\tfor i in range(12):\r\n\t\t\tif month == i: \r\n\t\t\t\treturn months[i-1]", "def get_month():\n return handle_invalid_inputs(question_3, months)", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"TERM\", TERM_MINS_COST)\n self.bill.add_fixed_cost(TERM_MONTHLY_FEE)\n if not ((self.end.month < month and self.end.year <= year) or\n self.end.year < year):\n # refresh included minutes and SMSs\n self.bill.add_free_minutes((-1) * self.bill.free_min)\n if self.start.month == month and self.start.year == year:\n # if first month, add term deposit to bill.\n self.bill.add_fixed_cost(TERM_DEPOSIT)\n else:\n self._carried_term = True", "def set_month(self, 
month):\n # if the sting value of month is correct then we transform it into an\n # integer\n if isinstance(month, str):\n if month in MONTH_STR:\n month_int = MONTH_STR.index(month) + 1\n else:\n raise ValueError(\"Weekday as a string can only take the value {}\".format(MONTH_STR))\n else:\n month_int = month\n\n # Check if month_int in good range\n if month_int not in range(1, 13):\n raise ValueError(\"Month value must be in range [1..12] but is {}\".format(month_int))\n\n # First we separate the tens and the digit\n tens, digit = divmod(int(month_int), 10)\n\n # Then we add them in a single int\n reg_value = (tens << 4) | digit\n\n # The we add it to the register\n self.__write_register(_REGISTER_MONTH, reg_value)", "def month():\n \n # get month entered by user - if no month entered default to current month\n month = request.args.get(\"month\", datetime.now().strftime(\"%Y-%m\"))\n \n # get budget data for month as a dictionary\n data = budget_data(month)\n \n return json.dumps(data)", "def test_valid_month(self):\n ar_month = self.ar[2009][11]\n self.assertTrue(isinstance(ar_month, awstats_reader.AwstatsMonth))", "def month_days():\n user_id = current_identity.id\n month = request.args.get(\"month\")\n year = request.args.get(\"year\")\n \n if user_id:\n \n response = {\n \"status\": None,\n \"dateArray\" : []\n }\n possibleDateArr = format_dayArray(month, year, user_id)\n\n if not possibleDateArr:\n return jsonify({\"status\" : \"error\"})\n\n response[\"dateArray\"] = possibleDateArr\n \n response[\"status\"] = \"ok\"\n \n return jsonify(response)", "def _set_month(self, month) -> bool:\n if self.set_start_month(month) is False:\n return False\n return self.set_finish_month(month)", "def process_month(self):\n if self.balance > 0:\n # if positive balance, convert APR to monthly multiplicative factor\n monthly_factor = pow(1 + self.apr, 1 / 12)\n self.balance *= monthly_factor", "def month(m=0):\n if not 1 <= m <= 12:\n # throw error\n return jsonify({}), status.HTTP_400_BAD_REQUEST\n holidays = Holidays.query.filter_by(month=m).all()\n\n this_month = {}\n for h in holidays:\n this_month[h.day] = this_month.get(h.day, []) + [h.holiday]\n\n return jsonify({\"month\": m, \"holidays\": this_month})", "def aMonth(self):\n return self._amon", "def get_end_month(month):\n return datetime(2020, month, 28)", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n raise NotImplementedError", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"PREPAID\", PREPAID_MINS_COST)\n if self.balance > (-10.0):\n self.balance += (-25.0)\n self.bill.add_fixed_cost(self.balance)", "def test_invalid_month_orig(self):\n year, month, error = clean_year_month(2014, 3, 13)\n self.assertEqual(year, 2014)\n self.assertEqual(month, timezone.localtime(timezone.now()).month)\n self.assertEqual(error, ERROR)", "def month_content():\n \n user_id = current_identity.id\n month = request.form.get(\"month\")\n year = request.form.get(\"year\")\n \n if not is_month(month, year, user_id):\n establish_month(month, year, user_id)\n\n dayContentDict = format_day_content(month, year, user_id)\n\n d = collections.defaultdict(dict)\n response = {\n \"status\": \"ok\",\n \"dayContent\" : d\n }\n \n if dayContentDict:\n response[\"dayContent\"] = dayContentDict\n \n return jsonify(response)", "def setIndexMonth(self,index):\n self.indexMonth = index", "def add_month(cab_data):\n return cab_data.assign(month=lambda x: x.time.dt.month)", "def calendar_month(year, 
month):\n start = datetime.datetime(year, month, 1)\n if month == 12:\n end = datetime.datetime(year+1, 1, 1)\n else:\n end = datetime.datetime(year, month+1, 1)\n print(start)\n print(end)\n return start, end", "def eomday(year, month):\n if hasattr(year, '__iter__'):\n assert hasattr(month, '__iter__')\n return np.array([calendar.monthrange(y, m)[-1] for y, m in zip(year, month)])\n else:\n return calendar.monthrange(year, month)[-1]", "def day_of_month(self, day_of_month):\n\n self._day_of_month = day_of_month", "def monthly_day(self, monthly_day):\n\n self._monthly_day = monthly_day", "def eme_500day_2018(self):\n a = de421_planets()\n self.FBseq = a.eme_500day_2018(self.MARS_DIST)\n self.N = len(self.FBseq)\n self.partials = 'eme_500day_2018_single'\n self.finals = 'eme_500day_2018_multi'", "def month(self):\n return self.__month", "def test_parameter_checking(self):\n params = ['12-2015', '12-15', '2015_12', '2015.12', '12.2015', '12/2015', '12/15', '2015-00', '2015-13']\n # list of examples of possible wrong month parameters\n for month in params:\n with self.assertRaises(ValueError) as cm: # ValueError should be raised with proper message\n download_data(month)\n self.assertEqual('Month parameter should be in form `yyyy-mm`', cm.exception.args[0], msg=month)\n # check if error message was as expected", "def get_month(x):\n return x[\"SALE DATE\"].month", "def day_of_months(self, year, month, day):\n if month.isdigit() and int(month) < 13:\n if (int(month) in [1,3,5,7,8,10,12]):\n Input.condition(self, year, month, day, '31', '')\n elif (int(month) in [4,6,9,11]):\n Input.condition(self, year, month, day, '30', '')\n elif int(month) == 2:\n if (((int(year) % 4) == 0 and\n not (int(year) % 100) == 0)\n or (int(year) % 400) == 0):\n if int(year) == 1712 and int(day) == 30:\n \"\"\"Easter Egg.\"\"\"\n Input.condition(self, year, month, day, '30','')\n Input.special_case(self)\n else:\n Input.condition(self, year, month, day, '29',' ')\n else:\n Input.condition(self, year, month, day, '28', '29')\n else:\n Input.change_display(self, self.entries[4],\n 'Enter month between 1-12 or month name')", "def test_bad_quarter_or_month(self):\n update_json = {\n \"cgac_code\": \"020\",\n \"is_quarter\": True,\n \"award_financial\": \"updated.csv\",\n \"reporting_period_start_date\": \"12/2016\",\n \"reporting_period_end_date\": \"13/2016\"}\n update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n headers={\"x-session-id\": self.session_id}, expect_errors=True)\n self.assertEqual(update_response.status_code, 400)\n self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])\n\n update_json = {\n # make sure date checks work as expected for an existing submission\n \"existing_submission_id\": self.status_check_submission_id,\n \"award_financial\": \"updated.csv\",\n \"reporting_period_start_date\": \"AB/2016\",\n \"reporting_period_end_date\": \"CD/2016\"}\n update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n headers={\"x-session-id\": self.session_id}, expect_errors=True)\n self.assertEqual(update_response.status_code, 400)\n self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])\n\n update_json = {\n \"cgac_code\": \"020\",\n \"is_quarter\": True,\n \"award_financial\": \"updated.csv\",\n \"reporting_period_start_date\": \"Q1/ABCD\",\n \"reporting_period_end_date\": \"Q2/2016\"}\n update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n headers={\"x-session-id\": self.session_id}, 
expect_errors=True)\n self.assertEqual(update_response.status_code, 400)\n self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])", "def test_cells_charts_post_chart_value_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) \n result = self.api.cells_charts_post_chart_value_axis(name, sheet_name,chartIndex,axis, folder=folder)\n self.assertEqual(result.code,200)\n pass", "def get_api_info_option_month_and_underlying_month_r(\n self,\n\n headers: t.Dict[str, str] = None,\n body: JSONEncodable = None,\n fields_data: t.Dict[str, str] = None,\n **kwargs\n ):\n r = self._do_call(\n method='GET',\n url=f'{self.API_BASE_URL}/info/option-month-and-underlying-month',\n headers=headers,\n body=body,\n fields=fields_data,\n **kwargs\n )\n return r", "def month_digit(self, year, month, day):\n if (month[0] == '0' and len(month) > 2 or\n day.isdigit() and day[0] == '0' and len(day) > 2):\n Input.change_display(self, self.entries[4],\n 'Remove preceding zeros')\n else:\n Input.day_of_months(self, year, month, day.lstrip('0'))", "def _get_eur_gbp_last_month(self) -> None:\n last_month = _last_month()\n data = _get_ecb_data(FREQUENCY_MONTHLY, last_month, last_month)\n\n self.eur_gbp_last_month = _get_latest_ecb_rate(data)", "def parse_month(self, response):\n month, year = response.meta[\"month\"], response.meta[\"year\"]\n dates = response.json().get(\"data\", [])\n\n for gazette_name in dates.values():\n date = re.search(self.DATE_REGEX, gazette_name).group()\n\n if date is None:\n continue\n\n date = parse(date, settings={\"DATE_ORDER\": \"DMY\"}).date()\n\n if date < self.start_date:\n continue\n\n url = f\"{self.GAZETTE_URL}?dir={year}/{month}/{gazette_name}\"\n yield Request(url, callback=self.parse_gazette)", "def test_cells_charts_post_chart_series_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) \n result = self.api.cells_charts_post_chart_series_axis(name, sheet_name,chartIndex,axis, folder=folder)\n self.assertEqual(result.code,200)\n pass", "def _get_months(self, cr, uid, context):\n months=[(str(n),str(n)) for n in range(1,13)]\n return months", "def month(self):\n return self._months", "def months(self, months):\n allowed_values = [\"january\", \"feburary\", \"march\", \"april\", \"may\", \"june\", \"july\", \"august\", \"september\", \"october\", \"november\", \"december\"] # noqa: E501\n if not set(months).issubset(set(allowed_values)):\n raise ValueError(\n \"Invalid values for `months` [{0}], must be a subset of [{1}]\" # noqa: E501\n .format(\", \".join(map(str, set(months) - set(allowed_values))), # noqa: E501\n \", \".join(map(str, allowed_values)))\n )\n\n self._months = months", "def mm(self):\n return '%02d' % self._month", "def test_cells_charts_post_chart_second_value_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) \n result = self.api.cells_charts_post_chart_second_value_axis(name, sheet_name,chartIndex, axis, folder=folder)\n self.assertEqual(result.code,200)\n pass", "def set_value(self, value):\n\t\tassert 
len(value.split('-')) > 1, 'A minimum of year and month are needed.'\n\t\tself.values = map(int, value.split('-'))", "def pMonth(self):\n return self._pmon", "def _boundary_value(self) -> str:\n ...", "def GetAvailabilityForMonth(year, month, VerkehrstageHex):\r\n\t# Get Last Day of the Month in Python\r\n\t# http://stackoverflow.com/questions/42950/get-last-day-of-the-month-in-python\r\n\t\"\"\"\r\n\tmonthrange(year, month):\r\n Returns weekday of first day of the month and number of days in month\r\n \"\"\"\r\n\tDayRange = calendar.monthrange(year,month)\r\n\tMonthEndDay = DayRange[1]\r\n\r\n\tStartDate = date(year,month,1)\r\n\tEndDate = date(year,month,MonthEndDay)\t\t\r\n\treturn GetAvailabilityBetweenDates(StartDate, EndDate, VerkehrstageHex)", "def calendarPageChanged(self, year, month):\n success = self.porker_thread.extendDates(datetime.date(year, month, 1))\n #if not success:\n # self.alertMessage(\"Failure!\",\"Unable to extend the thread's dates for some reason.\")\n #efficiency = self.porker_thread.getEfficiencyFor(self.getActiveDate())\n #self.porker_thread.sentDatesData = False", "def take_monthly_agreements():\n\n #collect data from form template\n chore_id = request.form.get(\"chore_id\")\n date_monthly = request.form.get(\"date_monthly\")\n\n #add agreements to database\n dbwrangler.add_commitment(date_monthly, chore_id)\n\n #redirect to form for further agreements\n return redirect(\"/takeachore\")", "def test_date_accept_this_month(self):\n spi_search = \"find date this month\"\n inv_search = \"year:\" + datetime.datetime.strftime(datetime.datetime.today(), '%Y-%m')\n self._compare_searches(inv_search, spi_search)", "def month(self):\n return self._month", "def month(self):\n return self._month", "def E_Dynamic_MavkoEtAl2009(rhob,DTS,PR):\n E = (2*(rhob*1000)*((304800/DTS)**2)*(1+PR))/1000000\n return E", "def comp_month_range():\n word_months = _(\"months\")\n word_month = _(\"month\")\n COMP_MONTH_LIST = (\n (12, '- 12 ' + word_months),\n (11, '- 11 ' + word_months),\n (10, '- 10 ' + word_months),\n (9, '- 9 ' + word_months),\n (8, '- 8 ' + word_months),\n (7, '- 7 ' + word_months),\n (6, '- 6 ' + word_months),\n (5, '- 5 ' + word_months),\n (4, '- 4 ' + word_months),\n (3, '- 3 ' + word_months),\n (2, '- 2 ' + word_months),\n (1, '- 1 ' + word_month),\n )\n return COMP_MONTH_LIST", "def get_main_date(self, kwargs):\n month = int(kwargs['month'])\n if month >= 9 and month <= 12:\n main_date = datetime(int(kwargs['year_from']), month, 1)\n else:\n main_date = datetime(int(kwargs['year_to']), month, 1)\n return main_date", "def check_dates(self, kwargs):\n month = int(kwargs['month'])\n if (int(kwargs['year_from']) >= int(kwargs['year_to'])) or \\\n (month < 1 or month > 12):\n # kdyby datumy byly nejake dodrbane, tak se sverime do pece autoredirectu\n return HttpResponseRedirect(reverse('admin_redir'))\n return None", "def month(self, month: str):\n return get_from_list(self.months, \"month\", month)", "def test_get_occurrences_monthly_mid_month(self):\n print()\n print(\"Get occurrences of a monthly expense between:\")\n expense = BudgetExpense.objects.get(id = 100)\n start_date = expense.start_date\n end_date = start_date + timedelta(days = 40)\n print(start_date.strftime(\"%B %d, %y\")+\" and \"+end_date.strftime('%B %d, %y'))\n print(\"======================================\")\n result = get_anticipated_transaction_occurences(anticipated_transaction= expense, start_date = start_date, end_date = end_date)\n result_dates = []\n for current_expense in result.keys():\n 
print(current_expense)\n print(\"========================\")\n result_dates.extend(result.get(current_expense))\n for current_date in result_dates:\n print(\"Date: \"+current_date.strftime(\"%B %d, %y %T\"))\n print(\"======================\")\n print()\n date_1 = start_date\n days_in_month = monthrange(start_date.year, start_date.month)[1]\n date_2 = start_date + timedelta(days = days_in_month)\n \n self.assertEquals([date_1, date_2], result_dates)", "def news_for_month(self):\n\n raise NotImplementedError", "def __month(self):\n return _VirtualColumn(\n df_name=self.thisptr[\"df_name_\"],\n operator=\"month\",\n operand1=self,\n operand2=None\n )", "def test_put_wrong_data(self):\n new_data = {\"fromMonth\": \"another\"}\n response = self.client.put(self.url + str(self.current_data[-1]['id']) + '/', data=json.dumps(new_data),\n content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())", "def month_bounds(year, month):\n year = int(year)\n month = int(month)\n month_start = datetime.strptime('%s,%s,1' % (year, month),'%Y,%m,%d')\n # days_in_month returns a tuple(weekday, days) where\n # weekday is the eekday the month starts on and days is the number of days in the month\n days_in_month = calendar.monthrange(year,month)\n month_end = month_start + timedelta(days=days_in_month[1]-1)\n return (month_start, month_end)", "def Month(self):\n return self._fmon", "def __next_month(self, year, month):\n year, month = (year, month + 1) if month < 12 else (year + 1, 1)\n\n return self.create(year, month)", "def __getMonth(self,xml):\n\t\t#TODO: Monat, Jahr, SollStunden, Urlaub,ZeitdiffAkt, ZeitdiffVor, erweitert\n\t\tdayTypeMapping = {'Arbeitstag': DayType.work,\n\t\t\t\t\t\t'Wochenende': DayType.weekend,\n\t\t\t\t\t\t'Urlaub': DayType.vacation,\n\t\t\t\t\t\t'Feiertag': DayType.holiday,\n\t\t\t\t\t\t'Krankheit': DayType.illness,\n\t\t\t\t\t\t'Überstunden genommen': DayType.overtime_free,\n\t\t\t\t\t\t'Dienstreise': DayType.business_trip,\n\t\t\t\t\t\t'Freistellung': DayType.unpaid_free}\n\t\tworkdays = {}\n\t\tmonthNum = int(xml.find('Monat').text)\n\t\tyearNum = int(xml.find('Jahr').text)\n\t\tif xml.find('Erweitert').text == 'true':\n\t\t\textendedFormat = True\n\t\telse:\n\t\t\textendedFormat = False\n\t\tfor panday in xml.findall('Tag'):\n\t\t\t# parse\n\t\t\tnumday = int(panday.find('Datum').text)\n\t\t\tdaytype = panday.find('TagesTyp').text\n\t\t\tdescription = panday.find('Bemerkung').text\n\t\t\tmorning = panday.find('Vormittag').text\n\t\t\tafternoon = panday.find('Nachmittag').text\n\t\t\tif extendedFormat:\n\t\t\t\tthird = panday.find('Dritte').text\n\t\t\t\tfourth = panday.find('Vierte').text\n\t\t\telse:\n\t\t\t\tthird = None\n\t\t\t\tfourth = None\n\t\t\t# convert\n\t\t\tdaytype = dayTypeMapping[daytype]\n\t\t\tmorning = self. _parsePANTimeRange(morning)\n\t\t\tafternoon = self. _parsePANTimeRange(afternoon)\n\t\t\tthird = self. _parsePANTimeRange(third)\n\t\t\tfourth = self. 
_parsePANTimeRange(fourth)\t\t\t\n\t\t\ttimeblocks = [morning, afternoon, third, fourth]\n\t\t\ttimeblocks = list(filter(None, timeblocks))\n\t\t\t# save\n\t\t\tday = WorkDay(daytype, description, timeblocks)\n\t\t\tworkdays[numday] = day\n\t\tmonth = WorkMonth(yearNum,monthNum,workdays)\n\t\treturn month", "def __get_step1_end_month(yaml_content: dict) -> str:\n\n end_month = None\n\n try:\n end_month = yaml_content['step1.end_month']\n except KeyError as exc:\n print(ConfigurationFactory.__get_key_missing_error_message(exc))\n\n return end_month", "def month(self) -> int:\n return self.arxiv_id.month", "def month(self) -> str:\r\n return self._month", "def _check_date(self, cr, uid,ids, context=None):\n for act in self.browse(cr, uid, ids, context):\n line_date = datetime.strptime(str(act.date), \"%Y-%m-%d\")\n if int(line_date.month)!=int(act.enrich_id.month) or int(line_date.year)!=int(act.enrich_id.year):\n raise osv.except_osv(_('ValidateError'), _(\"Payment Enrich Date Must Be Within Enrich Month And Year %s - %s\")%(act.enrich_id.month,act.enrich_id.year))\n return True", "def set_monthly(self, interval, *, day_of_month=None, days_of_week=None,\n index=None, **kwargs):\n if not day_of_month and not days_of_week:\n raise ValueError('Must provide day_of_month or days_of_week values')\n if day_of_month and days_of_week:\n raise ValueError('Must provide only one of the two options')\n self.set_daily(interval, **kwargs)\n if day_of_month:\n self.__day_of_month = day_of_month\n elif days_of_week:\n self.__days_of_week = set(days_of_week)\n if index:\n self.__index = index", "def getSpecificMonth(self, month, year):\n try:\n specificMonth = []\n args = [month, year]\n months = [\"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"June\",\n \"July\", \"Aug\", \"Sept\", \"Oct\", \"Nov\", \"Dec\"]\n result_args = self.cursor.callproc(\"monthly_prev_months\", args)\n for result in self.cursor.stored_results():\n for r in result:\n specificMonth = [months[r[0]-1] +\n \" \" + str(r[1]), str(r[2])]\n return list(specificMonth)\n\n except Exception as e:\n return \"Error:\" + e", "def __init__(__self__, *,\n day_of_month: pulumi.Input[int],\n hand_off_time: pulumi.Input[str]):\n pulumi.set(__self__, \"day_of_month\", day_of_month)\n pulumi.set(__self__, \"hand_off_time\", hand_off_time)", "def on_change_mission_order(self, cr, uid, ids , mission_order_id , context=None):\n if context is None:\n context = {}\n res ={}\n result = []\n \n mission_order = self.pool.get('hr.employee.mission').browse(cr,uid,mission_order_id )\n \n return {'value': { 'start_grant_date': mission_order.start_date, \n 'end_grant_date': mission_order.end_date }}", "def _set_value_date_32A(self, val):\n self.swift_obj.ValueDateCurrencyInterbankSettledAmount = val\n self.swift_obj.ValueDateCurrencyInterbankSettledAmount.swiftTag = \"32A\"", "def month(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"month\")", "def set_M(self, morb):\n if not (0 < morb <= 12):\n raise ValueError\n self.set_par('Df_cnf_Fock', '')\n self.set_par('MORB', morb)", "def send_mail(month: str, data: list):\n\n V2RayLogger.debug('SMTP server: {0}:{1}.'.format(Config.get('mail_host'), Config.get('mail_port')))\n smtp = smtplib.SMTP_SSL(Config.get('mail_host'), Config.get('mail_port'))\n V2RayLogger.debug('SMTP login with: {0}:{1}.'.format(Config.get('mail_user'), Config.get('mail_pass')))\n smtp.login(Config.get('mail_user'), Config.get('mail_pass'))\n V2RayLogger.debug('SMTP login successful.')\n\n for row in data:\n V2RayLogger.debug('Send 
email: {0}:{1}.'.format(row[0], row[1]))\n message = '<tr align=left><th align=\"left\">{0:30s}</th><th align=\"left\">{1:9s}</th></tr>\\n'.format(\n row[0], row[1])\n message = MIMEText(message, 'html')\n message['Subject'] = Header(Config.get('mail_subject') + ': {0}'.format(month))\n message['From'] = Config.get('mail_user')\n message['To'] = row[0]\n\n smtp.sendmail(Config.get('mail_user'), row[0], message.as_string())\n V2RayLogger.info('Send traffic to: {0}.'.format(row[0]))", "def setAnchorDateMonth(self, value):\n normalizedMonth = value - 1\n return self._set(anchorDateMonth=normalizedMonth)", "def _handleRequestPostChargeParameters(self, data):\r\n print(\"\\\"Request Post Charge Parameters\\\" received\")\r\n message = self.whitebeet.v2gParseRequestPostChargeParameters(data)\r\n if 'dc' in message:\r\n print(\"SOC: {}%\".format(message['dc']['soc']))\r\n try:\r\n self.whitebeet.v2gSetDcPostChargeParameters(0, 1, int(self.charger.getEvsePresentVoltage()))\r\n except Warning as e:\r\n print(\"Warning: {}\".format(e))\r\n except ConnectionError as e:\r\n print(\"ConnectionError: {}\".format(e))", "def MONTH(\n serial_number: func_xltypes.XlNumber\n) -> func_xltypes.XlNumber:\n\n date = utils.number_to_datetime(int(serial_number))\n return int(date.strftime(\"%m\"))", "def test_monthly_report(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n res = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.expense)\n self.assertEqual(res.status_code, 201)\n rv = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token), data=\n {'name': 'soda', 'amount': 200, 'date_of_expense': '10-01-2021'})\n self.assertEqual(rv.status_code, 201)\n fetch = self.client().get('/expenses?name=soda', headers=dict(Authorization=\"Bearer \" + access_token))\n result = json.loads(fetch.data)\n\n consolidated_total = 212.23\n res = self.client().get('/monthly_report?month=01-2021', headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(res.status_code, 200)\n results = json.loads(res.data)\n self.assertEqual(results['consolidated_total'], consolidated_total)", "def problem3_3(month, day, year):\r\n \r\n months = (\"January\", \"February\", \"March\",\"April\",\"May\",\"June\",\"July\",\\\r\n \"August\",\"September\",\"October\",\"November\",\"December\")\r\n month = month - 1 \r\n Month_prin = months[month]\r\n Date_print = Month_prin + \" \" + str(day) + \",\" + \" \" +str(year)\r\n print(Date_print)", "def condition(self, year, month, day, lastday, leapday):\n try:\n if len(day) == 0 or int(day) > int(lastday):\n if int(month) == 2 and day == leapday:\n Input.change_display(self, self.entries[4],\n 'Not a leap year')\n else:\n Input.change_display(self, self.entries[4],\n 'Enter day between 1-' + lastday)\n elif int(day) <= int(lastday):\n Input.change_display(self, self.entries[3], #Weekday message\n Output.message(self, year, month, day))\n except:\n Input.change_display(self, self.entries[4],\n 'Enter day between 1-' + lastday)", "def _build_data_result(self, efem, list_efem):\n result_data = dict()\n result_data[CURRENT_DAY] = list(efem.values())[0]['msj_efem']\n result_data[MONTH] = dict()\n result_data[MONTH] = self._build_efem_month(list_efem)\n return result_data", "def get_business_day_of_month(year, month, count):\n r = rrule.rrule(\n rrule.MONTHLY, byweekday=(rrule.MO, rrule.TU, rrule.WE, rrule.TH, rrule.FR),\n 
dtstart=datetime.datetime(year, month, 1),\n bysetpos=count)\n res = r[0]\n if (res is None or res.month != month or res.year != year):\n raise ValueError(\"No dates found in range. is there a flaw in your logic?\")\n return res.date()", "def get_currency_exchange_monthly(self, from_symbol, to_symbol, outputsize='compact'):\n _FUNCTION_KEY = 'FX_MONTHLY'\n return _FUNCTION_KEY, \"Time Series FX (Monthly)\", 'Meta Data'", "def changeDisplayedMonth(self):\n #I need to know which month is displayed\n currentMonth = self.indexMonth\n currentYear = self.currentYear\n\n sender = self.sender().objectName()\n if sender == 'bot_next':\n # if currentMonth < 11:\n if self.indexMonth < 11:\n self.indexMonth += 1\n self.setBaseDate(self.baseDate.addMonths(1))\n else:\n self.indexMonth = 0\n self.setCurrentYear(currentYear+1)\n # print('baseDate before', self.baseDate)\n self.setBaseDate(self.baseDate.addMonths(1))\n # print('baseDate after', self.baseDate)\n # print('new Year: ', self.currentYear)\n\n elif sender == 'bot_prev':\n # if currentMonth > 0:\n if self.indexMonth > 0:\n self.indexMonth -= 1\n self.setBaseDate(self.baseDate.addMonths(-1))\n else:\n self.indexMonth = 11\n self.setCurrentYear(currentYear-1)\n self.setBaseDate(self.baseDate.addMonths(-1))\n # print('new Year: ', self.currentYear)\n if currentMonth != self.indexMonth:\n # print(f'currentPageChanged.emit({self.indexMonth})')\n self.currentPageChanged.emit(self.indexMonth)\n self.combo_mesi.setCurrentIndex(self.indexMonth)\n if currentYear != self.currentYear:\n # print('current year changed')\n self.setListaGiorniDellAnno(self.createDates(self.baseDate), self.indexMonth)", "def EDATE(start_date, months):\n return DATE(start_date.year, start_date.month + months, start_date.day)", "def set_start():\n app.logger.debug(\"Got a JSON set_start post\");\n global dateFormat\n reply = {}\n\n flask.session[\"bStart\"] = request.form[\"bStart\"]\n flask.session[\"bLength\"] = request.form[\"bLength\"]\n bLength = int(request.form[\"bLength\"])\n try:\n start = arrow.get(flask.session[\"bStart\"], \"YYYY/MM/DD HH:mm\")\n except:\n reply[\"message\"] = \"Bad date Time.\"\n return jsonify(result=reply)\n \n brevet = AcpBrevet(bLength, start)\n open_limit = brevet.calc_open(0,bLength)\n close_limit = brevet.calc_close(0,bLength)\n\n reply[\"message\"] = \"Start of event and length set.\"\n reply[\"open\"] = open_limit.format(dateFormat)\n reply[\"close\"] = close_limit.format(dateFormat)\n return jsonify(result=reply)", "def get_month(self, indate):\n return indate.strftime(\"%B\") + \"-\" + indate.strftime(\"%Y\")", "def set_period(self, yearmonth):\n if not isinstance(yearmonth, int):\n yearmonth = int(yearmonth)\n year = int(yearmonth / 100)\n if self._set_year(year) is False:\n return False\n return self._set_month(yearmonth % year)", "def GEEmonthTRMM(ptsFile,startYear,endYear,buf,poly,username,folderOut, scalePix = 25000):\n \n # load required libraries\n import ee\n\n \n # Initialize the Earth Engine object, using the authentication credentials.\n ee.Initialize()\n\n ID_field = \"geeID\"\n\n years = list(range(startYear, endYear + 1))\n\n #load pts or poly file\n pts1 = ee.FeatureCollection('users/' + username + '/' + str(ptsFile))\n\n ID_field = \"geeID\"\n \n TRMM = ee.ImageCollection('TRMM/3B43V7').select('precipitation')\n \n img_col = TRMM.filter(ee.Filter.calendarRange(startYear, endYear, 'year'))\n \n if buf > 0:\n bufL = [buf]\n def bufferPoly(feature):\n return feature.buffer(bufL[0])\n\n ptsB = pts1.map(bufferPoly)\n def 
table_m(image):\n table = (image\n .select('precipitation')\n .reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 'rm_TRMM_pr_'+str(years[0])+'_'+str(years[len(years)-1])+'_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n #print ('buffered pts by:' + str(buf) + ' for TRMM')\n\n elif poly > 0:\n \n def table_m(image):\n table = (image\n .select('precipitation')\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 'rm_TRMM_pr_'+str(years[0])+'_'+str(years[len(years)-1])+'_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('spatial mean in poly: no buffer for TRMM')\n\n else:\n def table_m(image):\n table = (image\n .select('precipitation')\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 'rm_TRMM_pr_'+str(years[0])+'_'+str(years[len(years)-1])+'_pts1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n #print('value at point: no buffer for TRMM')" ]
[ "0.5634143", "0.54065627", "0.53806704", "0.5306783", "0.5197246", "0.5181283", "0.5181283", "0.51676494", "0.51546735", "0.51468223", "0.51246095", "0.51028955", "0.5069277", "0.50182337", "0.5016101", "0.49582988", "0.49270567", "0.49263218", "0.48897153", "0.483824", "0.48085865", "0.47966108", "0.47846863", "0.47768712", "0.47699732", "0.47590715", "0.4735782", "0.4734561", "0.47302622", "0.47026685", "0.47008365", "0.46992663", "0.4687681", "0.46832028", "0.46584135", "0.46452528", "0.46371725", "0.46176738", "0.45939484", "0.45923042", "0.45886052", "0.45877647", "0.45777723", "0.4575565", "0.4564497", "0.45543018", "0.45502958", "0.45403495", "0.4529948", "0.45292947", "0.45234317", "0.4514992", "0.45125306", "0.45105788", "0.4501429", "0.4500113", "0.44971952", "0.4493883", "0.44919556", "0.4491124", "0.4491124", "0.44715738", "0.44559374", "0.44519052", "0.4451002", "0.44451082", "0.44443184", "0.44425997", "0.44273734", "0.44248188", "0.44177008", "0.44168526", "0.44114968", "0.44103748", "0.43899968", "0.4389862", "0.4382991", "0.43771803", "0.436527", "0.4355745", "0.43520635", "0.4351423", "0.43455103", "0.4343915", "0.4342159", "0.43378538", "0.4331657", "0.43315247", "0.43267372", "0.43247005", "0.43142116", "0.43100485", "0.43074104", "0.4303684", "0.43032026", "0.42957965", "0.4290266", "0.428991", "0.4285167", "0.42832065", "0.42830414" ]
0.0
-1
Send boundary values EMonth field (EMonth= 13)
def test_20(self): assert 'False' == Api.requestBlock('test-20')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_Month(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Month', value)", "def effective_invoice_month(self) -> pulumi.Input['GoogleTypeDateArgs']:\n return pulumi.get(self, \"effective_invoice_month\")", "def _build_efem_month(self,list_efem_month):\n\n def add_value_dict(key, dict_data, msj=None):\n if key in dict_data.keys():\n dict_data[key] = dict_data[key] + [msj]\n else:\n dict_data[key] = [msj]\n return dict_data\n\n result_data = dict()\n for efem in list_efem_month:\n datetime_efem = efem.date_efem\n result_data = add_value_dict(str(datetime_efem.day), result_data, efem.msj_efem)\n return result_data", "def month_adj():\n\n user_id = current_identity.id\n dayDate = request.form.get(\"dayDate\")\n newVal = request.form.get(\"newVal\")\n elemName = request.form.get(\"ElemName\")\n\n day = parse_day(dayDate)\n month = parse_month(dayDate)\n year = parse_year(dayDate)\n\n commit_adj_to_db(user_id, day, month, year, newVal, elemName)\n\n response = {\"status\" : \"ok\"}\n\n return jsonify(response)", "def set_start_month(self, month):\n return self.form.set_value(\"output period \\\"month from\\\"\", MONTHS[month - 1])", "def set_finish_month(self, month):\n return self.form.set_value(\"output period \\\"month to\\\"\", MONTHS[month - 1])", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"MTM\", MTM_MINS_COST)\n self.bill.add_fixed_cost(MTM_MONTHLY_FEE)", "def month(self):\n return 0", "def month(self):\n return 0", "def month(self, month):\n\n self._month = month", "def setMonth(self, *args):\n return _libsbml.Date_setMonth(self, *args)", "def monthly_schedule(self,month):\n response = requests.get(f'http://company.com/{self.lname}/{month}')\n if response.ok:\n return response.text\n else:\n return 'Bad Response!'", "def get_month():\n return handle_invalid_inputs(question_3, months)", "def test_monthly_report_error(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n res = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.expense)\n self.assertEqual(res.status_code, 201)\n rv = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token), data=\n {'name': 'soda', 'amount': 200, 'date_of_expense': '10-01-2021'})\n month = 4567\n res = self.client().get(f'/monthly_report?month={month}', headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(res.status_code, 400)\n results = json.loads(res.data)\n self.assertEqual(results['message'], f'The date {month} does not match the format MM-YYYY')", "def set_month(self, month):\r\n\t\tmonths = ['Enero', 'Febrero', 'Marzo', 'Abril',\r\n\t\t\t\t 'Mayo', 'Junio', 'Julio', 'Agosto'\r\n\t\t\t\t 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre']\r\n\t\tfor i in range(12):\r\n\t\t\tif month == i: \r\n\t\t\t\treturn months[i-1]", "def set_month(self, month):\n # if the sting value of month is correct then we transform it into an\n # integer\n if isinstance(month, str):\n if month in MONTH_STR:\n month_int = MONTH_STR.index(month) + 1\n else:\n raise ValueError(\"Weekday as a string can only take the value {}\".format(MONTH_STR))\n else:\n month_int = month\n\n # Check if month_int in good range\n if month_int not in range(1, 13):\n raise ValueError(\"Month value must be in range [1..12] but is {}\".format(month_int))\n\n # First we separate the tens and the digit\n tens, digit = 
divmod(int(month_int), 10)\n\n # Then we add them in a single int\n reg_value = (tens << 4) | digit\n\n # The we add it to the register\n self.__write_register(_REGISTER_MONTH, reg_value)", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"TERM\", TERM_MINS_COST)\n self.bill.add_fixed_cost(TERM_MONTHLY_FEE)\n if not ((self.end.month < month and self.end.year <= year) or\n self.end.year < year):\n # refresh included minutes and SMSs\n self.bill.add_free_minutes((-1) * self.bill.free_min)\n if self.start.month == month and self.start.year == year:\n # if first month, add term deposit to bill.\n self.bill.add_fixed_cost(TERM_DEPOSIT)\n else:\n self._carried_term = True", "def month():\n \n # get month entered by user - if no month entered default to current month\n month = request.args.get(\"month\", datetime.now().strftime(\"%Y-%m\"))\n \n # get budget data for month as a dictionary\n data = budget_data(month)\n \n return json.dumps(data)", "def month_days():\n user_id = current_identity.id\n month = request.args.get(\"month\")\n year = request.args.get(\"year\")\n \n if user_id:\n \n response = {\n \"status\": None,\n \"dateArray\" : []\n }\n possibleDateArr = format_dayArray(month, year, user_id)\n\n if not possibleDateArr:\n return jsonify({\"status\" : \"error\"})\n\n response[\"dateArray\"] = possibleDateArr\n \n response[\"status\"] = \"ok\"\n \n return jsonify(response)", "def test_invalid_month_orig(self):\n year, month, error = clean_year_month(2014, 3, 13)\n self.assertEqual(year, 2014)\n self.assertEqual(month, timezone.localtime(timezone.now()).month)\n self.assertEqual(error, ERROR)", "def eme_500day_2018(self):\n a = de421_planets()\n self.FBseq = a.eme_500day_2018(self.MARS_DIST)\n self.N = len(self.FBseq)\n self.partials = 'eme_500day_2018_single'\n self.finals = 'eme_500day_2018_multi'", "def test_valid_month(self):\n ar_month = self.ar[2009][11]\n self.assertTrue(isinstance(ar_month, awstats_reader.AwstatsMonth))", "def month_content():\n \n user_id = current_identity.id\n month = request.form.get(\"month\")\n year = request.form.get(\"year\")\n \n if not is_month(month, year, user_id):\n establish_month(month, year, user_id)\n\n dayContentDict = format_day_content(month, year, user_id)\n\n d = collections.defaultdict(dict)\n response = {\n \"status\": \"ok\",\n \"dayContent\" : d\n }\n \n if dayContentDict:\n response[\"dayContent\"] = dayContentDict\n \n return jsonify(response)", "def eomday(year, month):\n if hasattr(year, '__iter__'):\n assert hasattr(month, '__iter__')\n return np.array([calendar.monthrange(y, m)[-1] for y, m in zip(year, month)])\n else:\n return calendar.monthrange(year, month)[-1]", "def get_end_month(month):\n return datetime(2020, month, 28)", "def day_of_months(self, year, month, day):\n if month.isdigit() and int(month) < 13:\n if (int(month) in [1,3,5,7,8,10,12]):\n Input.condition(self, year, month, day, '31', '')\n elif (int(month) in [4,6,9,11]):\n Input.condition(self, year, month, day, '30', '')\n elif int(month) == 2:\n if (((int(year) % 4) == 0 and\n not (int(year) % 100) == 0)\n or (int(year) % 400) == 0):\n if int(year) == 1712 and int(day) == 30:\n \"\"\"Easter Egg.\"\"\"\n Input.condition(self, year, month, day, '30','')\n Input.special_case(self)\n else:\n Input.condition(self, year, month, day, '29',' ')\n else:\n Input.condition(self, year, month, day, '28', '29')\n else:\n Input.change_display(self, self.entries[4],\n 'Enter month between 1-12 or month name')", "def 
_set_month(self, month) -> bool:\n if self.set_start_month(month) is False:\n return False\n return self.set_finish_month(month)", "def day_of_month(self, day_of_month):\n\n self._day_of_month = day_of_month", "def month(m=0):\n if not 1 <= m <= 12:\n # throw error\n return jsonify({}), status.HTTP_400_BAD_REQUEST\n holidays = Holidays.query.filter_by(month=m).all()\n\n this_month = {}\n for h in holidays:\n this_month[h.day] = this_month.get(h.day, []) + [h.holiday]\n\n return jsonify({\"month\": m, \"holidays\": this_month})", "def test_bad_quarter_or_month(self):\n update_json = {\n \"cgac_code\": \"020\",\n \"is_quarter\": True,\n \"award_financial\": \"updated.csv\",\n \"reporting_period_start_date\": \"12/2016\",\n \"reporting_period_end_date\": \"13/2016\"}\n update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n headers={\"x-session-id\": self.session_id}, expect_errors=True)\n self.assertEqual(update_response.status_code, 400)\n self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])\n\n update_json = {\n # make sure date checks work as expected for an existing submission\n \"existing_submission_id\": self.status_check_submission_id,\n \"award_financial\": \"updated.csv\",\n \"reporting_period_start_date\": \"AB/2016\",\n \"reporting_period_end_date\": \"CD/2016\"}\n update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n headers={\"x-session-id\": self.session_id}, expect_errors=True)\n self.assertEqual(update_response.status_code, 400)\n self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])\n\n update_json = {\n \"cgac_code\": \"020\",\n \"is_quarter\": True,\n \"award_financial\": \"updated.csv\",\n \"reporting_period_start_date\": \"Q1/ABCD\",\n \"reporting_period_end_date\": \"Q2/2016\"}\n update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n headers={\"x-session-id\": self.session_id}, expect_errors=True)\n self.assertEqual(update_response.status_code, 400)\n self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n raise NotImplementedError", "def process_month(self):\n if self.balance > 0:\n # if positive balance, convert APR to monthly multiplicative factor\n monthly_factor = pow(1 + self.apr, 1 / 12)\n self.balance *= monthly_factor", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"PREPAID\", PREPAID_MINS_COST)\n if self.balance > (-10.0):\n self.balance += (-25.0)\n self.bill.add_fixed_cost(self.balance)", "def month_digit(self, year, month, day):\n if (month[0] == '0' and len(month) > 2 or\n day.isdigit() and day[0] == '0' and len(day) > 2):\n Input.change_display(self, self.entries[4],\n 'Remove preceding zeros')\n else:\n Input.day_of_months(self, year, month, day.lstrip('0'))", "def setIndexMonth(self,index):\n self.indexMonth = index", "def add_month(cab_data):\n return cab_data.assign(month=lambda x: x.time.dt.month)", "def aMonth(self):\n return self._amon", "def test_cells_charts_post_chart_value_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) \n result = self.api.cells_charts_post_chart_value_axis(name, sheet_name,chartIndex,axis, folder=folder)\n self.assertEqual(result.code,200)\n pass", "def parse_month(self, response):\n 
month, year = response.meta[\"month\"], response.meta[\"year\"]\n dates = response.json().get(\"data\", [])\n\n for gazette_name in dates.values():\n date = re.search(self.DATE_REGEX, gazette_name).group()\n\n if date is None:\n continue\n\n date = parse(date, settings={\"DATE_ORDER\": \"DMY\"}).date()\n\n if date < self.start_date:\n continue\n\n url = f\"{self.GAZETTE_URL}?dir={year}/{month}/{gazette_name}\"\n yield Request(url, callback=self.parse_gazette)", "def monthly_day(self, monthly_day):\n\n self._monthly_day = monthly_day", "def calendar_month(year, month):\n start = datetime.datetime(year, month, 1)\n if month == 12:\n end = datetime.datetime(year+1, 1, 1)\n else:\n end = datetime.datetime(year, month+1, 1)\n print(start)\n print(end)\n return start, end", "def test_cells_charts_post_chart_series_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) \n result = self.api.cells_charts_post_chart_series_axis(name, sheet_name,chartIndex,axis, folder=folder)\n self.assertEqual(result.code,200)\n pass", "def _get_months(self, cr, uid, context):\n months=[(str(n),str(n)) for n in range(1,13)]\n return months", "def E_Dynamic_MavkoEtAl2009(rhob,DTS,PR):\n E = (2*(rhob*1000)*((304800/DTS)**2)*(1+PR))/1000000\n return E", "def _get_eur_gbp_last_month(self) -> None:\n last_month = _last_month()\n data = _get_ecb_data(FREQUENCY_MONTHLY, last_month, last_month)\n\n self.eur_gbp_last_month = _get_latest_ecb_rate(data)", "def get_month(x):\n return x[\"SALE DATE\"].month", "def test_parameter_checking(self):\n params = ['12-2015', '12-15', '2015_12', '2015.12', '12.2015', '12/2015', '12/15', '2015-00', '2015-13']\n # list of examples of possible wrong month parameters\n for month in params:\n with self.assertRaises(ValueError) as cm: # ValueError should be raised with proper message\n download_data(month)\n self.assertEqual('Month parameter should be in form `yyyy-mm`', cm.exception.args[0], msg=month)\n # check if error message was as expected", "def get_api_info_option_month_and_underlying_month_r(\n self,\n\n headers: t.Dict[str, str] = None,\n body: JSONEncodable = None,\n fields_data: t.Dict[str, str] = None,\n **kwargs\n ):\n r = self._do_call(\n method='GET',\n url=f'{self.API_BASE_URL}/info/option-month-and-underlying-month',\n headers=headers,\n body=body,\n fields=fields_data,\n **kwargs\n )\n return r", "def _set_value_date_32A(self, val):\n self.swift_obj.ValueDateCurrencyInterbankSettledAmount = val\n self.swift_obj.ValueDateCurrencyInterbankSettledAmount.swiftTag = \"32A\"", "def month(self):\n return self.__month", "def test_date_accept_this_month(self):\n spi_search = \"find date this month\"\n inv_search = \"year:\" + datetime.datetime.strftime(datetime.datetime.today(), '%Y-%m')\n self._compare_searches(inv_search, spi_search)", "def _boundary_value(self) -> str:\n ...", "def set_value(self, value):\n\t\tassert len(value.split('-')) > 1, 'A minimum of year and month are needed.'\n\t\tself.values = map(int, value.split('-'))", "def test_cells_charts_post_chart_second_value_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) \n result = self.api.cells_charts_post_chart_second_value_axis(name, sheet_name,chartIndex, axis, 
folder=folder)\n self.assertEqual(result.code,200)\n pass", "def calendarPageChanged(self, year, month):\n success = self.porker_thread.extendDates(datetime.date(year, month, 1))\n #if not success:\n # self.alertMessage(\"Failure!\",\"Unable to extend the thread's dates for some reason.\")\n #efficiency = self.porker_thread.getEfficiencyFor(self.getActiveDate())\n #self.porker_thread.sentDatesData = False", "def take_monthly_agreements():\n\n #collect data from form template\n chore_id = request.form.get(\"chore_id\")\n date_monthly = request.form.get(\"date_monthly\")\n\n #add agreements to database\n dbwrangler.add_commitment(date_monthly, chore_id)\n\n #redirect to form for further agreements\n return redirect(\"/takeachore\")", "def test_put_wrong_data(self):\n new_data = {\"fromMonth\": \"another\"}\n response = self.client.put(self.url + str(self.current_data[-1]['id']) + '/', data=json.dumps(new_data),\n content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())", "def GetAvailabilityForMonth(year, month, VerkehrstageHex):\r\n\t# Get Last Day of the Month in Python\r\n\t# http://stackoverflow.com/questions/42950/get-last-day-of-the-month-in-python\r\n\t\"\"\"\r\n\tmonthrange(year, month):\r\n Returns weekday of first day of the month and number of days in month\r\n \"\"\"\r\n\tDayRange = calendar.monthrange(year,month)\r\n\tMonthEndDay = DayRange[1]\r\n\r\n\tStartDate = date(year,month,1)\r\n\tEndDate = date(year,month,MonthEndDay)\t\t\r\n\treturn GetAvailabilityBetweenDates(StartDate, EndDate, VerkehrstageHex)", "def _check_date(self, cr, uid,ids, context=None):\n for act in self.browse(cr, uid, ids, context):\n line_date = datetime.strptime(str(act.date), \"%Y-%m-%d\")\n if int(line_date.month)!=int(act.enrich_id.month) or int(line_date.year)!=int(act.enrich_id.year):\n raise osv.except_osv(_('ValidateError'), _(\"Payment Enrich Date Must Be Within Enrich Month And Year %s - %s\")%(act.enrich_id.month,act.enrich_id.year))\n return True", "def mm(self):\n return '%02d' % self._month", "def __getMonth(self,xml):\n\t\t#TODO: Monat, Jahr, SollStunden, Urlaub,ZeitdiffAkt, ZeitdiffVor, erweitert\n\t\tdayTypeMapping = {'Arbeitstag': DayType.work,\n\t\t\t\t\t\t'Wochenende': DayType.weekend,\n\t\t\t\t\t\t'Urlaub': DayType.vacation,\n\t\t\t\t\t\t'Feiertag': DayType.holiday,\n\t\t\t\t\t\t'Krankheit': DayType.illness,\n\t\t\t\t\t\t'Überstunden genommen': DayType.overtime_free,\n\t\t\t\t\t\t'Dienstreise': DayType.business_trip,\n\t\t\t\t\t\t'Freistellung': DayType.unpaid_free}\n\t\tworkdays = {}\n\t\tmonthNum = int(xml.find('Monat').text)\n\t\tyearNum = int(xml.find('Jahr').text)\n\t\tif xml.find('Erweitert').text == 'true':\n\t\t\textendedFormat = True\n\t\telse:\n\t\t\textendedFormat = False\n\t\tfor panday in xml.findall('Tag'):\n\t\t\t# parse\n\t\t\tnumday = int(panday.find('Datum').text)\n\t\t\tdaytype = panday.find('TagesTyp').text\n\t\t\tdescription = panday.find('Bemerkung').text\n\t\t\tmorning = panday.find('Vormittag').text\n\t\t\tafternoon = panday.find('Nachmittag').text\n\t\t\tif extendedFormat:\n\t\t\t\tthird = panday.find('Dritte').text\n\t\t\t\tfourth = panday.find('Vierte').text\n\t\t\telse:\n\t\t\t\tthird = None\n\t\t\t\tfourth = None\n\t\t\t# convert\n\t\t\tdaytype = dayTypeMapping[daytype]\n\t\t\tmorning = self. _parsePANTimeRange(morning)\n\t\t\tafternoon = self. _parsePANTimeRange(afternoon)\n\t\t\tthird = self. _parsePANTimeRange(third)\n\t\t\tfourth = self. 
_parsePANTimeRange(fourth)\t\t\t\n\t\t\ttimeblocks = [morning, afternoon, third, fourth]\n\t\t\ttimeblocks = list(filter(None, timeblocks))\n\t\t\t# save\n\t\t\tday = WorkDay(daytype, description, timeblocks)\n\t\t\tworkdays[numday] = day\n\t\tmonth = WorkMonth(yearNum,monthNum,workdays)\n\t\treturn month", "def __get_step1_end_month(yaml_content: dict) -> str:\n\n end_month = None\n\n try:\n end_month = yaml_content['step1.end_month']\n except KeyError as exc:\n print(ConfigurationFactory.__get_key_missing_error_message(exc))\n\n return end_month", "def comp_month_range():\n word_months = _(\"months\")\n word_month = _(\"month\")\n COMP_MONTH_LIST = (\n (12, '- 12 ' + word_months),\n (11, '- 11 ' + word_months),\n (10, '- 10 ' + word_months),\n (9, '- 9 ' + word_months),\n (8, '- 8 ' + word_months),\n (7, '- 7 ' + word_months),\n (6, '- 6 ' + word_months),\n (5, '- 5 ' + word_months),\n (4, '- 4 ' + word_months),\n (3, '- 3 ' + word_months),\n (2, '- 2 ' + word_months),\n (1, '- 1 ' + word_month),\n )\n return COMP_MONTH_LIST", "def on_change_mission_order(self, cr, uid, ids , mission_order_id , context=None):\n if context is None:\n context = {}\n res ={}\n result = []\n \n mission_order = self.pool.get('hr.employee.mission').browse(cr,uid,mission_order_id )\n \n return {'value': { 'start_grant_date': mission_order.start_date, \n 'end_grant_date': mission_order.end_date }}", "def get_main_date(self, kwargs):\n month = int(kwargs['month'])\n if month >= 9 and month <= 12:\n main_date = datetime(int(kwargs['year_from']), month, 1)\n else:\n main_date = datetime(int(kwargs['year_to']), month, 1)\n return main_date", "def month(self):\n return self._months", "def set_statement_received_date(self, received_date):\n if received_date != \"\":\n self.set_value_into_input_field(self.received_date_locator, received_date)\n else:\n received_date_to_set = self.first_day_of_current_month\n self.set_value_into_input_field(self.received_date_locator, received_date_to_set)", "def pMonth(self):\n return self._pmon", "def months(self, months):\n allowed_values = [\"january\", \"feburary\", \"march\", \"april\", \"may\", \"june\", \"july\", \"august\", \"september\", \"october\", \"november\", \"december\"] # noqa: E501\n if not set(months).issubset(set(allowed_values)):\n raise ValueError(\n \"Invalid values for `months` [{0}], must be a subset of [{1}]\" # noqa: E501\n .format(\", \".join(map(str, set(months) - set(allowed_values))), # noqa: E501\n \", \".join(map(str, allowed_values)))\n )\n\n self._months = months", "def _handleRequestPostChargeParameters(self, data):\r\n print(\"\\\"Request Post Charge Parameters\\\" received\")\r\n message = self.whitebeet.v2gParseRequestPostChargeParameters(data)\r\n if 'dc' in message:\r\n print(\"SOC: {}%\".format(message['dc']['soc']))\r\n try:\r\n self.whitebeet.v2gSetDcPostChargeParameters(0, 1, int(self.charger.getEvsePresentVoltage()))\r\n except Warning as e:\r\n print(\"Warning: {}\".format(e))\r\n except ConnectionError as e:\r\n print(\"ConnectionError: {}\".format(e))", "def __init__(__self__, *,\n day_of_month: pulumi.Input[int],\n hand_off_time: pulumi.Input[str]):\n pulumi.set(__self__, \"day_of_month\", day_of_month)\n pulumi.set(__self__, \"hand_off_time\", hand_off_time)", "def validate_emprestimo_post_body(request_body: dict):\n required_fields = [\n 'valor_nominal',\n 'taxa_juros',\n 'banco',\n 'nome_cliente'\n ]\n request_fields = request_body.keys()\n\n for current_required_field in required_fields:\n if current_required_field not in 
request_fields:\n raise MissingRequiredFields(code=400)\n\n if not isinstance(request_body.get('taxa_juros'), float):\n raise InvalidFieldType(code=400)\n\n if not isinstance(request_body.get('valor_nominal'), float):\n raise InvalidFieldType(code=400)\n\n if not isinstance(request_body.get('banco'), str):\n raise InvalidFieldType(code=400)\n\n if not isinstance(request_body.get('nome_cliente'), str):\n raise InvalidFieldType(code=400)\n\n if request_body.get('valor_nominal') <= 0 or request_body.get('taxa_juros') <= 0:\n raise InvalidFieldValue(code=400)\n\n return", "def cc_expire_months():\n months = []\n for month in range(1, 13):\n if len(str(month)) == 1:\n numeric = '0' + str(month)\n else:\n numeric = str(month)\n months.append((numeric, datetime.date(2009, month, 1).strftime('%B')))\n return months", "def check_dates(self, kwargs):\n month = int(kwargs['month'])\n if (int(kwargs['year_from']) >= int(kwargs['year_to'])) or \\\n (month < 1 or month > 12):\n # kdyby datumy byly nejake dodrbane, tak se sverime do pece autoredirectu\n return HttpResponseRedirect(reverse('admin_redir'))\n return None", "def month(self):\n return self._month", "def month(self):\n return self._month", "def problem3_3(month, day, year):\r\n \r\n months = (\"January\", \"February\", \"March\",\"April\",\"May\",\"June\",\"July\",\\\r\n \"August\",\"September\",\"October\",\"November\",\"December\")\r\n month = month - 1 \r\n Month_prin = months[month]\r\n Date_print = Month_prin + \" \" + str(day) + \",\" + \" \" +str(year)\r\n print(Date_print)", "def set_start():\n app.logger.debug(\"Got a JSON set_start post\");\n global dateFormat\n reply = {}\n\n flask.session[\"bStart\"] = request.form[\"bStart\"]\n flask.session[\"bLength\"] = request.form[\"bLength\"]\n bLength = int(request.form[\"bLength\"])\n try:\n start = arrow.get(flask.session[\"bStart\"], \"YYYY/MM/DD HH:mm\")\n except:\n reply[\"message\"] = \"Bad date Time.\"\n return jsonify(result=reply)\n \n brevet = AcpBrevet(bLength, start)\n open_limit = brevet.calc_open(0,bLength)\n close_limit = brevet.calc_close(0,bLength)\n\n reply[\"message\"] = \"Start of event and length set.\"\n reply[\"open\"] = open_limit.format(dateFormat)\n reply[\"close\"] = close_limit.format(dateFormat)\n return jsonify(result=reply)", "def EDATE(start_date, months):\n return DATE(start_date.year, start_date.month + months, start_date.day)", "def month(self, month: str):\n return get_from_list(self.months, \"month\", month)", "def condition(self, year, month, day, lastday, leapday):\n try:\n if len(day) == 0 or int(day) > int(lastday):\n if int(month) == 2 and day == leapday:\n Input.change_display(self, self.entries[4],\n 'Not a leap year')\n else:\n Input.change_display(self, self.entries[4],\n 'Enter day between 1-' + lastday)\n elif int(day) <= int(lastday):\n Input.change_display(self, self.entries[3], #Weekday message\n Output.message(self, year, month, day))\n except:\n Input.change_display(self, self.entries[4],\n 'Enter day between 1-' + lastday)", "def _build_data_result(self, efem, list_efem):\n result_data = dict()\n result_data[CURRENT_DAY] = list(efem.values())[0]['msj_efem']\n result_data[MONTH] = dict()\n result_data[MONTH] = self._build_efem_month(list_efem)\n return result_data", "def __next_month(self, year, month):\n year, month = (year, month + 1) if month < 12 else (year + 1, 1)\n\n return self.create(year, month)", "def get_currency_exchange_monthly(self, from_symbol, to_symbol, outputsize='compact'):\n _FUNCTION_KEY = 'FX_MONTHLY'\n return 
_FUNCTION_KEY, \"Time Series FX (Monthly)\", 'Meta Data'", "def send_mail(month: str, data: list):\n\n V2RayLogger.debug('SMTP server: {0}:{1}.'.format(Config.get('mail_host'), Config.get('mail_port')))\n smtp = smtplib.SMTP_SSL(Config.get('mail_host'), Config.get('mail_port'))\n V2RayLogger.debug('SMTP login with: {0}:{1}.'.format(Config.get('mail_user'), Config.get('mail_pass')))\n smtp.login(Config.get('mail_user'), Config.get('mail_pass'))\n V2RayLogger.debug('SMTP login successful.')\n\n for row in data:\n V2RayLogger.debug('Send email: {0}:{1}.'.format(row[0], row[1]))\n message = '<tr align=left><th align=\"left\">{0:30s}</th><th align=\"left\">{1:9s}</th></tr>\\n'.format(\n row[0], row[1])\n message = MIMEText(message, 'html')\n message['Subject'] = Header(Config.get('mail_subject') + ': {0}'.format(month))\n message['From'] = Config.get('mail_user')\n message['To'] = row[0]\n\n smtp.sendmail(Config.get('mail_user'), row[0], message.as_string())\n V2RayLogger.info('Send traffic to: {0}.'.format(row[0]))", "def set_M(self, morb):\n if not (0 < morb <= 12):\n raise ValueError\n self.set_par('Df_cnf_Fock', '')\n self.set_par('MORB', morb)", "def news_for_month(self):\n\n raise NotImplementedError", "def MONTH(\n serial_number: func_xltypes.XlNumber\n) -> func_xltypes.XlNumber:\n\n date = utils.number_to_datetime(int(serial_number))\n return int(date.strftime(\"%m\"))", "def getSpecificMonth(self, month, year):\n try:\n specificMonth = []\n args = [month, year]\n months = [\"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"June\",\n \"July\", \"Aug\", \"Sept\", \"Oct\", \"Nov\", \"Dec\"]\n result_args = self.cursor.callproc(\"monthly_prev_months\", args)\n for result in self.cursor.stored_results():\n for r in result:\n specificMonth = [months[r[0]-1] +\n \" \" + str(r[1]), str(r[2])]\n return list(specificMonth)\n\n except Exception as e:\n return \"Error:\" + e", "def changeDisplayedMonth(self):\n #ho bisogno di sapere qual รจ il mese mostrato\n currentMonth = self.indexMonth\n currentYear = self.currentYear\n\n sender = self.sender().objectName()\n if sender == 'bot_next':\n # if currentMonth < 11:\n if self.indexMonth < 11:\n self.indexMonth += 1\n self.setBaseDate(self.baseDate.addMonths(1))\n else:\n self.indexMonth = 0\n self.setCurrentYear(currentYear+1)\n # print('baseDate before', self.baseDate)\n self.setBaseDate(self.baseDate.addMonths(1))\n # print('baseDate after', self.baseDate)\n # print('new Year: ', self.currentYear)\n\n elif sender == 'bot_prev':\n # if currentMonth > 0:\n if self.indexMonth > 0:\n self.indexMonth -= 1\n self.setBaseDate(self.baseDate.addMonths(-1))\n else:\n self.indexMonth = 11\n self.setCurrentYear(currentYear-1)\n self.setBaseDate(self.baseDate.addMonths(-1))\n # print('new Year: ', self.currentYear)\n if currentMonth != self.indexMonth:\n # print(f'currentPageChanged.emit({self.indexMonth})')\n self.currentPageChanged.emit(self.indexMonth)\n self.combo_mesi.setCurrentIndex(self.indexMonth)\n if currentYear != self.currentYear:\n # print('current year changed')\n self.setListaGiorniDellAnno(self.createDates(self.baseDate), self.indexMonth)", "def month(self) -> str:\r\n return self._month", "def Month(self):\n return self._fmon", "def test_get_occurrences_monthly_mid_month(self):\n print()\n print(\"Get occurrences of a monthly expense between:\")\n expense = BudgetExpense.objects.get(id = 100)\n start_date = expense.start_date\n end_date = start_date + timedelta(days = 40)\n print(start_date.strftime(\"%B %d, %y\")+\" and \"+end_date.strftime('%B 
%d, %y'))\n print(\"======================================\")\n result = get_anticipated_transaction_occurences(anticipated_transaction= expense, start_date = start_date, end_date = end_date)\n result_dates = []\n for current_expense in result.keys():\n print(current_expense)\n print(\"========================\")\n result_dates.extend(result.get(current_expense))\n for current_date in result_dates:\n print(\"Date: \"+current_date.strftime(\"%B %d, %y %T\"))\n print(\"======================\")\n print()\n date_1 = start_date\n days_in_month = monthrange(start_date.year, start_date.month)[1]\n date_2 = start_date + timedelta(days = days_in_month)\n \n self.assertEquals([date_1, date_2], result_dates)", "def test_fix_year_month_next_gt_12(self):\n # 23 - 10 = next query of 13\n year, month, error = clean_year_month(2014, 23, 10)\n self.assertEqual(year, 2015)\n self.assertEqual(month, 11)\n self.assertEqual(error, False)", "def EOMONTH(\n start_date: func_xltypes.XlDateTime,\n months: func_xltypes.XlNumber\n) -> func_xltypes.XlNumber:\n delta = relativedelta(months=int(months))\n edate = utils.number_to_datetime(int(start_date)) + delta\n\n if edate <= utils.EXCEL_EPOCH:\n raise xlerrors.NumExcelError(\n f\"Date result before {utils.EXCEL_EPOCH}\")\n\n eomonth = edate + relativedelta(day=31)\n\n return utils.datetime_to_number(eomonth)", "def mapMaufromByte(self, date, bytes):\n sMonth = date.strftime(self.config.MONTH_FORMAT)\n reKey = self.config.dau_keys_conf['mau'].format(month=sMonth)\n redis_cli = self.get_redis_cli()\n logging.debug('Save mau from bytes: %s' % reKey)\n redis_cli.set(reKey, bytes)", "def EOMONTH(start_date, months):\n return DATE(start_date.year, start_date.month + months + 1, 1) - datetime.timedelta(days=1)", "def evme_2021(self):\n a = de421_planets()\n self.FBseq = a.evme_2021(self.VENUS_DIST, self.MARS_DIST)\n self.N = len(self.FBseq)\n self.partials = 'evme_2021_single'\n self.finals = 'evme_2021_multi'", "def month_bounds(year, month):\n year = int(year)\n month = int(month)\n month_start = datetime.strptime('%s,%s,1' % (year, month),'%Y,%m,%d')\n # days_in_month returns a tuple(weekday, days) where\n # weekday is the eekday the month starts on and days is the number of days in the month\n days_in_month = calendar.monthrange(year,month)\n month_end = month_start + timedelta(days=days_in_month[1]-1)\n return (month_start, month_end)", "def whFixMonth(self, whAdr=0, whT=0, month=int(datetime.now().strftime(\"%m\"))):\n\t\twhFixM = False\n\t\tvalAdr = {\n\t\t\t1:{0:'\\x02\\xAA', 1:'\\x02\\xBB', 2:'\\x02\\xCC', 3:'\\x02\\xDD', 4:'\\x02\\xEE'},\n\t\t\t2:{0:'\\x02\\xFF', 1:'\\x03\\x10', 2:'\\x03\\x21', 3:'\\x03\\x32', 4:'\\x03\\x43'},\n\t\t\t3:{0:'\\x03\\x54', 1:'\\x03\\x65', 2:'\\x03\\x76', 3:'\\x03\\x87', 4:'\\x03\\x98'},\n\t\t\t4:{0:'\\x03\\xA9', 1:'\\x03\\xBA', 2:'\\x03\\xCB', 3:'\\x03\\xDC', 4:'\\x03\\xED'},\n\t\t\t5:{0:'\\x03\\xFE', 1:'\\x04\\x0F', 2:'\\x04\\x20', 3:'\\x04\\x31', 4:'\\x04\\x42'},\n\t\t\t6:{0:'\\x04\\x53', 1:'\\x04\\x64', 2:'\\x04\\x75', 3:'\\x04\\x86', 4:'\\x04\\x97'},\n\t\t\t7:{0:'\\x04\\xA8', 1:'\\x04\\xB9', 2:'\\x04\\xCA', 3:'\\x04\\xDB', 4:'\\x04\\xEC'},\n\t\t\t8:{0:'\\x04\\xFD', 1:'\\x05\\x0E', 2:'\\x05\\x1F', 3:'\\x05\\x30', 4:'\\x05\\x41'},\n\t\t\t9:{0:'\\x05\\x52', 1:'\\x05\\x63', 2:'\\x05\\x74', 3:'\\x05\\x85', 4:'\\x05\\x96'},\n\t\t\t10:{0:'\\x05\\xA7', 1:'\\x05\\xB8', 2:'\\x05\\xC9', 3:'\\x05\\xDA', 4:'\\x05\\xEB'},\n\t\t\t11:{0:'\\x05\\xFC', 1:'\\x06\\xD0', 2:'\\x06\\x1E', 3:'\\x06\\x2F', 4:'\\x06\\x40'},\n\t\t\t12:{0:'\\x06\\x51', 
1:'\\x06\\x62', 2:'\\x06\\x73', 3:'\\x06\\x84', 4:'\\x06\\x95'}\n\t\t}\n\t\twhFixMonthCmd = chr(whAdr) + '\\x06\\x02' + valAdr[month][whT] + '\\x10'\n\t\tlogging.info(u'Reading meter values at the start of the month: %s' % str(whAdr))\n\t\tans = self.cmdWR(whFixMonthCmd)\n\t\tif ans and self._whAnsCheck(whAdr, ans):\n\t\t\ttry:\n\t\t\t\twhFixMA = int(ans[2] + ans[1] + ans[4] + ans[3], 16) * 0.0005\n\t\t\t\twhFixMR = int(ans[10] + ans[9] + ans[12] + ans[11], 16) * 0.0005\n\t\t\t\twhFixM = {'A':whFixMA, 'R':whFixMR}\n\t\t\texcept Exception, e:\n\t\t\t\tlogging.error(u'Failed to read the stored meter values for the start of the month! Reason: %s' % e)\n\t\t\t\twhFixM = False\n\t\telse:\n\t\t\tlogging.error(u'Failed to read the stored start-of-month values of meter %s!' % str(whAdr))\n\t\t\twhFixM = False\n\t\treturn whFixM", "def test_invalid_dob(self):\n file = SimpleUploadedFile(\n \"test.csv\",\n b\"msisdn,facility code,id type,messaging consent,edd year,edd month,\"\n b\"edd day,dob year,dob month,dob day,baby dob year,baby dob month,\"\n b\"baby dob day\\n\"\n b\"+27820001001,123456,none,true,2021,2,3,1990,2,29,,,\\n\",\n )\n form = MomConnectImportForm(\n data={\"source\": \"MomConnect Import\"}, files={\"file\": file}\n )\n instance = form.save()\n self.assertEqual(instance.status, MomConnectImport.Status.ERROR)\n [error] = instance.errors.all()\n self.assertEqual(\n error.error,\n \"Failed validation: Invalid date of birth date, day is out of range for \"\n \"month\",\n )" ]
[ "0.5468577", "0.5368627", "0.5341315", "0.5172529", "0.50481325", "0.50441164", "0.5039868", "0.503057", "0.503057", "0.49822402", "0.49742725", "0.49465922", "0.4936865", "0.49367988", "0.49274474", "0.4890884", "0.48314846", "0.47565252", "0.47438386", "0.47341046", "0.47319353", "0.47260305", "0.47102505", "0.4701934", "0.4684143", "0.46801397", "0.46703222", "0.46560258", "0.4647345", "0.46386966", "0.4633785", "0.45887595", "0.4588009", "0.45811504", "0.4581142", "0.45596954", "0.45585263", "0.4548425", "0.45474517", "0.45410293", "0.45325446", "0.45048976", "0.45041913", "0.44976136", "0.44968033", "0.44938657", "0.44852555", "0.4472463", "0.44592482", "0.44587913", "0.44557276", "0.4440274", "0.44381502", "0.44331783", "0.4423051", "0.4420288", "0.43960202", "0.43944326", "0.4392661", "0.43839484", "0.4381434", "0.43769145", "0.43642813", "0.43598035", "0.4352795", "0.43451974", "0.4339125", "0.43389392", "0.43367323", "0.43359634", "0.43273383", "0.4326452", "0.4314729", "0.4312919", "0.43120927", "0.43120927", "0.4307294", "0.43066812", "0.42949066", "0.42864957", "0.42845935", "0.4278558", "0.42776832", "0.42747042", "0.42702314", "0.4268767", "0.4265623", "0.42621794", "0.42546865", "0.42536235", "0.42513457", "0.4250305", "0.42403927", "0.42401573", "0.42275086", "0.42267945", "0.42266995", "0.4226293", "0.4224144", "0.42128253", "0.42044854" ]
0.0
-1
Send boundary values EMonth field (EMonth= 00)
def test_21(self): assert 'False' == Api.requestBlock('test-21')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_Month(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Month', value)", "def month(self):\n return 0", "def month(self):\n return 0", "def effective_invoice_month(self) -> pulumi.Input['GoogleTypeDateArgs']:\n return pulumi.get(self, \"effective_invoice_month\")", "def _build_efem_month(self,list_efem_month):\n\n def add_value_dict(key, dict_data, msj=None):\n if key in dict_data.keys():\n dict_data[key] = dict_data[key] + [msj]\n else:\n dict_data[key] = [msj]\n return dict_data\n\n result_data = dict()\n for efem in list_efem_month:\n datetime_efem = efem.date_efem\n result_data = add_value_dict(str(datetime_efem.day), result_data, efem.msj_efem)\n return result_data", "def set_start_month(self, month):\n return self.form.set_value(\"output period \\\"month from\\\"\", MONTHS[month - 1])", "def month_adj():\n\n user_id = current_identity.id\n dayDate = request.form.get(\"dayDate\")\n newVal = request.form.get(\"newVal\")\n elemName = request.form.get(\"ElemName\")\n\n day = parse_day(dayDate)\n month = parse_month(dayDate)\n year = parse_year(dayDate)\n\n commit_adj_to_db(user_id, day, month, year, newVal, elemName)\n\n response = {\"status\" : \"ok\"}\n\n return jsonify(response)", "def setMonth(self, *args):\n return _libsbml.Date_setMonth(self, *args)", "def set_finish_month(self, month):\n return self.form.set_value(\"output period \\\"month to\\\"\", MONTHS[month - 1])", "def month(self, month):\n\n self._month = month", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"MTM\", MTM_MINS_COST)\n self.bill.add_fixed_cost(MTM_MONTHLY_FEE)", "def test_monthly_report_error(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n res = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.expense)\n self.assertEqual(res.status_code, 201)\n rv = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token), data=\n {'name': 'soda', 'amount': 200, 'date_of_expense': '10-01-2021'})\n month = 4567\n res = self.client().get(f'/monthly_report?month={month}', headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(res.status_code, 400)\n results = json.loads(res.data)\n self.assertEqual(results['message'], f'The date {month} does not match the format MM-YYYY')", "def month_digit(self, year, month, day):\n if (month[0] == '0' and len(month) > 2 or\n day.isdigit() and day[0] == '0' and len(day) > 2):\n Input.change_display(self, self.entries[4],\n 'Remove preceding zeros')\n else:\n Input.day_of_months(self, year, month, day.lstrip('0'))", "def set_month(self, month):\r\n\t\tmonths = ['Enero', 'Febrero', 'Marzo', 'Abril',\r\n\t\t\t\t 'Mayo', 'Junio', 'Julio', 'Agosto'\r\n\t\t\t\t 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre']\r\n\t\tfor i in range(12):\r\n\t\t\tif month == i: \r\n\t\t\t\treturn months[i-1]", "def get_month():\n return handle_invalid_inputs(question_3, months)", "def test_invalid_month_orig(self):\n year, month, error = clean_year_month(2014, 3, 13)\n self.assertEqual(year, 2014)\n self.assertEqual(month, timezone.localtime(timezone.now()).month)\n self.assertEqual(error, ERROR)", "def set_month(self, month):\n # if the sting value of month is correct then we transform it into an\n # integer\n if isinstance(month, str):\n if month in MONTH_STR:\n month_int = MONTH_STR.index(month) + 1\n else:\n raise ValueError(\"Weekday 
as a string can only take the value {}\".format(MONTH_STR))\n else:\n month_int = month\n\n # Check if month_int in good range\n if month_int not in range(1, 13):\n raise ValueError(\"Month value must be in range [1..12] but is {}\".format(month_int))\n\n # First we separate the tens and the digit\n tens, digit = divmod(int(month_int), 10)\n\n # Then we add them in a single int\n reg_value = (tens << 4) | digit\n\n # The we add it to the register\n self.__write_register(_REGISTER_MONTH, reg_value)", "def test_valid_month(self):\n ar_month = self.ar[2009][11]\n self.assertTrue(isinstance(ar_month, awstats_reader.AwstatsMonth))", "def month():\n \n # get month entered by user - if no month entered default to current month\n month = request.args.get(\"month\", datetime.now().strftime(\"%Y-%m\"))\n \n # get budget data for month as a dictionary\n data = budget_data(month)\n \n return json.dumps(data)", "def _set_month(self, month) -> bool:\n if self.set_start_month(month) is False:\n return False\n return self.set_finish_month(month)", "def eomday(year, month):\n if hasattr(year, '__iter__'):\n assert hasattr(month, '__iter__')\n return np.array([calendar.monthrange(y, m)[-1] for y, m in zip(year, month)])\n else:\n return calendar.monthrange(year, month)[-1]", "def process_month(self):\n if self.balance > 0:\n # if positive balance, convert APR to monthly multiplicative factor\n monthly_factor = pow(1 + self.apr, 1 / 12)\n self.balance *= monthly_factor", "def monthly_schedule(self,month):\n response = requests.get(f'http://company.com/{self.lname}/{month}')\n if response.ok:\n return response.text\n else:\n return 'Bad Response!'", "def month_days():\n user_id = current_identity.id\n month = request.args.get(\"month\")\n year = request.args.get(\"year\")\n \n if user_id:\n \n response = {\n \"status\": None,\n \"dateArray\" : []\n }\n possibleDateArr = format_dayArray(month, year, user_id)\n\n if not possibleDateArr:\n return jsonify({\"status\" : \"error\"})\n\n response[\"dateArray\"] = possibleDateArr\n \n response[\"status\"] = \"ok\"\n \n return jsonify(response)", "def mm(self):\n return '%02d' % self._month", "def day_of_month(self, day_of_month):\n\n self._day_of_month = day_of_month", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"TERM\", TERM_MINS_COST)\n self.bill.add_fixed_cost(TERM_MONTHLY_FEE)\n if not ((self.end.month < month and self.end.year <= year) or\n self.end.year < year):\n # refresh included minutes and SMSs\n self.bill.add_free_minutes((-1) * self.bill.free_min)\n if self.start.month == month and self.start.year == year:\n # if first month, add term deposit to bill.\n self.bill.add_fixed_cost(TERM_DEPOSIT)\n else:\n self._carried_term = True", "def month(m=0):\n if not 1 <= m <= 12:\n # throw error\n return jsonify({}), status.HTTP_400_BAD_REQUEST\n holidays = Holidays.query.filter_by(month=m).all()\n\n this_month = {}\n for h in holidays:\n this_month[h.day] = this_month.get(h.day, []) + [h.holiday]\n\n return jsonify({\"month\": m, \"holidays\": this_month})", "def eme_500day_2018(self):\n a = de421_planets()\n self.FBseq = a.eme_500day_2018(self.MARS_DIST)\n self.N = len(self.FBseq)\n self.partials = 'eme_500day_2018_single'\n self.finals = 'eme_500day_2018_multi'", "def month_content():\n \n user_id = current_identity.id\n month = request.form.get(\"month\")\n year = request.form.get(\"year\")\n \n if not is_month(month, year, user_id):\n establish_month(month, year, user_id)\n\n dayContentDict = 
format_day_content(month, year, user_id)\n\n d = collections.defaultdict(dict)\n response = {\n \"status\": \"ok\",\n \"dayContent\" : d\n }\n \n if dayContentDict:\n response[\"dayContent\"] = dayContentDict\n \n return jsonify(response)", "def aMonth(self):\n return self._amon", "def add_month(cab_data):\n return cab_data.assign(month=lambda x: x.time.dt.month)", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"PREPAID\", PREPAID_MINS_COST)\n if self.balance > (-10.0):\n self.balance += (-25.0)\n self.bill.add_fixed_cost(self.balance)", "def setIndexMonth(self,index):\n self.indexMonth = index", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n raise NotImplementedError", "def get_end_month(month):\n return datetime(2020, month, 28)", "def _get_eur_gbp_last_month(self) -> None:\n last_month = _last_month()\n data = _get_ecb_data(FREQUENCY_MONTHLY, last_month, last_month)\n\n self.eur_gbp_last_month = _get_latest_ecb_rate(data)", "def month(self):\n return self.__month", "def decrement_month(self):\n month: int = int(self.month)\n month -= 1\n if month == 0:\n month == 12\n year: int = int(self.year)\n year -= 1\n self.year = str(year)\n self.month = str(month)\n if len(self.month) == 1:\n self.month = \"0\" + self.month", "def test_put_wrong_data(self):\n new_data = {\"fromMonth\": \"another\"}\n response = self.client.put(self.url + str(self.current_data[-1]['id']) + '/', data=json.dumps(new_data),\n content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())", "def calendar_month(year, month):\n start = datetime.datetime(year, month, 1)\n if month == 12:\n end = datetime.datetime(year+1, 1, 1)\n else:\n end = datetime.datetime(year, month+1, 1)\n print(start)\n print(end)\n return start, end", "def set_value(self, value):\n\t\tassert len(value.split('-')) > 1, 'A minimum of year and month are needed.'\n\t\tself.values = map(int, value.split('-'))", "def _set_value_date_32A(self, val):\n self.swift_obj.ValueDateCurrencyInterbankSettledAmount = val\n self.swift_obj.ValueDateCurrencyInterbankSettledAmount.swiftTag = \"32A\"", "def test_march_plus_zero(self):\n month, year = get_month_from_offset(3, 2000, 0)\n self.assertEqual(month, 3)\n self.assertEqual(year, 2000)", "def monthly_day(self, monthly_day):\n\n self._monthly_day = monthly_day", "def __init__(__self__, *,\n day_of_month: pulumi.Input[int],\n hand_off_time: pulumi.Input[str]):\n pulumi.set(__self__, \"day_of_month\", day_of_month)\n pulumi.set(__self__, \"hand_off_time\", hand_off_time)", "def set_M(self, morb):\n if not (0 < morb <= 12):\n raise ValueError\n self.set_par('Df_cnf_Fock', '')\n self.set_par('MORB', morb)", "def test_parameter_checking(self):\n params = ['12-2015', '12-15', '2015_12', '2015.12', '12.2015', '12/2015', '12/15', '2015-00', '2015-13']\n # list of examples of possible wrong month parameters\n for month in params:\n with self.assertRaises(ValueError) as cm: # ValueError should be raised with proper message\n download_data(month)\n self.assertEqual('Month parameter should be in form `yyyy-mm`', cm.exception.args[0], msg=month)\n # check if error message was as expected", "def month(self):\n return self._months", "def pMonth(self):\n return self._pmon", "def __getMonth(self,xml):\n\t\t#TODO: Monat, Jahr, SollStunden, Urlaub,ZeitdiffAkt, ZeitdiffVor, erweitert\n\t\tdayTypeMapping = {'Arbeitstag': DayType.work,\n\t\t\t\t\t\t'Wochenende': 
DayType.weekend,\n\t\t\t\t\t\t'Urlaub': DayType.vacation,\n\t\t\t\t\t\t'Feiertag': DayType.holiday,\n\t\t\t\t\t\t'Krankheit': DayType.illness,\n\t\t\t\t\t\t'Überstunden genommen': DayType.overtime_free,\n\t\t\t\t\t\t'Dienstreise': DayType.business_trip,\n\t\t\t\t\t\t'Freistellung': DayType.unpaid_free}\n\t\tworkdays = {}\n\t\tmonthNum = int(xml.find('Monat').text)\n\t\tyearNum = int(xml.find('Jahr').text)\n\t\tif xml.find('Erweitert').text == 'true':\n\t\t\textendedFormat = True\n\t\telse:\n\t\t\textendedFormat = False\n\t\tfor panday in xml.findall('Tag'):\n\t\t\t# parse\n\t\t\tnumday = int(panday.find('Datum').text)\n\t\t\tdaytype = panday.find('TagesTyp').text\n\t\t\tdescription = panday.find('Bemerkung').text\n\t\t\tmorning = panday.find('Vormittag').text\n\t\t\tafternoon = panday.find('Nachmittag').text\n\t\t\tif extendedFormat:\n\t\t\t\tthird = panday.find('Dritte').text\n\t\t\t\tfourth = panday.find('Vierte').text\n\t\t\telse:\n\t\t\t\tthird = None\n\t\t\t\tfourth = None\n\t\t\t# convert\n\t\t\tdaytype = dayTypeMapping[daytype]\n\t\t\tmorning = self. _parsePANTimeRange(morning)\n\t\t\tafternoon = self. _parsePANTimeRange(afternoon)\n\t\t\tthird = self. _parsePANTimeRange(third)\n\t\t\tfourth = self. _parsePANTimeRange(fourth)\t\t\t\n\t\t\ttimeblocks = [morning, afternoon, third, fourth]\n\t\t\ttimeblocks = list(filter(None, timeblocks))\n\t\t\t# save\n\t\t\tday = WorkDay(daytype, description, timeblocks)\n\t\t\tworkdays[numday] = day\n\t\tmonth = WorkMonth(yearNum,monthNum,workdays)\n\t\treturn month",    "def month(self):\n        return self._month",    "def month(self):\n        return self._month",    "def set_statement_received_date(self, received_date):\n        if received_date != \"\":\n            self.set_value_into_input_field(self.received_date_locator, received_date)\n        else:\n            received_date_to_set = self.first_day_of_current_month\n            self.set_value_into_input_field(self.received_date_locator, received_date_to_set)",    "def __month(self):\n        return _VirtualColumn(\n            df_name=self.thisptr[\"df_name_\"],\n            operator=\"month\",\n            operand1=self,\n            operand2=None\n        )",    "def test_bad_quarter_or_month(self):\n        update_json = {\n            \"cgac_code\": \"020\",\n            \"is_quarter\": True,\n            \"award_financial\": \"updated.csv\",\n            \"reporting_period_start_date\": \"12/2016\",\n            \"reporting_period_end_date\": \"13/2016\"}\n        update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n                                             headers={\"x-session-id\": self.session_id}, expect_errors=True)\n        self.assertEqual(update_response.status_code, 400)\n        self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])\n\n        update_json = {\n            # make sure date checks work as expected for an existing submission\n            \"existing_submission_id\": self.status_check_submission_id,\n            \"award_financial\": \"updated.csv\",\n            \"reporting_period_start_date\": \"AB/2016\",\n            \"reporting_period_end_date\": \"CD/2016\"}\n        update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n                                             headers={\"x-session-id\": self.session_id}, expect_errors=True)\n        self.assertEqual(update_response.status_code, 400)\n        self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])\n\n        update_json = {\n            \"cgac_code\": \"020\",\n            \"is_quarter\": True,\n            \"award_financial\": \"updated.csv\",\n            \"reporting_period_start_date\": \"Q1/ABCD\",\n            \"reporting_period_end_date\": \"Q2/2016\"}\n        update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n                                             headers={\"x-session-id\": self.session_id}, expect_errors=True)\n        self.assertEqual(update_response.status_code, 400)\n        
self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])", "def fix_single_digit_month(connection):\n _update_date_by_regexp(connection=connection,\n regexp=\"^[0-9]{1}/[0-9]{1,2}/[0-9]{4}$\",\n new_value=\"CONCAT('0', cav.attribute_value)\")", "def get_main_date(self, kwargs):\n month = int(kwargs['month'])\n if month >= 9 and month <= 12:\n main_date = datetime(int(kwargs['year_from']), month, 1)\n else:\n main_date = datetime(int(kwargs['year_to']), month, 1)\n return main_date", "def setAnchorDateMonth(self, value):\n normalizedMonth = value - 1\n return self._set(anchorDateMonth=normalizedMonth)", "def day_of_months(self, year, month, day):\n if month.isdigit() and int(month) < 13:\n if (int(month) in [1,3,5,7,8,10,12]):\n Input.condition(self, year, month, day, '31', '')\n elif (int(month) in [4,6,9,11]):\n Input.condition(self, year, month, day, '30', '')\n elif int(month) == 2:\n if (((int(year) % 4) == 0 and\n not (int(year) % 100) == 0)\n or (int(year) % 400) == 0):\n if int(year) == 1712 and int(day) == 30:\n \"\"\"Easter Egg.\"\"\"\n Input.condition(self, year, month, day, '30','')\n Input.special_case(self)\n else:\n Input.condition(self, year, month, day, '29',' ')\n else:\n Input.condition(self, year, month, day, '28', '29')\n else:\n Input.change_display(self, self.entries[4],\n 'Enter month between 1-12 or month name')", "def test_invalid_out_of_bounds_year(self):\n year, month, error = clean_year_month(2014, 100000, 1)\n self.assertEqual(year, now.year)\n self.assertEqual(month, timezone.localtime(timezone.now()).month)\n self.assertEqual(error, ERROR)", "def test_cells_charts_post_chart_value_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) \n result = self.api.cells_charts_post_chart_value_axis(name, sheet_name,chartIndex,axis, folder=folder)\n self.assertEqual(result.code,200)\n pass", "def months(self, months):\n allowed_values = [\"january\", \"feburary\", \"march\", \"april\", \"may\", \"june\", \"july\", \"august\", \"september\", \"october\", \"november\", \"december\"] # noqa: E501\n if not set(months).issubset(set(allowed_values)):\n raise ValueError(\n \"Invalid values for `months` [{0}], must be a subset of [{1}]\" # noqa: E501\n .format(\", \".join(map(str, set(months) - set(allowed_values))), # noqa: E501\n \", \".join(map(str, allowed_values)))\n )\n\n self._months = months", "def test_date_accept_this_month(self):\n spi_search = \"find date this month\"\n inv_search = \"year:\" + datetime.datetime.strftime(datetime.datetime.today(), '%Y-%m')\n self._compare_searches(inv_search, spi_search)", "def Month(self):\n return self._fmon", "def sendM(self):\n startM = self.countMissionaryOnStart()\n if startM < 1:\n return None\n else:\n newStart = str(startM-1) + self.start[1:]\n newEnd = str(4-startM) + self.end[1:]\n return MissionaryState(newStart,newEnd,\"sendM\")", "def changeDisplayedMonth(self):\n #ho bisogno di sapere qual รจ il mese mostrato\n currentMonth = self.indexMonth\n currentYear = self.currentYear\n\n sender = self.sender().objectName()\n if sender == 'bot_next':\n # if currentMonth < 11:\n if self.indexMonth < 11:\n self.indexMonth += 1\n self.setBaseDate(self.baseDate.addMonths(1))\n else:\n self.indexMonth = 0\n self.setCurrentYear(currentYear+1)\n # print('baseDate before', self.baseDate)\n self.setBaseDate(self.baseDate.addMonths(1))\n # 
print('baseDate after', self.baseDate)\n # print('new Year: ', self.currentYear)\n\n elif sender == 'bot_prev':\n # if currentMonth > 0:\n if self.indexMonth > 0:\n self.indexMonth -= 1\n self.setBaseDate(self.baseDate.addMonths(-1))\n else:\n self.indexMonth = 11\n self.setCurrentYear(currentYear-1)\n self.setBaseDate(self.baseDate.addMonths(-1))\n # print('new Year: ', self.currentYear)\n if currentMonth != self.indexMonth:\n # print(f'currentPageChanged.emit({self.indexMonth})')\n self.currentPageChanged.emit(self.indexMonth)\n self.combo_mesi.setCurrentIndex(self.indexMonth)\n if currentYear != self.currentYear:\n # print('current year changed')\n self.setListaGiorniDellAnno(self.createDates(self.baseDate), self.indexMonth)", "def cc_expire_months():\n months = []\n for month in range(1, 13):\n if len(str(month)) == 1:\n numeric = '0' + str(month)\n else:\n numeric = str(month)\n months.append((numeric, datetime.date(2009, month, 1).strftime('%B')))\n return months", "def GetAvailabilityForMonth(year, month, VerkehrstageHex):\r\n\t# Get Last Day of the Month in Python\r\n\t# http://stackoverflow.com/questions/42950/get-last-day-of-the-month-in-python\r\n\t\"\"\"\r\n\tmonthrange(year, month):\r\n Returns weekday of first day of the month and number of days in month\r\n \"\"\"\r\n\tDayRange = calendar.monthrange(year,month)\r\n\tMonthEndDay = DayRange[1]\r\n\r\n\tStartDate = date(year,month,1)\r\n\tEndDate = date(year,month,MonthEndDay)\t\t\r\n\treturn GetAvailabilityBetweenDates(StartDate, EndDate, VerkehrstageHex)", "def get_api_info_option_month_and_underlying_month_r(\n self,\n\n headers: t.Dict[str, str] = None,\n body: JSONEncodable = None,\n fields_data: t.Dict[str, str] = None,\n **kwargs\n ):\n r = self._do_call(\n method='GET',\n url=f'{self.API_BASE_URL}/info/option-month-and-underlying-month',\n headers=headers,\n body=body,\n fields=fields_data,\n **kwargs\n )\n return r", "def month(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"month\")", "def set_start():\n app.logger.debug(\"Got a JSON set_start post\");\n global dateFormat\n reply = {}\n\n flask.session[\"bStart\"] = request.form[\"bStart\"]\n flask.session[\"bLength\"] = request.form[\"bLength\"]\n bLength = int(request.form[\"bLength\"])\n try:\n start = arrow.get(flask.session[\"bStart\"], \"YYYY/MM/DD HH:mm\")\n except:\n reply[\"message\"] = \"Bad date Time.\"\n return jsonify(result=reply)\n \n brevet = AcpBrevet(bLength, start)\n open_limit = brevet.calc_open(0,bLength)\n close_limit = brevet.calc_close(0,bLength)\n\n reply[\"message\"] = \"Start of event and length set.\"\n reply[\"open\"] = open_limit.format(dateFormat)\n reply[\"close\"] = close_limit.format(dateFormat)\n return jsonify(result=reply)", "def get_month(x):\n return x[\"SALE DATE\"].month", "def MONTH(\n serial_number: func_xltypes.XlNumber\n) -> func_xltypes.XlNumber:\n\n date = utils.number_to_datetime(int(serial_number))\n return int(date.strftime(\"%m\"))", "def whFixMonth(self, whAdr=0, whT=0, month=int(datetime.now().strftime(\"%m\"))):\n\t\twhFixM = False\n\t\tvalAdr = {\n\t\t\t1:{0:'\\x02\\xAA', 1:'\\x02\\xBB', 2:'\\x02\\xCC', 3:'\\x02\\xDD', 4:'\\x02\\xEE'},\n\t\t\t2:{0:'\\x02\\xFF', 1:'\\x03\\x10', 2:'\\x03\\x21', 3:'\\x03\\x32', 4:'\\x03\\x43'},\n\t\t\t3:{0:'\\x03\\x54', 1:'\\x03\\x65', 2:'\\x03\\x76', 3:'\\x03\\x87', 4:'\\x03\\x98'},\n\t\t\t4:{0:'\\x03\\xA9', 1:'\\x03\\xBA', 2:'\\x03\\xCB', 3:'\\x03\\xDC', 4:'\\x03\\xED'},\n\t\t\t5:{0:'\\x03\\xFE', 1:'\\x04\\x0F', 2:'\\x04\\x20', 3:'\\x04\\x31', 
4:'\\x04\\x42'},\n\t\t\t6:{0:'\\x04\\x53', 1:'\\x04\\x64', 2:'\\x04\\x75', 3:'\\x04\\x86', 4:'\\x04\\x97'},\n\t\t\t7:{0:'\\x04\\xA8', 1:'\\x04\\xB9', 2:'\\x04\\xCA', 3:'\\x04\\xDB', 4:'\\x04\\xEC'},\n\t\t\t8:{0:'\\x04\\xFD', 1:'\\x05\\x0E', 2:'\\x05\\x1F', 3:'\\x05\\x30', 4:'\\x05\\x41'},\n\t\t\t9:{0:'\\x05\\x52', 1:'\\x05\\x63', 2:'\\x05\\x74', 3:'\\x05\\x85', 4:'\\x05\\x96'},\n\t\t\t10:{0:'\\x05\\xA7', 1:'\\x05\\xB8', 2:'\\x05\\xC9', 3:'\\x05\\xDA', 4:'\\x05\\xEB'},\n\t\t\t11:{0:'\\x05\\xFC', 1:'\\x06\\xD0', 2:'\\x06\\x1E', 3:'\\x06\\x2F', 4:'\\x06\\x40'},\n\t\t\t12:{0:'\\x06\\x51', 1:'\\x06\\x62', 2:'\\x06\\x73', 3:'\\x06\\x84', 4:'\\x06\\x95'}\n\t\t}\n\t\twhFixMonthCmd = chr(whAdr) + '\\x06\\x02' + valAdr[month][whT] + '\\x10'\n\t\tlogging.info(u'Reading the meter readings recorded at the start of the month: %s' % str(whAdr))\n\t\tans = self.cmdWR(whFixMonthCmd)\n\t\tif ans and self._whAnsCheck(whAdr, ans):\n\t\t\ttry:\n\t\t\t\twhFixMA = int(ans[2] + ans[1] + ans[4] + ans[3], 16) * 0.0005\n\t\t\t\twhFixMR = int(ans[10] + ans[9] + ans[12] + ans[11], 16) * 0.0005\n\t\t\t\twhFixM = {'A':whFixMA, 'R':whFixMR}\n\t\t\texcept Exception, e:\n\t\t\t\tlogging.error(u'Failed to read the meter readings recorded at the start of the month! Reason: %s' % e)\n\t\t\t\twhFixM = False\n\t\telse:\n\t\t\tlogging.error(u'Failed to read the readings recorded by meter %s at the start of the month!' % str(whAdr))\n\t\t\twhFixM = False\n\t\treturn whFixM",    "def parse_month(self, response):\n        month, year = response.meta[\"month\"], response.meta[\"year\"]\n        dates = response.json().get(\"data\", [])\n\n        for gazette_name in dates.values():\n            date = re.search(self.DATE_REGEX, gazette_name).group()\n\n            if date is None:\n                continue\n\n            date = parse(date, settings={\"DATE_ORDER\": \"DMY\"}).date()\n\n            if date < self.start_date:\n                continue\n\n            url = f\"{self.GAZETTE_URL}?dir={year}/{month}/{gazette_name}\"\n            yield Request(url, callback=self.parse_gazette)",    "def month(self) -> str:\r\n        return self._month",    "def E_Dynamic_MavkoEtAl2009(rhob,DTS,PR):\n    E = (2*(rhob*1000)*((304800/DTS)**2)*(1+PR))/1000000\n    return E",    "def _get_months(self, cr, uid, context):\n        months=[(str(n),str(n)) for n in range(1,13)]\n        return months",    "def month(self) -> int:\n        return self.arxiv_id.month",    "def test_cells_charts_post_chart_series_axis(self):\n        name ='Book1.xlsx'\n        sheet_name ='Sheet4'\n        chartIndex = 0 \n        folder = \"PythonTest\"\n        axis = Axis()\n        axis.min_value = 10.0\n        result = AuthUtil.Ready(self.api, name, folder)\n        self.assertTrue(len(result.uploaded)>0) \n        result = self.api.cells_charts_post_chart_series_axis(name, sheet_name,chartIndex,axis, folder=folder)\n        self.assertEqual(result.code,200)\n        pass",    "def test_cells_charts_post_chart_second_value_axis(self):\n        name ='Book1.xlsx'\n        sheet_name ='Sheet4'\n        chartIndex = 0 \n        folder = \"PythonTest\"\n        axis = Axis()\n        axis.min_value = 10.0\n        result = AuthUtil.Ready(self.api, name, folder)\n        self.assertTrue(len(result.uploaded)>0) \n        result = self.api.cells_charts_post_chart_second_value_axis(name, sheet_name,chartIndex, axis, folder=folder)\n        self.assertEqual(result.code,200)\n        pass",    "def _boundary_value(self) -> str:\n        ...",    "def news_for_month(self):\n\n        raise NotImplementedError",    "def mapMaufromByte(self, date, bytes):\n        sMonth = date.strftime(self.config.MONTH_FORMAT)\n        
reKey = self.config.dau_keys_conf['mau'].format(month=sMonth)\n redis_cli = self.get_redis_cli()\n logging.debug('Save mau from bytes: %s' % reKey)\n redis_cli.set(reKey, bytes)", "def month(self) -> int:\n if self.is_old_style:\n return int(self.split('/', 1)[1][2:4])\n return int(self[2:4])", "def set_period(self, yearmonth):\n if not isinstance(yearmonth, int):\n yearmonth = int(yearmonth)\n year = int(yearmonth / 100)\n if self._set_year(year) is False:\n return False\n return self._set_month(yearmonth % year)", "def _build_data_result(self, efem, list_efem):\n result_data = dict()\n result_data[CURRENT_DAY] = list(efem.values())[0]['msj_efem']\n result_data[MONTH] = dict()\n result_data[MONTH] = self._build_efem_month(list_efem)\n return result_data", "def __get_step1_end_month(yaml_content: dict) -> str:\n\n end_month = None\n\n try:\n end_month = yaml_content['step1.end_month']\n except KeyError as exc:\n print(ConfigurationFactory.__get_key_missing_error_message(exc))\n\n return end_month", "def comp_month_range():\n word_months = _(\"months\")\n word_month = _(\"month\")\n COMP_MONTH_LIST = (\n (12, '- 12 ' + word_months),\n (11, '- 11 ' + word_months),\n (10, '- 10 ' + word_months),\n (9, '- 9 ' + word_months),\n (8, '- 8 ' + word_months),\n (7, '- 7 ' + word_months),\n (6, '- 6 ' + word_months),\n (5, '- 5 ' + word_months),\n (4, '- 4 ' + word_months),\n (3, '- 3 ' + word_months),\n (2, '- 2 ' + word_months),\n (1, '- 1 ' + word_month),\n )\n return COMP_MONTH_LIST", "def __init__(self, new_month, new_day, new_year):\n self.month = new_month\n self.day = new_day\n self.year = new_year", "def get_month(self, indate):\n return indate.strftime(\"%B\") + \"-\" + indate.strftime(\"%Y\")", "def _handleRequestPostChargeParameters(self, data):\r\n print(\"\\\"Request Post Charge Parameters\\\" received\")\r\n message = self.whitebeet.v2gParseRequestPostChargeParameters(data)\r\n if 'dc' in message:\r\n print(\"SOC: {}%\".format(message['dc']['soc']))\r\n try:\r\n self.whitebeet.v2gSetDcPostChargeParameters(0, 1, int(self.charger.getEvsePresentVoltage()))\r\n except Warning as e:\r\n print(\"Warning: {}\".format(e))\r\n except ConnectionError as e:\r\n print(\"ConnectionError: {}\".format(e))", "def month(self, month: str):\n return get_from_list(self.months, \"month\", month)", "def check_dates(self, kwargs):\n month = int(kwargs['month'])\n if (int(kwargs['year_from']) >= int(kwargs['year_to'])) or \\\n (month < 1 or month > 12):\n # kdyby datumy byly nejake dodrbane, tak se sverime do pece autoredirectu\n return HttpResponseRedirect(reverse('admin_redir'))\n return None", "def problem3_3(month, day, year):\r\n \r\n months = (\"January\", \"February\", \"March\",\"April\",\"May\",\"June\",\"July\",\\\r\n \"August\",\"September\",\"October\",\"November\",\"December\")\r\n month = month - 1 \r\n Month_prin = months[month]\r\n Date_print = Month_prin + \" \" + str(day) + \",\" + \" \" +str(year)\r\n print(Date_print)", "def EDATE(start_date, months):\n return DATE(start_date.year, start_date.month + months, start_date.day)", "def test_get_occurrences_monthly_mid_month(self):\n print()\n print(\"Get occurrences of a monthly expense between:\")\n expense = BudgetExpense.objects.get(id = 100)\n start_date = expense.start_date\n end_date = start_date + timedelta(days = 40)\n print(start_date.strftime(\"%B %d, %y\")+\" and \"+end_date.strftime('%B %d, %y'))\n print(\"======================================\")\n result = get_anticipated_transaction_occurences(anticipated_transaction= expense, 
start_date = start_date, end_date = end_date)\n result_dates = []\n for current_expense in result.keys():\n print(current_expense)\n print(\"========================\")\n result_dates.extend(result.get(current_expense))\n for current_date in result_dates:\n print(\"Date: \"+current_date.strftime(\"%B %d, %y %T\"))\n print(\"======================\")\n print()\n date_1 = start_date\n days_in_month = monthrange(start_date.year, start_date.month)[1]\n date_2 = start_date + timedelta(days = days_in_month)\n \n self.assertEquals([date_1, date_2], result_dates)", "def month(self):\n return gocept.month.Month(self.calendar_month, self.calendar_year)", "def calendarPageChanged(self, year, month):\n success = self.porker_thread.extendDates(datetime.date(year, month, 1))\n #if not success:\n # self.alertMessage(\"Failure!\",\"Unable to extend the thread's dates for some reason.\")\n #efficiency = self.porker_thread.getEfficiencyFor(self.getActiveDate())\n #self.porker_thread.sentDatesData = False", "def __next_month(self, year, month):\n year, month = (year, month + 1) if month < 12 else (year + 1, 1)\n\n return self.create(year, month)" ]
[ "0.5586388", "0.5486472", "0.5486472", "0.52544075", "0.52388316", "0.52369255", "0.5221326", "0.511121", "0.51008004", "0.50837576", "0.5038402", "0.50014496", "0.49831936", "0.4924879", "0.49113798", "0.4900271", "0.48797706", "0.4850321", "0.48187506", "0.48154077", "0.48068428", "0.4791563", "0.4791332", "0.47902307", "0.4779765", "0.47488454", "0.4742299", "0.47362733", "0.47260347", "0.47237912", "0.47115922", "0.46985316", "0.46945426", "0.46835485", "0.46662584", "0.46478269", "0.46251255", "0.46048158", "0.45886803", "0.45805392", "0.4580371", "0.4579317", "0.45747238", "0.45717144", "0.45631507", "0.4547092", "0.4543418", "0.4535243", "0.45010933", "0.4499499", "0.44644716", "0.44523805", "0.44523805", "0.44467047", "0.4442561", "0.4432525", "0.44291133", "0.44216424", "0.44206458", "0.4419297", "0.4408503", "0.44012472", "0.4400569", "0.44004825", "0.43997708", "0.43753338", "0.43745252", "0.43711665", "0.43705893", "0.43659022", "0.4354287", "0.435188", "0.43509018", "0.43417427", "0.4341229", "0.43394297", "0.4336478", "0.43346", "0.43309805", "0.43217847", "0.43167427", "0.43068534", "0.43059757", "0.43048242", "0.42952937", "0.4287715", "0.4287199", "0.42764825", "0.42741284", "0.42738798", "0.42705828", "0.42637408", "0.4262457", "0.42571387", "0.42521322", "0.42445788", "0.4243305", "0.4242668", "0.4237182", "0.42359754", "0.4228784" ]
0.0
-1
Send boundary values EMonth field (EMonth= 01)
def test_22(self): assert 'True' == Api.requestBlock('test-22')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_Month(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Month', value)", "def _build_efem_month(self,list_efem_month):\n\n def add_value_dict(key, dict_data, msj=None):\n if key in dict_data.keys():\n dict_data[key] = dict_data[key] + [msj]\n else:\n dict_data[key] = [msj]\n return dict_data\n\n result_data = dict()\n for efem in list_efem_month:\n datetime_efem = efem.date_efem\n result_data = add_value_dict(str(datetime_efem.day), result_data, efem.msj_efem)\n return result_data", "def month(self):\n return 0", "def month(self):\n return 0", "def month_adj():\n\n user_id = current_identity.id\n dayDate = request.form.get(\"dayDate\")\n newVal = request.form.get(\"newVal\")\n elemName = request.form.get(\"ElemName\")\n\n day = parse_day(dayDate)\n month = parse_month(dayDate)\n year = parse_year(dayDate)\n\n commit_adj_to_db(user_id, day, month, year, newVal, elemName)\n\n response = {\"status\" : \"ok\"}\n\n return jsonify(response)", "def set_start_month(self, month):\n return self.form.set_value(\"output period \\\"month from\\\"\", MONTHS[month - 1])", "def effective_invoice_month(self) -> pulumi.Input['GoogleTypeDateArgs']:\n return pulumi.get(self, \"effective_invoice_month\")", "def set_finish_month(self, month):\n return self.form.set_value(\"output period \\\"month to\\\"\", MONTHS[month - 1])", "def month(self, month):\n\n self._month = month", "def setMonth(self, *args):\n return _libsbml.Date_setMonth(self, *args)", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"MTM\", MTM_MINS_COST)\n self.bill.add_fixed_cost(MTM_MONTHLY_FEE)", "def set_month(self, month):\r\n\t\tmonths = ['Enero', 'Febrero', 'Marzo', 'Abril',\r\n\t\t\t\t 'Mayo', 'Junio', 'Julio', 'Agosto'\r\n\t\t\t\t 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre']\r\n\t\tfor i in range(12):\r\n\t\t\tif month == i: \r\n\t\t\t\treturn months[i-1]", "def set_month(self, month):\n # if the sting value of month is correct then we transform it into an\n # integer\n if isinstance(month, str):\n if month in MONTH_STR:\n month_int = MONTH_STR.index(month) + 1\n else:\n raise ValueError(\"Weekday as a string can only take the value {}\".format(MONTH_STR))\n else:\n month_int = month\n\n # Check if month_int in good range\n if month_int not in range(1, 13):\n raise ValueError(\"Month value must be in range [1..12] but is {}\".format(month_int))\n\n # First we separate the tens and the digit\n tens, digit = divmod(int(month_int), 10)\n\n # Then we add them in a single int\n reg_value = (tens << 4) | digit\n\n # The we add it to the register\n self.__write_register(_REGISTER_MONTH, reg_value)", "def test_monthly_report_error(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n res = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.expense)\n self.assertEqual(res.status_code, 201)\n rv = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token), data=\n {'name': 'soda', 'amount': 200, 'date_of_expense': '10-01-2021'})\n month = 4567\n res = self.client().get(f'/monthly_report?month={month}', headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(res.status_code, 400)\n results = json.loads(res.data)\n self.assertEqual(results['message'], f'The date {month} does not match the format MM-YYYY')", "def monthly_schedule(self,month):\n response = 
requests.get(f'http://company.com/{self.lname}/{month}')\n if response.ok:\n return response.text\n else:\n return 'Bad Response!'", "def get_month():\n return handle_invalid_inputs(question_3, months)", "def month():\n \n # get month entered by user - if no month entered default to current month\n month = request.args.get(\"month\", datetime.now().strftime(\"%Y-%m\"))\n \n # get budget data for month as a dictionary\n data = budget_data(month)\n \n return json.dumps(data)", "def month_digit(self, year, month, day):\n if (month[0] == '0' and len(month) > 2 or\n day.isdigit() and day[0] == '0' and len(day) > 2):\n Input.change_display(self, self.entries[4],\n 'Remove preceding zeros')\n else:\n Input.day_of_months(self, year, month, day.lstrip('0'))", "def month(m=0):\n if not 1 <= m <= 12:\n # throw error\n return jsonify({}), status.HTTP_400_BAD_REQUEST\n holidays = Holidays.query.filter_by(month=m).all()\n\n this_month = {}\n for h in holidays:\n this_month[h.day] = this_month.get(h.day, []) + [h.holiday]\n\n return jsonify({\"month\": m, \"holidays\": this_month})", "def eomday(year, month):\n if hasattr(year, '__iter__'):\n assert hasattr(month, '__iter__')\n return np.array([calendar.monthrange(y, m)[-1] for y, m in zip(year, month)])\n else:\n return calendar.monthrange(year, month)[-1]", "def month_days():\n user_id = current_identity.id\n month = request.args.get(\"month\")\n year = request.args.get(\"year\")\n \n if user_id:\n \n response = {\n \"status\": None,\n \"dateArray\" : []\n }\n possibleDateArr = format_dayArray(month, year, user_id)\n\n if not possibleDateArr:\n return jsonify({\"status\" : \"error\"})\n\n response[\"dateArray\"] = possibleDateArr\n \n response[\"status\"] = \"ok\"\n \n return jsonify(response)", "def test_valid_month(self):\n ar_month = self.ar[2009][11]\n self.assertTrue(isinstance(ar_month, awstats_reader.AwstatsMonth))", "def _set_month(self, month) -> bool:\n if self.set_start_month(month) is False:\n return False\n return self.set_finish_month(month)", "def setIndexMonth(self,index):\n self.indexMonth = index", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"TERM\", TERM_MINS_COST)\n self.bill.add_fixed_cost(TERM_MONTHLY_FEE)\n if not ((self.end.month < month and self.end.year <= year) or\n self.end.year < year):\n # refresh included minutes and SMSs\n self.bill.add_free_minutes((-1) * self.bill.free_min)\n if self.start.month == month and self.start.year == year:\n # if first month, add term deposit to bill.\n self.bill.add_fixed_cost(TERM_DEPOSIT)\n else:\n self._carried_term = True", "def process_month(self):\n if self.balance > 0:\n # if positive balance, convert APR to monthly multiplicative factor\n monthly_factor = pow(1 + self.apr, 1 / 12)\n self.balance *= monthly_factor", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n raise NotImplementedError", "def add_month(cab_data):\n return cab_data.assign(month=lambda x: x.time.dt.month)", "def month_content():\n \n user_id = current_identity.id\n month = request.form.get(\"month\")\n year = request.form.get(\"year\")\n \n if not is_month(month, year, user_id):\n establish_month(month, year, user_id)\n\n dayContentDict = format_day_content(month, year, user_id)\n\n d = collections.defaultdict(dict)\n response = {\n \"status\": \"ok\",\n \"dayContent\" : d\n }\n \n if dayContentDict:\n response[\"dayContent\"] = dayContentDict\n \n return jsonify(response)", "def mm(self):\n return '%02d' % self._month", "def 
aMonth(self):\n return self._amon", "def test_invalid_month_orig(self):\n year, month, error = clean_year_month(2014, 3, 13)\n self.assertEqual(year, 2014)\n self.assertEqual(month, timezone.localtime(timezone.now()).month)\n self.assertEqual(error, ERROR)", "def eme_500day_2018(self):\n a = de421_planets()\n self.FBseq = a.eme_500day_2018(self.MARS_DIST)\n self.N = len(self.FBseq)\n self.partials = 'eme_500day_2018_single'\n self.finals = 'eme_500day_2018_multi'", "def calendar_month(year, month):\n start = datetime.datetime(year, month, 1)\n if month == 12:\n end = datetime.datetime(year+1, 1, 1)\n else:\n end = datetime.datetime(year, month+1, 1)\n print(start)\n print(end)\n return start, end", "def new_month(self, month: int, year: int, bill: Bill) -> None:\n self.bill = bill\n self.bill.set_rates(\"PREPAID\", PREPAID_MINS_COST)\n if self.balance > (-10.0):\n self.balance += (-25.0)\n self.bill.add_fixed_cost(self.balance)", "def day_of_month(self, day_of_month):\n\n self._day_of_month = day_of_month", "def get_end_month(month):\n return datetime(2020, month, 28)", "def month(self):\n return self.__month", "def monthly_day(self, monthly_day):\n\n self._monthly_day = monthly_day", "def parse_month(self, response):\n month, year = response.meta[\"month\"], response.meta[\"year\"]\n dates = response.json().get(\"data\", [])\n\n for gazette_name in dates.values():\n date = re.search(self.DATE_REGEX, gazette_name).group()\n\n if date is None:\n continue\n\n date = parse(date, settings={\"DATE_ORDER\": \"DMY\"}).date()\n\n if date < self.start_date:\n continue\n\n url = f\"{self.GAZETTE_URL}?dir={year}/{month}/{gazette_name}\"\n yield Request(url, callback=self.parse_gazette)", "def set_value(self, value):\n\t\tassert len(value.split('-')) > 1, 'A minimum of year and month are needed.'\n\t\tself.values = map(int, value.split('-'))", "def pMonth(self):\n return self._pmon", "def __next_month(self, year, month):\n year, month = (year, month + 1) if month < 12 else (year + 1, 1)\n\n return self.create(year, month)", "def day_of_months(self, year, month, day):\n if month.isdigit() and int(month) < 13:\n if (int(month) in [1,3,5,7,8,10,12]):\n Input.condition(self, year, month, day, '31', '')\n elif (int(month) in [4,6,9,11]):\n Input.condition(self, year, month, day, '30', '')\n elif int(month) == 2:\n if (((int(year) % 4) == 0 and\n not (int(year) % 100) == 0)\n or (int(year) % 400) == 0):\n if int(year) == 1712 and int(day) == 30:\n \"\"\"Easter Egg.\"\"\"\n Input.condition(self, year, month, day, '30','')\n Input.special_case(self)\n else:\n Input.condition(self, year, month, day, '29',' ')\n else:\n Input.condition(self, year, month, day, '28', '29')\n else:\n Input.change_display(self, self.entries[4],\n 'Enter month between 1-12 or month name')", "def test_parameter_checking(self):\n params = ['12-2015', '12-15', '2015_12', '2015.12', '12.2015', '12/2015', '12/15', '2015-00', '2015-13']\n # list of examples of possible wrong month parameters\n for month in params:\n with self.assertRaises(ValueError) as cm: # ValueError should be raised with proper message\n download_data(month)\n self.assertEqual('Month parameter should be in form `yyyy-mm`', cm.exception.args[0], msg=month)\n # check if error message was as expected", "def test_cells_charts_post_chart_value_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) 
\n result = self.api.cells_charts_post_chart_value_axis(name, sheet_name,chartIndex,axis, folder=folder)\n self.assertEqual(result.code,200)\n pass", "def month(self):\n return self._months", "def get_month(x):\n return x[\"SALE DATE\"].month", "def test_cells_charts_post_chart_series_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) \n result = self.api.cells_charts_post_chart_series_axis(name, sheet_name,chartIndex,axis, folder=folder)\n self.assertEqual(result.code,200)\n pass", "def month(self):\n return self._month", "def month(self):\n return self._month", "def setAnchorDateMonth(self, value):\n normalizedMonth = value - 1\n return self._set(anchorDateMonth=normalizedMonth)", "def GetAvailabilityForMonth(year, month, VerkehrstageHex):\r\n\t# Get Last Day of the Month in Python\r\n\t# http://stackoverflow.com/questions/42950/get-last-day-of-the-month-in-python\r\n\t\"\"\"\r\n\tmonthrange(year, month):\r\n Returns weekday of first day of the month and number of days in month\r\n \"\"\"\r\n\tDayRange = calendar.monthrange(year,month)\r\n\tMonthEndDay = DayRange[1]\r\n\r\n\tStartDate = date(year,month,1)\r\n\tEndDate = date(year,month,MonthEndDay)\t\t\r\n\treturn GetAvailabilityBetweenDates(StartDate, EndDate, VerkehrstageHex)", "def test_cells_charts_post_chart_second_value_axis(self):\n name ='Book1.xlsx'\n sheet_name ='Sheet4'\n chartIndex = 0 \n folder = \"PythonTest\"\n axis = Axis()\n axis.min_value = 10.0\n result = AuthUtil.Ready(self.api, name, folder)\n self.assertTrue(len(result.uploaded)>0) \n result = self.api.cells_charts_post_chart_second_value_axis(name, sheet_name,chartIndex, axis, folder=folder)\n self.assertEqual(result.code,200)\n pass", "def test_bad_quarter_or_month(self):\n update_json = {\n \"cgac_code\": \"020\",\n \"is_quarter\": True,\n \"award_financial\": \"updated.csv\",\n \"reporting_period_start_date\": \"12/2016\",\n \"reporting_period_end_date\": \"13/2016\"}\n update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n headers={\"x-session-id\": self.session_id}, expect_errors=True)\n self.assertEqual(update_response.status_code, 400)\n self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])\n\n update_json = {\n # make sure date checks work as expected for an existing submission\n \"existing_submission_id\": self.status_check_submission_id,\n \"award_financial\": \"updated.csv\",\n \"reporting_period_start_date\": \"AB/2016\",\n \"reporting_period_end_date\": \"CD/2016\"}\n update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n headers={\"x-session-id\": self.session_id}, expect_errors=True)\n self.assertEqual(update_response.status_code, 400)\n self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])\n\n update_json = {\n \"cgac_code\": \"020\",\n \"is_quarter\": True,\n \"award_financial\": \"updated.csv\",\n \"reporting_period_start_date\": \"Q1/ABCD\",\n \"reporting_period_end_date\": \"Q2/2016\"}\n update_response = self.app.post_json(\"/v1/submit_files/\", update_json,\n headers={\"x-session-id\": self.session_id}, expect_errors=True)\n self.assertEqual(update_response.status_code, 400)\n self.assertIn(\"Date must be provided as\", update_response.json[\"message\"])", "def decrement_month(self):\n month: int = int(self.month)\n month -= 1\n if month == 0:\n month == 12\n year: int = 
int(self.year)\n        year -= 1\n        self.year = str(year)\n        self.month = str(month)\n        if len(self.month) == 1:\n            self.month = \"0\" + self.month",    "def calendarPageChanged(self, year, month):\n        success = self.porker_thread.extendDates(datetime.date(year, month, 1))\n        #if not success:\n        #    self.alertMessage(\"Failure!\",\"Unable to extend the thread's dates for some reason.\")\n        #efficiency = self.porker_thread.getEfficiencyFor(self.getActiveDate())\n        #self.porker_thread.sentDatesData = False",    "def MONTH(\n    serial_number: func_xltypes.XlNumber\n) -> func_xltypes.XlNumber:\n\n    date = utils.number_to_datetime(int(serial_number))\n    return int(date.strftime(\"%m\"))",    "def __month(self):\n        return _VirtualColumn(\n            df_name=self.thisptr[\"df_name_\"],\n            operator=\"month\",\n            operand1=self,\n            operand2=None\n        )",    "def test_date_accept_this_month(self):\n        spi_search = \"find date this month\"\n        inv_search = \"year:\" + datetime.datetime.strftime(datetime.datetime.today(), '%Y-%m')\n        self._compare_searches(inv_search, spi_search)",    "def month(self) -> int:\n        return self.arxiv_id.month",    "def changeDisplayedMonth(self):\n        # I need to know which month is currently displayed\n        currentMonth = self.indexMonth\n        currentYear = self.currentYear\n\n        sender = self.sender().objectName()\n        if sender == 'bot_next':\n            # if currentMonth < 11:\n            if self.indexMonth < 11:\n                self.indexMonth += 1\n                self.setBaseDate(self.baseDate.addMonths(1))\n            else:\n                self.indexMonth = 0\n                self.setCurrentYear(currentYear+1)\n                # print('baseDate before', self.baseDate)\n                self.setBaseDate(self.baseDate.addMonths(1))\n                # print('baseDate after', self.baseDate)\n                # print('new Year: ', self.currentYear)\n\n        elif sender == 'bot_prev':\n            # if currentMonth > 0:\n            if self.indexMonth > 0:\n                self.indexMonth -= 1\n                self.setBaseDate(self.baseDate.addMonths(-1))\n            else:\n                self.indexMonth = 11\n                self.setCurrentYear(currentYear-1)\n                self.setBaseDate(self.baseDate.addMonths(-1))\n                # print('new Year: ', self.currentYear)\n        if currentMonth != self.indexMonth:\n            # print(f'currentPageChanged.emit({self.indexMonth})')\n            self.currentPageChanged.emit(self.indexMonth)\n            self.combo_mesi.setCurrentIndex(self.indexMonth)\n        if currentYear != self.currentYear:\n            # print('current year changed')\n            self.setListaGiorniDellAnno(self.createDates(self.baseDate), self.indexMonth)",    "def test_put_wrong_data(self):\n        new_data = {\"fromMonth\": \"another\"}\n        response = self.client.put(self.url + str(self.current_data[-1]['id']) + '/', data=json.dumps(new_data),\n                                   content_type='application/json')\n        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())",    "def month(self, month: str):\n        return get_from_list(self.months, \"month\", month)",    "def Month(self):\n        return self._fmon",    "def news_for_month(self):\n\n        raise NotImplementedError",    "def _get_months(self, cr, uid, context):\n        months=[(str(n),str(n)) for n in range(1,13)]\n        return months",    "def __getMonth(self,xml):\n\t\t#TODO: Monat, Jahr, SollStunden, Urlaub,ZeitdiffAkt, ZeitdiffVor, erweitert\n\t\tdayTypeMapping = {'Arbeitstag': DayType.work,\n\t\t\t\t\t\t'Wochenende': DayType.weekend,\n\t\t\t\t\t\t'Urlaub': DayType.vacation,\n\t\t\t\t\t\t'Feiertag': DayType.holiday,\n\t\t\t\t\t\t'Krankheit': DayType.illness,\n\t\t\t\t\t\t'Überstunden genommen': DayType.overtime_free,\n\t\t\t\t\t\t'Dienstreise': DayType.business_trip,\n\t\t\t\t\t\t'Freistellung': DayType.unpaid_free}\n\t\tworkdays = {}\n\t\tmonthNum = int(xml.find('Monat').text)\n\t\tyearNum = int(xml.find('Jahr').text)\n\t\tif 
xml.find('Erweitert').text == 'true':\n\t\t\textendedFormat = True\n\t\telse:\n\t\t\textendedFormat = False\n\t\tfor panday in xml.findall('Tag'):\n\t\t\t# parse\n\t\t\tnumday = int(panday.find('Datum').text)\n\t\t\tdaytype = panday.find('TagesTyp').text\n\t\t\tdescription = panday.find('Bemerkung').text\n\t\t\tmorning = panday.find('Vormittag').text\n\t\t\tafternoon = panday.find('Nachmittag').text\n\t\t\tif extendedFormat:\n\t\t\t\tthird = panday.find('Dritte').text\n\t\t\t\tfourth = panday.find('Vierte').text\n\t\t\telse:\n\t\t\t\tthird = None\n\t\t\t\tfourth = None\n\t\t\t# convert\n\t\t\tdaytype = dayTypeMapping[daytype]\n\t\t\tmorning = self. _parsePANTimeRange(morning)\n\t\t\tafternoon = self. _parsePANTimeRange(afternoon)\n\t\t\tthird = self. _parsePANTimeRange(third)\n\t\t\tfourth = self. _parsePANTimeRange(fourth)\t\t\t\n\t\t\ttimeblocks = [morning, afternoon, third, fourth]\n\t\t\ttimeblocks = list(filter(None, timeblocks))\n\t\t\t# save\n\t\t\tday = WorkDay(daytype, description, timeblocks)\n\t\t\tworkdays[numday] = day\n\t\tmonth = WorkMonth(yearNum,monthNum,workdays)\n\t\treturn month", "def __get_step1_end_month(yaml_content: dict) -> str:\n\n end_month = None\n\n try:\n end_month = yaml_content['step1.end_month']\n except KeyError as exc:\n print(ConfigurationFactory.__get_key_missing_error_message(exc))\n\n return end_month", "def get_main_date(self, kwargs):\n month = int(kwargs['month'])\n if month >= 9 and month <= 12:\n main_date = datetime(int(kwargs['year_from']), month, 1)\n else:\n main_date = datetime(int(kwargs['year_to']), month, 1)\n return main_date", "def _get_eur_gbp_last_month(self) -> None:\n last_month = _last_month()\n data = _get_ecb_data(FREQUENCY_MONTHLY, last_month, last_month)\n\n self.eur_gbp_last_month = _get_latest_ecb_rate(data)", "def month(self) -> str:\r\n return self._month", "def set_period(self, yearmonth):\n if not isinstance(yearmonth, int):\n yearmonth = int(yearmonth)\n year = int(yearmonth / 100)\n if self._set_year(year) is False:\n return False\n return self._set_month(yearmonth % year)", "def comp_month_range():\n word_months = _(\"months\")\n word_month = _(\"month\")\n COMP_MONTH_LIST = (\n (12, '- 12 ' + word_months),\n (11, '- 11 ' + word_months),\n (10, '- 10 ' + word_months),\n (9, '- 9 ' + word_months),\n (8, '- 8 ' + word_months),\n (7, '- 7 ' + word_months),\n (6, '- 6 ' + word_months),\n (5, '- 5 ' + word_months),\n (4, '- 4 ' + word_months),\n (3, '- 3 ' + word_months),\n (2, '- 2 ' + word_months),\n (1, '- 1 ' + word_month),\n )\n return COMP_MONTH_LIST", "def months(self, months):\n allowed_values = [\"january\", \"feburary\", \"march\", \"april\", \"may\", \"june\", \"july\", \"august\", \"september\", \"october\", \"november\", \"december\"] # noqa: E501\n if not set(months).issubset(set(allowed_values)):\n raise ValueError(\n \"Invalid values for `months` [{0}], must be a subset of [{1}]\" # noqa: E501\n .format(\", \".join(map(str, set(months) - set(allowed_values))), # noqa: E501\n \", \".join(map(str, allowed_values)))\n )\n\n self._months = months", "def test_get_occurrences_monthly_mid_month(self):\n print()\n print(\"Get occurrences of a monthly expense between:\")\n expense = BudgetExpense.objects.get(id = 100)\n start_date = expense.start_date\n end_date = start_date + timedelta(days = 40)\n print(start_date.strftime(\"%B %d, %y\")+\" and \"+end_date.strftime('%B %d, %y'))\n print(\"======================================\")\n result = get_anticipated_transaction_occurences(anticipated_transaction= expense, 
start_date = start_date, end_date = end_date)\n result_dates = []\n for current_expense in result.keys():\n print(current_expense)\n print(\"========================\")\n result_dates.extend(result.get(current_expense))\n for current_date in result_dates:\n print(\"Date: \"+current_date.strftime(\"%B %d, %y %T\"))\n print(\"======================\")\n print()\n date_1 = start_date\n days_in_month = monthrange(start_date.year, start_date.month)[1]\n date_2 = start_date + timedelta(days = days_in_month)\n \n self.assertEquals([date_1, date_2], result_dates)", "def get_month(self, indate):\n return indate.strftime(\"%B\") + \"-\" + indate.strftime(\"%Y\")", "def _set_value_date_32A(self, val):\n self.swift_obj.ValueDateCurrencyInterbankSettledAmount = val\n self.swift_obj.ValueDateCurrencyInterbankSettledAmount.swiftTag = \"32A\"", "def _boundary_value(self) -> str:\n ...", "def problem3_3(month, day, year):\r\n \r\n months = (\"January\", \"February\", \"March\",\"April\",\"May\",\"June\",\"July\",\\\r\n \"August\",\"September\",\"October\",\"November\",\"December\")\r\n month = month - 1 \r\n Month_prin = months[month]\r\n Date_print = Month_prin + \" \" + str(day) + \",\" + \" \" +str(year)\r\n print(Date_print)", "def month(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"month\")", "def fix_single_digit_month(connection):\n _update_date_by_regexp(connection=connection,\n regexp=\"^[0-9]{1}/[0-9]{1,2}/[0-9]{4}$\",\n new_value=\"CONCAT('0', cav.attribute_value)\")", "def check_dates(self, kwargs):\n month = int(kwargs['month'])\n if (int(kwargs['year_from']) >= int(kwargs['year_to'])) or \\\n (month < 1 or month > 12):\n # kdyby datumy byly nejake dodrbane, tak se sverime do pece autoredirectu\n return HttpResponseRedirect(reverse('admin_redir'))\n return None", "def month_bounds(year, month):\n year = int(year)\n month = int(month)\n month_start = datetime.strptime('%s,%s,1' % (year, month),'%Y,%m,%d')\n # days_in_month returns a tuple(weekday, days) where\n # weekday is the eekday the month starts on and days is the number of days in the month\n days_in_month = calendar.monthrange(year,month)\n month_end = month_start + timedelta(days=days_in_month[1]-1)\n return (month_start, month_end)", "def get_api_info_option_month_and_underlying_month_r(\n self,\n\n headers: t.Dict[str, str] = None,\n body: JSONEncodable = None,\n fields_data: t.Dict[str, str] = None,\n **kwargs\n ):\n r = self._do_call(\n method='GET',\n url=f'{self.API_BASE_URL}/info/option-month-and-underlying-month',\n headers=headers,\n body=body,\n fields=fields_data,\n **kwargs\n )\n return r", "def _build_data_result(self, efem, list_efem):\n result_data = dict()\n result_data[CURRENT_DAY] = list(efem.values())[0]['msj_efem']\n result_data[MONTH] = dict()\n result_data[MONTH] = self._build_efem_month(list_efem)\n return result_data", "def month(self):\n return gocept.month.Month(self.calendar_month, self.calendar_year)", "def set_M(self, morb):\n if not (0 < morb <= 12):\n raise ValueError\n self.set_par('Df_cnf_Fock', '')\n self.set_par('MORB', morb)", "def __init__(__self__, *,\n day_of_month: pulumi.Input[int],\n hand_off_time: pulumi.Input[str]):\n pulumi.set(__self__, \"day_of_month\", day_of_month)\n pulumi.set(__self__, \"hand_off_time\", hand_off_time)", "def cc_expire_months():\n months = []\n for month in range(1, 13):\n if len(str(month)) == 1:\n numeric = '0' + str(month)\n else:\n numeric = str(month)\n months.append((numeric, datetime.date(2009, month, 1).strftime('%B')))\n return months", 
"def get_business_day_of_month(year, month, count):\n r = rrule.rrule(\n rrule.MONTHLY, byweekday=(rrule.MO, rrule.TU, rrule.WE, rrule.TH, rrule.FR),\n dtstart=datetime.datetime(year, month, 1),\n bysetpos=count)\n res = r[0]\n if (res is None or res.month != month or res.year != year):\n raise ValueError(\"No dates found in range. is there a flaw in your logic?\")\n return res.date()", "def month(self) -> int:\n if self.is_old_style:\n return int(self.split('/', 1)[1][2:4])\n return int(self[2:4])", "def test_invalid_out_of_bounds_year(self):\n year, month, error = clean_year_month(2014, 100000, 1)\n self.assertEqual(year, now.year)\n self.assertEqual(month, timezone.localtime(timezone.now()).month)\n self.assertEqual(error, ERROR)", "def getSpecificMonth(self, month, year):\n try:\n specificMonth = []\n args = [month, year]\n months = [\"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"June\",\n \"July\", \"Aug\", \"Sept\", \"Oct\", \"Nov\", \"Dec\"]\n result_args = self.cursor.callproc(\"monthly_prev_months\", args)\n for result in self.cursor.stored_results():\n for r in result:\n specificMonth = [months[r[0]-1] +\n \" \" + str(r[1]), str(r[2])]\n return list(specificMonth)\n\n except Exception as e:\n return \"Error:\" + e", "def test_march_plus_zero(self):\n month, year = get_month_from_offset(3, 2000, 0)\n self.assertEqual(month, 3)\n self.assertEqual(year, 2000)", "def EDATE(start_date, months):\n return DATE(start_date.year, start_date.month + months, start_date.day)", "def E_Dynamic_MavkoEtAl2009(rhob,DTS,PR):\n E = (2*(rhob*1000)*((304800/DTS)**2)*(1+PR))/1000000\n return E", "def sendM(self):\n startM = self.countMissionaryOnStart()\n if startM < 1:\n return None\n else:\n newStart = str(startM-1) + self.start[1:]\n newEnd = str(4-startM) + self.end[1:]\n return MissionaryState(newStart,newEnd,\"sendM\")", "def mapMaufromByte(self, date, bytes):\n sMonth = date.strftime(self.config.MONTH_FORMAT)\n reKey = self.config.dau_keys_conf['mau'].format(month=sMonth)\n redis_cli = self.get_redis_cli()\n logging.debug('Save mau from bytes: %s' % reKey)\n redis_cli.set(reKey, bytes)", "def EOMONTH(start_date, months):\n return DATE(start_date.year, start_date.month + months + 1, 1) - datetime.timedelta(days=1)", "def set_start():\n app.logger.debug(\"Got a JSON set_start post\");\n global dateFormat\n reply = {}\n\n flask.session[\"bStart\"] = request.form[\"bStart\"]\n flask.session[\"bLength\"] = request.form[\"bLength\"]\n bLength = int(request.form[\"bLength\"])\n try:\n start = arrow.get(flask.session[\"bStart\"], \"YYYY/MM/DD HH:mm\")\n except:\n reply[\"message\"] = \"Bad date Time.\"\n return jsonify(result=reply)\n \n brevet = AcpBrevet(bLength, start)\n open_limit = brevet.calc_open(0,bLength)\n close_limit = brevet.calc_close(0,bLength)\n\n reply[\"message\"] = \"Start of event and length set.\"\n reply[\"open\"] = open_limit.format(dateFormat)\n reply[\"close\"] = close_limit.format(dateFormat)\n return jsonify(result=reply)" ]
[ "0.56795377", "0.5456256", "0.5432058", "0.5432058", "0.5389009", "0.5362143", "0.5330217", "0.52422476", "0.52419305", "0.52336526", "0.5153409", "0.5141174", "0.5082032", "0.5056412", "0.50128525", "0.49637815", "0.49588114", "0.49463055", "0.489759", "0.48916426", "0.4884086", "0.48826113", "0.4880065", "0.4879451", "0.4879091", "0.48723045", "0.48521793", "0.48487034", "0.4847336", "0.4832546", "0.48271945", "0.48233423", "0.47879532", "0.47785643", "0.47773507", "0.47440028", "0.47159767", "0.47026587", "0.46509832", "0.4643982", "0.46347803", "0.4627468", "0.46215367", "0.4621386", "0.46174604", "0.4593742", "0.45804474", "0.45695925", "0.4562077", "0.45551163", "0.45551163", "0.45501304", "0.45444503", "0.45342392", "0.45310897", "0.45283735", "0.45209906", "0.4519975", "0.45131662", "0.451262", "0.45027614", "0.4499897", "0.44976974", "0.449766", "0.449459", "0.447541", "0.4474811", "0.44728088", "0.44716147", "0.44707435", "0.4463396", "0.44608703", "0.4459983", "0.445746", "0.44465524", "0.4438715", "0.442969", "0.44290376", "0.4425211", "0.44217893", "0.44164222", "0.44148818", "0.44103935", "0.44095272", "0.4403949", "0.43995607", "0.43970266", "0.4395348", "0.43952236", "0.43893746", "0.43860832", "0.4384496", "0.43844685", "0.4377352", "0.43741533", "0.43740195", "0.43700436", "0.43651652", "0.43402538", "0.43400335", "0.4329933" ]
0.0
-1
Send null value in EYear field
def test_23(self): assert 'False' == Api.requestBlock('test-23')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_param_year_is_none(self):\n test_date = get_by_values(Ordinal.first, Weekday.Saturday, Month.May)\n self.assertEquals(date.today().year, test_date.year)", "def set_year(self, year: int) -> None:\n if year <= 0:\n # Seriously, are you trying to convince me this song was made before Christ?\n self.year = None\n return\n self.year = year", "def Year(self, default=None):\n return self.data.get('year', default)", "def Year(self, default=None):\n return self.data.get('year', default)", "def Year(self, default=None):\n return self.data.get('year', default)", "def get_year(self) -> Optional[int]:\n return self.year", "def set_Year(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Year', value)", "def year(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"year\")", "def get_number_year(text):\n val = get_number(text)\n if val is None or val < 1700 or val > (datetime.date.today().year + 1):\n return None\n return val", "def set_start_year(self, year):\n return self.form.set_value(\"output period \\\"year from\\\"\", str(year))", "def set_year (self, year):\n self.year = year", "def get_year(self):\n return self.year", "def showPreviousYear(self):\n pass", "def _sanitize_year(self, datestr):\n try:\n year = str(datetime.datetime.strptime(datestr, '%Y').date().year)\n except:\n try:\n year = str(datetime.datetime.strptime(datestr,\n '%Y-%m-%d').date().year)\n except:\n year = None\n return year", "def getIssnYear(artMeta):\n if artMeta == None:\n return\n else:\n issn = getIssn(artMeta)\n if issn == '':\n issn = artMeta['journal']\n if issn == '':\n return ('noJournal', artMeta['year'])\n issnYear = (issn, artMeta['year'])\n return issnYear", "def set_calender_year(self, year):\n self.single_selection_from_kendo_dropdown(self.calender_year_kendo_dropdown_locator, year)", "def set_finish_year(self, year):\n return self.form.set_value(\"output period \\\"year to\\\"\", str(year))", "def year(self, new_year):\n if self.valid_year(new_year):\n self._year = new_year\n else:\n raise ValueError", "def year(self, year):\n\n self._year = year", "def year(cls, year: typing.Union[int, str])->str:\n yearstr: str\n if isinstance(year, int):\n yearstr = str(year)\n else:\n yearstr = year\n return cls.DATE_AND_TIMES_SIGIL + yearstr + \"-01-01T00:00:00/9\"", "def year(self):\n return self._year", "def year(self):\n return self._year", "def get_user_year(self, user):\n if self.user_have_year(user):\n return int(user['bdate'].split(\".\")[-1])\n else:\n return None", "def test_required_year_of_birth_missing(self):\r\n self.url_params['year_of_birth'] = ''\r\n response = self.client.post(self.url, self.url_params)\r\n self.assertEqual(response.status_code, 400)\r\n obj = json.loads(response.content)\r\n self.assertEqual(\r\n obj['value'],\r\n u'Your year of birth is required',\r\n )", "def yearname(self):\n return self.strftime(\"%Y\")", "def increment_year(self):", "def year(self):\n return self._years", "def make_year(res):\n return str(res['issued']['date-parts'][0][0])", "def __year(self):\n return _VirtualColumn(\n df_name=self.thisptr[\"df_name_\"],\n operator=\"year\",\n operand1=self,\n operand2=None\n )", "def showNextYear(self):\n pass", "def form_InputDateNoneValue(request):\n schema = schemaish.Structure()\n schema.add('inputStrip', schemaish.Date(default=datetime.date(1900,1,1)))\n\n form = formish.Form(schema, 'form')\n form['inputStrip'].widget = formish.Input(empty=datetime.date(1900,1,1),roundtrip_empty=True)\n return form", "def 
validate_issue_year(issue_year: str) -> None:\n if not 2010 <= int(issue_year) <= 2020:\n raise ValueError(\"Issue year is outside permissible range\")", "def __init__(self, data, year):\n self.year = year\n self.data = data", "def year(self) -> int:\r\n return self._year", "def _fill_date(self):\n if not self.date['year']:\n self.date['year'] = self.DEFAULT_DATE['year']\n if not self.date['month']:\n self.date['month'] = self.DEFAULT_DATE['month']\n if not self.date['day']:\n self.date['day'] = self.DEFAULT_DATE['day']", "def model_end_year(self, model_end_year):\n\n self._model_end_year = model_end_year", "def test_post_cve_id_empty_year(reg_user_headers):\n res = requests.post(\n f'{env.AWG_BASE_URL}{CVE_ID_URL}',\n headers=reg_user_headers,\n params={\n 'amount': '10',\n 'batch_type': 'sequential',\n 'cve_year': '',\n 'short_name': 'mitre'\n }\n )\n assert res.status_code == 400\n response_contains_json(res, 'error', 'BAD_INPUT')", "def model_end_year(self):\n return self._model_end_year", "def __nonzero__(self):\n return not (self.year is None and\n self.month is None and\n self.day is None)", "def setYear(self, *args):\n return _libsbml.Date_setYear(self, *args)", "def get_year(x):\n return x[\"SALE DATE\"].year", "def _set_year(self, year) -> bool:\n if self.set_start_year(year) is False:\n return False\n return self.set_finish_year(year)", "def test_convert_date_to_year(self):\n # TODO there might be a more robust way to write this with try except statements.", "def model_start_year(self, model_start_year):\n\n self._model_start_year = model_start_year", "def test_valid_year(self):\n ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')\n obj = ar[2009]\n self.assertTrue(isinstance(obj, awstats_reader.AwstatsYear))", "def year_dropdown_populator():\n start_year_unique = funding_data['start_year'].unique()\n\n year_list = []\n for i in start_year_unique:\n if i == -1:\n # print({'label': i, 'value': i})\n # NA values has been changes to -1\n year_list.append({'label': 'NA', 'value': -1})\n else:\n x = int(i)\n # print({'label': x, 'value': i})\n year_list.append({'label': i, 'value': i})\n return year_list", "def yy(self):\n return str(self._year)[-2:]", "def test_year_filtering(self):\n # Get a valid date\n entry = Entry.objects.get(id=1)\n params = {\"year\": entry.publication_date.year}\n\n self._test_filtering(**params)", "def minyear():\n\n return datetime.MINYEAR", "def _get_blank_value_19(field):\n if field.null:\n return None\n else:\n return ''", "def test_spider_gets_specific_year(self):\n spider = Eia923Spider()\n resp = factories.TestResponseFactory(eia923=True)\n\n result = spider.form_for_year(resp, 2007)\n\n assert result is not None\n assert result.url == \"https://www.eia.gov/electricity/data/eia923/\" \\\n \"archive/xls/f906920_2007.zip\"\n assert result.meta[\"year\"] == 2007\n\n for year in range(2001, 2019):\n result = spider.form_for_year(resp, year)\n assert result is not None", "def fix_date(self, values):\n values[YEAR_CELL_INDEX] = 2017\n\n return self.generate_date(values)", "def model_start_year(self):\n return self._model_start_year", "def get_year_end(x: Optional[Date] = None) -> Date:\n return (x or get_today()).replace(month=12, day=31)", "def getagefromyear(year=None):\n if year is None:\n print(\"Please enter the year to assign class to them\")\n try:\n t = datetime.datetime.today()\n b = datetime.datetime.strptime(str(year), '%Y')\n a = (t - b).days / 365\n a = int(a)\n if (a < 10) or (a > 80):\n a = None\n except:\n a = None\n return a", 
"def test_invalid_beginning_of_year(self):\n year, month, day, error = clean_year_month_day(2014, 12, 31, 1)\n self.assertEqual(year, 2015)\n self.assertEqual(month, 1)\n self.assertEqual(day, 1)\n self.assertEqual(error, False)", "def fix_years(self, row):\n raise NotImplementedError", "def clean_year(palabra):\n palabra = str(palabra)\n palabra = palabra.replace('.', '')\n if palabra < '1000' or palabra > '2021':\n return None\n else:\n return palabra", "def year(self) -> int:\n return self.arxiv_id.year", "def yearlyDepreciation():\n return .10", "def get_year(parameters_dictionary):\n if \"start-year\" in parameters_dictionary.keys():\n year = int(parameters_dictionary[\"start-year\"])\n return str(year) + str(year + 1)\n elif \"end-year\" in parameters_dictionary.keys():\n year = int(parameters_dictionary[\"end-year\"])\n return str(year - 1) + str(year)\n else:\n return str(THIS_YEAR - 1) + str(THIS_YEAR)", "def getYear(movieInfo):\n if \"release_date\" in movieInfo:\n date = movieInfo[\"release_date\"]\n if (date != '') :\n return datetime.strptime(date, \"%Y-%m-%d\").year\n else:\n return 0\n else:\n raise AttributeError(\"%s instance has no attribute release_date\" % movieInfo)", "def get_date_display(self, context):\n return '{year}'.format(year=self.get_year())", "def run_year(self, year):\n pass", "def yearShown(self):\n return self.currentYear", "def maxyear():\n\n return datetime.MAXYEAR", "def end_year(self) -> float:\n\n end_year = -np.inf\n for td_table in list(self.tdve.values()) + self.transfers + self.interpops:\n if len(td_table.tvec) and np.amax(td_table.tvec) > end_year:\n end_year = np.amax(td_table.tvec)\n return end_year", "def _year_of_graduation(self):\n return str((12 - int(self._grade)) + self._this_year)[2:]", "def get_is_null_label(self):\n return pgettext_lazy('listfilter AbstractDateTime', 'Has no value')", "def complete_zeros(df_dm,year):\n df_dm.insert(1,year,0)\n return df_dm", "def date_year(date):\n return date.year", "def year(self) -> int:\n if self.is_old_style:\n yy = int(self.split('/', 1)[1][0:2])\n else:\n yy = int(self[:2])\n if yy > 90:\n return 1900 + yy\n return 2000 + yy", "def this_year(self):\n if self.time.year != datetime.datetime.now().year or self._this_year is None:\n self._update_time()\n return self._this_year", "def closeyear(year):\n\n # Return the specific year\n return int(year % 4)", "def getYear(self):\n return _libsbml.Date_getYear(self)", "def format_field(self, value, format_spec):\n if value is None:\n return format(value)\n else:\n return super(NoneFormatter, self).format_field(value, format_spec)\n if value is None:\n return format(value)\n else: raise e", "def testEndYear(self):\n sdq1 = getattr(self.s1, 'sdq1')\n self.app.REQUEST.form['endingYear'] = '1969'\n app = self.app\n dummy_controller_state = ControllerState(\n id='base_edit',\n context=sdq1,\n button='submit',\n status='success',\n errors={},\n next_action=None,)\n controller = self.portal.portal_form_controller\n controller_state = controller.validate(dummy_controller_state, app.REQUEST, ['validate_base',])\n errors = controller_state.getErrors()\n errors = sdq1.post_validate(self.app.REQUEST, errors)\n assert errors != {}, \"Validation error not raised\"\n assert errors.has_key('endingYear')", "def _get_blank_value_18(field):\n if field.null:\n return None\n else:\n return field.value_to_string(None)", "def get_year():\n try:\n year = input(\"Enter Year: \")\n year = int(year)\n if year > 2021 or year < 2000:\n os.system('cls')\n print(\"Accepted Values: 
2000-2021\")\n return get_year()\n else:\n os.system('cls')\n return year\n except ValueError:\n os.system('cls')\n print(\"Accepted Values: 2000-2021\")\n return get_year()", "def get_year(self) -> str:\n return str(self.movie.releasedate.year)", "def normalise_two_digit_year(y):\r\n if y[0] == \"'\":\r\n y = y[1:]\r\n if int(y) < 39:\r\n return '%04d' % (int(y) + 2000)\r\n elif int(y) < 100:\r\n return '%04d' % (int(y) + 1900)\r\n else:\r\n return '%04d' % int(y[:4])", "def search_year(self,strz):\t\n\t\tyr_pattern = compile(\"(19[56789]\\d|20[01]\\d)\")\n\t\tyr = yr_pattern.search(strz)\t\t\n\t\tif yr is None:\n\t\t\treturn strz\t#not find\n\t\telse:\n\t\t\tyr= yr.group(1)\n\t\t\tself.release_year=yr\n\t\t\treturn strz.replace(yr,\"\")", "def YEAR(\n serial_number: func_xltypes.XlNumber\n) -> func_xltypes.XlNumber:\n\n date = utils.number_to_datetime(int(serial_number))\n\n if (int(date.strftime(\"%Y\")) < int(utils.EXCEL_EPOCH.strftime(\"%Y\"))) \\\n or (int(date.strftime(\"%Y\")) > 9999):\n raise xlerrors.ValueExcelError(\n f'year {date.strftime(\"%Y\")} must be after \\\n {utils.EXCEL_EPOCH.strftime(\"%Y\")} and before 9999')\n\n return int(date.strftime(\"%Y\"))", "def clean_year_suciedad(palabra):\n\n try:\n if len(palabra) < 4:\n return None\n else:\n return palabra\n except:\n pass", "def addingNull(self, database):\r\n try:\r\n date = self.lineWidgets[\"FECHA\"].text()\r\n try:\r\n month = int(date.split(\"-\")[1])\r\n except ValueError:\r\n month = int(date.split(\"-\")[1][0])\r\n year = int(date.split(\"-\")[0])\r\n self.conn = connect(\"database.sqlite\")\r\n self.cur = self.conn.cursor()\r\n self.cur.execute(\r\n f'''INSERT INTO {database} (date, month_id, year, concept, \r\n value) VALUES(?, ?, ?, ?, ?)\r\n ''', (date, month, year, \"NADA\", 0))\r\n self.conn.commit()\r\n self.cur.close()\r\n except (ValueError, IndexError):\r\n QMessageBox.critical(\r\n self, \"ERROR\", '''Put the date in its correct form''')", "def get_year(date):\n return date.strftime('%Y')", "def validate_expiration_year(expiration_year: str) -> None:\n if not 2020 <= int(expiration_year) <= 2030:\n raise ValueError(\"Expiration year is outside permissible range\")", "def test_year_2000(self):\r\n season = \"1999-00\"\r\n res = get_end_year(season)\r\n assert res == 2000", "def year_scheme_revenue_neutral_rule(_m, y):\r\n\r\n return m.YEAR_SCHEME_REVENUE[y] == 0", "def get_year(self, grab):\n return int(\n grab.doc.select(\n '//time[@itemprop=\"releaseDate\"]'\n ).attr('datetime')\n )", "def model_year(self):\n # df_year = (self.doc.groupby('year')['token_speech'].sum().reset_index())\n df_year = (self.doc.groupby('year')['combined'].sum().reset_index())\n\n \"\"\"Applies LDA model\"\"\"\n df_year = self.model_unseen(df_year)\n\n return df_year # self.doc.groupby(['year']).sum().reset_index()", "def nullValueToNan(self) -> None:\n self.cpp.nullValueToNan()", "def start_year(self) -> float:\n\n start_year = np.inf\n for td_table in list(self.tdve.values()) + self.transfers + self.interpops:\n if len(td_table.tvec) and np.amin(td_table.tvec) < start_year:\n start_year = np.amin(td_table.tvec)\n return start_year", "def test_none(self):\n descriptor = clone(SPECIES_OBSERVATION_SCHEMA)\n record = {\n 'Observation Date': \"18/08/2016\",\n 'Latitude': -32,\n 'Longitude': 115,\n 'Species Name': None\n }\n schema = SpeciesObservationSchema(descriptor)\n with self.assertRaises(Exception):\n schema.cast_species_name(record)", "def get_year_desc():\n return Year.objects.all().order_by('-name')", "def 
read_year():\n try:\n year = int(input(\"Enter year: \"))\n return year\n except:\n print(\"Invalid input! Enter year!\")\n exit(-1)", "def get_year(time_index):\n return np.array(time_index.year).reshape(-1,1)", "def null_value_handler(datatype, value, null_format):\n if DataType.Name(datatype) == \"STRING\":\n if NullValues.STRING == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DOUBLE\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"FLOAT\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT32\":\n if NullValues.INT32 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT64\":\n if NullValues.INT64 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DURATION\":\n if NullValues.DURATION.equals(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"TIMESTAMP\":\n if NullValues.TIMESTAMP.equals(value):\n return null_format\n return value\n else:\n return value", "def null_value_handler(datatype, value, null_format):\n if DataType.Name(datatype) == \"STRING\":\n if NullValues.STRING == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DOUBLE\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"FLOAT\":\n if math.isnan(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT32\":\n if NullValues.INT32 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"INT64\":\n if NullValues.INT64 == value:\n return null_format\n return value\n elif DataType.Name(datatype) == \"DURATION\":\n if NullValues.DURATION.equals(value):\n return null_format\n return value\n elif DataType.Name(datatype) == \"TIMESTAMP\":\n if NullValues.TIMESTAMP.equals(value):\n return null_format\n return value\n else:\n return value", "def index(self, year=None):\n if year is not None:\n return year.start.year - self.year.start.year + 1\n else:\n return self.year.index()", "def isoformat(self):\n return \"\"" ]
[ "0.6695768", "0.64100987", "0.63534063", "0.63534063", "0.63534063", "0.63124233", "0.6275319", "0.60871744", "0.5998854", "0.5980421", "0.5950252", "0.58238184", "0.58105856", "0.5803543", "0.57519025", "0.5699215", "0.5694554", "0.56552154", "0.5642766", "0.5622533", "0.55991024", "0.55991024", "0.5572973", "0.5571081", "0.5554271", "0.5552238", "0.55338866", "0.55101615", "0.5506302", "0.5489991", "0.5475768", "0.54735917", "0.5455694", "0.5446127", "0.5432284", "0.5426798", "0.5426527", "0.5405271", "0.54025334", "0.5396695", "0.5389992", "0.53898114", "0.5385067", "0.5377096", "0.5357363", "0.53539276", "0.5339416", "0.53389144", "0.53275555", "0.5325159", "0.5284816", "0.52767986", "0.5248586", "0.5220694", "0.5211659", "0.5207559", "0.520055", "0.51937985", "0.51813775", "0.5179436", "0.5159888", "0.5159731", "0.5146457", "0.51428354", "0.51338667", "0.5117944", "0.5113773", "0.51082075", "0.50963694", "0.5084441", "0.50820637", "0.50770074", "0.50766206", "0.50757354", "0.5070471", "0.5064847", "0.50642735", "0.50526637", "0.5048836", "0.50451136", "0.5012646", "0.50066495", "0.49989003", "0.49937984", "0.4961637", "0.49549815", "0.49279737", "0.4926925", "0.49191353", "0.49135068", "0.49092668", "0.48930502", "0.4887369", "0.48865715", "0.48844716", "0.48837718", "0.48817557", "0.48647514", "0.48647514", "0.4861993", "0.48598883" ]
0.0
-1
Send special characters in EYear field
def test_24(self): assert 'False' == Api.requestBlock('test-24')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def year(cls, year: typing.Union[int, str])->str:\n yearstr: str\n if isinstance(year, int):\n yearstr = str(year)\n else:\n yearstr = year\n return cls.DATE_AND_TIMES_SIGIL + yearstr + \"-01-01T00:00:00/9\"", "def test_get_cve_id_year_format_with_letters(reg_user_headers):\n res = requests.get(\n f'{env.AWG_BASE_URL}{CVE_ID_URL}',\n headers=reg_user_headers,\n params={\n 'cve_id_year': 'test',\n }\n )\n assert res.status_code == 400\n response_contains_json(res, 'error', 'BAD_INPUT')", "def yy(self):\n return str(self._year)[-2:]", "def make_year(res):\n return str(res['issued']['date-parts'][0][0])", "def yearname(self):\n return self.strftime(\"%Y\")", "def get_year(string): \n return int(string[11:15])", "def year(self) -> int:\n if self.is_old_style:\n yy = int(self.split('/', 1)[1][0:2])\n else:\n yy = int(self[:2])\n if yy > 90:\n return 1900 + yy\n return 2000 + yy", "def set_Year(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Year', value)", "def normalise_two_digit_year(y):\r\n if y[0] == \"'\":\r\n y = y[1:]\r\n if int(y) < 39:\r\n return '%04d' % (int(y) + 2000)\r\n elif int(y) < 100:\r\n return '%04d' % (int(y) + 1900)\r\n else:\r\n return '%04d' % int(y[:4])", "def set_start_year(self, year):\n return self.form.set_value(\"output period \\\"year from\\\"\", str(year))", "def test_get_cve_id_year_format_with_digits(reg_user_headers):\n res = requests.get(\n f'{env.AWG_BASE_URL}{CVE_ID_URL}',\n headers=reg_user_headers,\n params={\n 'cve_id_year': '20111',\n }\n )\n assert res.status_code == 400\n response_contains_json(res, 'error', 'BAD_INPUT')", "def get_date(self,yearlimits=[1500,2020]):\n\t\thead = self.raw_text()[:300] \t \t \n\t\tparser = Regexdate(head) \t \t\t\n\t\tyear = parser.find_year(yearlimits)\t\t\n\t\tmonth = parser.find_month()\n\t\tday = parser.find_day()\n\t\tif day and year != \"\":\n\t\t\treturn year + \"-\" + month + \"-\" + day\t\n\t\tif year:\n\t\t\treturn year\n\t\treturn \"\"", "def get_event_year(eventName):\n\n\tyear = eventName.split(\"-\")[1]\n\treturn year", "def search_year(self,strz):\t\n\t\tyr_pattern = compile(\"(19[56789]\\d|20[01]\\d)\")\n\t\tyr = yr_pattern.search(strz)\t\t\n\t\tif yr is None:\n\t\t\treturn strz\t#not find\n\t\telse:\n\t\t\tyr= yr.group(1)\n\t\t\tself.release_year=yr\n\t\t\treturn strz.replace(yr,\"\")", "def set_year (self, year):\n self.year = year", "def test_spider_gets_specific_year(self):\n spider = Eia923Spider()\n resp = factories.TestResponseFactory(eia923=True)\n\n result = spider.form_for_year(resp, 2007)\n\n assert result is not None\n assert result.url == \"https://www.eia.gov/electricity/data/eia923/\" \\\n \"archive/xls/f906920_2007.zip\"\n assert result.meta[\"year\"] == 2007\n\n for year in range(2001, 2019):\n result = spider.form_for_year(resp, year)\n assert result is not None", "def _sanitize_year(self, datestr):\n try:\n year = str(datetime.datetime.strptime(datestr, '%Y').date().year)\n except:\n try:\n year = str(datetime.datetime.strptime(datestr,\n '%Y-%m-%d').date().year)\n except:\n year = None\n return year", "def set_finish_year(self, year):\n return self.form.set_value(\"output period \\\"year to\\\"\", str(year))", "def get_year(line):\n year = line.split(')')[0][-4:]\n return year", "def year(self, new_year):\n if self.valid_year(new_year):\n self._year = new_year\n else:\n raise ValueError", "def parse_year(year):\n\n return datetime.strptime(year, '%Y')", "def get_year(x):\n return x[\"SALE DATE\"].year", "def get_year(self) -> str:\n return 
str(self.movie.releasedate.year)", "def get_year(date):\n return date.strftime('%Y')", "def parse_academic_year(year):\n return int(year.split(\"/\")[0])", "def formatyear(self, theyear):\n\t\tv = []\n\t\ta = v.append\n\t\ta('<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" class=\"year\">')\n\t\ta('\\n')\n\t\ta('<tr><th>%s</th></tr>' % theyear)\n\t\tfor m in range(1, 13):\n\t\t\ta('<tr>')\n\t\t\ta('<td>')\n\t\t\ta(self.formatmonthinyear(theyear, m))\n\t\t\ta('</td>')\n\t\t\ta('</tr>')\n\t\ta('</table>')\n\t\treturn ''.join(v)", "def parse_year(txt):\n\n txt = txt.strip()\n if \"-\" in txt:\n res = re.sub('[^0-9]', '', txt)\n return [res[0:4], res[4:8]]\n else:\n return [txt, txt]", "def get_year(self):\n return self.year", "def test_year_boundary(self):\n date = datetime(2017, 1, 1)\n seq = 27\n name = star_barcode.barcode_filename(date, seq)\n self.assertEqual(\n name,\n 'Barcode_2016-W52-7_27.pdf'\n )", "def set_year(self, year):\n if year not in range(1970, 2120):\n raise ValueError(\"Year must be in range [1970..2129] but is {}\".format(year))\n\n # First we separate the tens and the digit. We also shift the year to\n # the range [0..159]\n tens, digit = divmod(int(year - 1970), 10)\n\n # Then we add them in a single int\n reg_value = (tens << 4) | digit\n\n # The we add it to a registory\n self.__write_register(_REGISTER_YEAR, reg_value)", "def showNextYear(self):\n pass", "def increment_year(self):", "def test_convert_date_to_year(self):\n # TODO there might be a more robust way to write this with try except statements.", "def year(self, year):\n\n self._year = year", "def date_year(date):\n return date.year", "def set_calender_year(self, year):\n self.single_selection_from_kendo_dropdown(self.calender_year_kendo_dropdown_locator, year)", "def year(self):\n return self._year", "def year(self):\n return self._year", "def _year_of_graduation(self):\n return str((12 - int(self._grade)) + self._this_year)[2:]", "def getIssnYear(artMeta):\n if artMeta == None:\n return\n else:\n issn = getIssn(artMeta)\n if issn == '':\n issn = artMeta['journal']\n if issn == '':\n return ('noJournal', artMeta['year'])\n issnYear = (issn, artMeta['year'])\n return issnYear", "def system_year(year):\n\tthis_query = Query('system_year', year)\n\tthis_query.send_query()\n\tresponse = this_query.pull_result()\n\treturn jsonify(response)\n\t#return render_template('response.html', response=response)", "def find_year(self,datelimits): \t \t \n year = \"\"\n\n\t \tmatch = re.search(r\"[I1][\\dG]{3}\",self.string)\n\n\t \tif match: \t \t \t \n\t \t\tif re.search(r\"(\\d{4})\",match.group()):\n\t \t\t\tyear = match.group()\n elif re.search(r\"I\\d{3}\",match.group()):\n\t \t\t\tmatch = re.sub(r\"I(\\d{3})\",r\"1\\1\",match.group())\n\t \t\t\tyear = match\n\t \t\telif re.search(r\"(\\d[G\\d]{3})\",match.group()):\n\t \t\t\tmatch = re.sub(r\"G\",r\"6\",match.group())\n\t \t\t\tyear = match\n\n \n if year == \"\" or int(year) < datelimits[0] or int(year) > datelimits[1]:\n year = \"\"\n \n\n\t \treturn year", "def extract_year(text):\n # type: (str) -> int\n data = re.search(r\"\\d{4}\", text)\n return int(data.group()) if data else 0", "def test_valid_year(self):\n ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')\n obj = ar[2009]\n self.assertTrue(isinstance(obj, awstats_reader.AwstatsYear))", "def validate_issue_year(issue_year: str) -> None:\n if not 2010 <= int(issue_year) <= 2020:\n raise ValueError(\"Issue year is outside permissible range\")", "def showPreviousYear(self):\n pass", "def 
get_year(parameters_dictionary):\n if \"start-year\" in parameters_dictionary.keys():\n year = int(parameters_dictionary[\"start-year\"])\n return str(year) + str(year + 1)\n elif \"end-year\" in parameters_dictionary.keys():\n year = int(parameters_dictionary[\"end-year\"])\n return str(year - 1) + str(year)\n else:\n return str(THIS_YEAR - 1) + str(THIS_YEAR)", "def get_year(self, filename):\n year = self.file.replace('s24_', '').replace('.vrt', '')\n self.logger.info(f'This .vrt file contains data for the year {year}')\n return year", "def _two_digit_year(t):\n dt = safe_fromtimestamp(t)\n year = dt.year\n if dt.month >= 7:\n year += 1\n return \"'%02d\" % (year % 100)", "def year(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"year\")", "def test_year_2000(self):\r\n season = \"1999-00\"\r\n res = get_end_year(season)\r\n assert res == 2000", "def test_year_filtering(self):\n # Get a valid date\n entry = Entry.objects.get(id=1)\n params = {\"year\": entry.publication_date.year}\n\n self._test_filtering(**params)", "def setYear(self, *args):\n return _libsbml.Date_setYear(self, *args)", "def get_year(self):\n\n # First we get the first 8 bits stored in the yqr register\n year_bcd = self.__read_register(_REGISTER_YEAR)\n\n # Then we extract the digits and the tens\n tens = (year_bcd & 0xF0) >> 4 # 0xF0 = 0b11110000\n digit = (year_bcd & 0x0F) # 0x0F = 0b00001111\n\n # We return year value shifted in range [1970..2129]\n return (10 * (tens) + digit) + 1970", "def year(self) -> int:\r\n return self._year", "def findYear(str):\n return int(re.search(\"(?<=yr=)\\d*\", str)[0])", "def test_date_by_yr(self):\n spi_search = \"find date 2002\"\n inv_search = \"year:2002\"\n self._compare_searches(inv_search, spi_search)", "def do_ry(self, arg):\n self.do_timesheet('report year')", "def closeyear(year):\n\n # Return the specific year\n return int(year % 4)", "def get_number_year(text):\n val = get_number(text)\n if val is None or val < 1700 or val > (datetime.date.today().year + 1):\n return None\n return val", "def get_date_display(self, context):\n return '{year}'.format(year=self.get_year())", "def Year(self, default=None):\n return self.data.get('year', default)", "def Year(self, default=None):\n return self.data.get('year', default)", "def Year(self, default=None):\n return self.data.get('year', default)", "def set_year(self, year: int) -> None:\n if year <= 0:\n # Seriously, are you trying to convince me this song was made before Christ?\n self.year = None\n return\n self.year = year", "def award_id_year(award_wikidata) -> 'award_id_year':\n assert re.match(r\"^\\d{4}$\", award_wikidata['value']['year'])\n return {'value': (award_wikidata['value']['award_id'], award_wikidata['value']['year'])}", "def get_year(self, grab):\n return int(\n grab.doc.select(\n '//time[@itemprop=\"releaseDate\"]'\n ).attr('datetime')\n )", "def run_year(self, year):\n pass", "def get_year(url):\n year = re.search(\"\\d{4}\", url).group(0)\n return int(year)", "def __year(self):\n return _VirtualColumn(\n df_name=self.thisptr[\"df_name_\"],\n operator=\"year\",\n operand1=self,\n operand2=None\n )", "def test_evaluate_year_expression(self):\n for f, r in (\n (\"year\", 2013),\n (\"month\", 9),\n (\"day\", 1),\n (\"hour\", 10),\n (\"minute\", 56),\n (\"second\", 0)):\n value = self.evaluate_common(\"%s(datetime'2013-09-01T10:56')\" % f)\n self.assertTrue(\n value.type_code == edm.SimpleType.Int32, \"Expected Int32\")\n self.assertTrue(value.value == r)\n try:\n value = self.evaluate_common(\n 
\"%s(datetimeoffset'2013-09-01T10:56:12-05:00')\" % f)\n self.fail(\"datetimeoffset %s\" % f)\n except odata.EvaluationError:\n pass\n try:\n value = self.evaluate_common(\n \"%s(datetime'2013-09-01T10:56',\"\n \"datetime'2013-09-01T10:57')\" % f)\n self.fail(\"2 parameters\")\n except odata.EvaluationError:\n pass", "def year(self):\n return self._years", "def _four_digit_year(t):\n dt = safe_fromtimestamp(t)\n year = dt.year\n if dt.month >= 7:\n year += 1\n return str(year)", "def year(self):\n\n properties_file = open(self.scenario_path + \"/conf/sandag_abm.properties\", \"r\")\n year = None\n\n for line in properties_file:\n # strip all white space from the line\n line = line.replace(\" \", \"\")\n\n # find line containing \"scenarioYear=\"\n m = re.compile(\"scenarioYear=\").match(line)\n if m:\n # take the portion of the line after the matching string\n # and return as the scenario year\n year = int(line[m.end():])\n break\n\n properties_file.close()\n\n return year", "def string_date(mnthDay, year):\n return(mnthDay + '/' + str(year))", "def getYear(self):\n return _libsbml.Date_getYear(self)", "def test_str(self):\n ary = self.ar[2009]\n self.assertEqual(str(ary), '<AwstatsYear 2009: 11, 12>')", "def yearlyDepreciation():\n return .10", "def test_date_by_yr_mo(self):\n spi_search = \"find date 1976-04\"\n inv_search = 'year:1976-04'\n self._compare_searches(inv_search, spi_search)", "def new_years_eve(year):\n return (year, DEC, 31)", "def year(self) -> int:\n return self.arxiv_id.year", "def decadeDecoder(year):\r\n \r\n decade = int(str(year)[2])\r\n decoder = {\r\n 8:'1980\\'',\r\n 9:'1990\\'',\r\n 0:'2000\\'',\r\n 1:'2010\\''\r\n }\r\n \r\n return decoder[decade]", "def __getitem__(self, key):\n if re.match(r\"^\\d{4}$\", key):\n # year\n return Year(self, key)\n raise KeyError", "def is_valid_issue_year(issue_year: int) -> bool:\n return issue_year.isnumeric() and 2010 <= int(issue_year) <= 2020", "def columna_year(palabra):\n if len(palabra) >= 4:\n return palabra[-4:]", "def copyrightRecord(inputstring):\n \n return inputstring[19:24]", "def clean_year(palabra):\n palabra = str(palabra)\n palabra = palabra.replace('.', '')\n if palabra < '1000' or palabra > '2021':\n return None\n else:\n return palabra", "def test_models_organization_fields_code_normalize(self):\n organization = factories.OrganizationFactory()\n\n organization.code = \"Lร &รงa boรด\"\n organization.save()\n self.assertEqual(organization.code, \"LACA-BOO\")", "def testEndYear(self):\n sdq1 = getattr(self.s1, 'sdq1')\n self.app.REQUEST.form['endingYear'] = '1969'\n app = self.app\n dummy_controller_state = ControllerState(\n id='base_edit',\n context=sdq1,\n button='submit',\n status='success',\n errors={},\n next_action=None,)\n controller = self.portal.portal_form_controller\n controller_state = controller.validate(dummy_controller_state, app.REQUEST, ['validate_base',])\n errors = controller_state.getErrors()\n errors = sdq1.post_validate(self.app.REQUEST, errors)\n assert errors != {}, \"Validation error not raised\"\n assert errors.has_key('endingYear')", "def dateParser(str):\n\tyear = ''\n\tfor c in str:\n\t\tif c.isspace():\n\t\t\tyear = ''\n\t\telif c.isdigit():\n\t\t\tyear = year + c\n\t\t\tif len(year) == 4:\n\t\t\t\tbreak\n\t\telse:\n\t\t\tyear = ''\n\tif len(year) < 4:\n\t\treturn None\n\treturn int(year)", "def isoformat(self):\n s = '{0:04}'.format(self._year)\n if self._month:\n s += '-{0:02}'.format(self._month)\n if self._day:\n s += '-{0:02}'.format(self._day)\n return s", "def 
fix_years(self, row):\n raise NotImplementedError", "def validate_birth_year(birth_year: str) -> None:\n if not 1920 <= int(birth_year) <= 2002:\n raise ValueError(\"Birth year is outside permissible range\")", "def read_year():\n try:\n year = int(input(\"Enter year: \"))\n return year\n except:\n print(\"Invalid input! Enter year!\")\n exit(-1)", "def __init__(self, data, year):\n self.year = year\n self.data = data", "def model_end_year(self):\n return self._model_end_year", "def year_expand(s):\n regex = r\"^((?:19|20)\\d{2})?(\\s*-\\s*)?((?:19|20)\\d{2})?$\"\n try:\n start, dash, end = re.match(regex, ustr(s)).groups()\n start = start or 1900\n end = end or 2099\n except AttributeError:\n return 1900, 2099\n return (int(start), int(end)) if dash else (int(start), int(start))", "def day_of_year(date=datetime.datetime.now()):\n return date.strftime(\"Its the %j day of %Y'th year.\")", "def get_year(self) -> Optional[int]:\n return self.year", "def model_end_year(self, model_end_year):\n\n self._model_end_year = model_end_year", "def generateCopyrightStr(config, firstYear):\n copyrightStr = \"\"\n thisYear = str(datetime.date.today().year)\n\n if not firstYear or firstYear == thisYear:\n copyrightStr = \"©\" + thisYear + \" \" + \\\n config['rssAuthor'] + \". All rights reserved.\"\n else:\n copyrightStr = \"©\" + firstYear + \"—\" + thisYear + \" \" + \\\n config['rssAuthor'] + \". All rights reserved.\"\n\n return unicode(copyrightStr, 'utf-8')" ]
[ "0.65770954", "0.6452787", "0.6308937", "0.624988", "0.62376374", "0.6176575", "0.6136731", "0.6083706", "0.6080361", "0.6062085", "0.5893685", "0.5889238", "0.58616006", "0.5854368", "0.5846842", "0.58077604", "0.5788896", "0.57787496", "0.57771987", "0.57497287", "0.57388437", "0.5724719", "0.57240015", "0.57211953", "0.5709528", "0.56872064", "0.56651294", "0.56525683", "0.56056607", "0.5593482", "0.5592131", "0.5583017", "0.55813247", "0.55764604", "0.55490845", "0.5548605", "0.55448586", "0.55448586", "0.5537537", "0.5528652", "0.55130106", "0.5512394", "0.55049366", "0.54798704", "0.54765564", "0.5475368", "0.5472871", "0.54488736", "0.5447101", "0.5415456", "0.54124415", "0.5404966", "0.53972536", "0.5394736", "0.5390161", "0.53863275", "0.5355852", "0.5352529", "0.5334051", "0.5316098", "0.5306242", "0.5272622", "0.5272622", "0.5272622", "0.5263064", "0.52564585", "0.5226326", "0.52262706", "0.52255464", "0.5221165", "0.5212235", "0.52121264", "0.52081144", "0.5203758", "0.51972127", "0.51810855", "0.51683545", "0.51630783", "0.5134281", "0.5133253", "0.5132765", "0.51252687", "0.510666", "0.5097759", "0.5095719", "0.50945956", "0.5091879", "0.50869113", "0.50784683", "0.5074132", "0.50679713", "0.50619495", "0.5061832", "0.5061418", "0.5052417", "0.5044819", "0.5043968", "0.5043392", "0.50366855", "0.50208336", "0.50157315" ]
0.0
-1
Check expired and current years (EYear = 18)
def test_25(self): assert 'False' == Api.requestBlock('test-25')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_exp_year(passport: map) -> bool:\n if passport.get('eyr'):\n if int(passport['eyr']) >= 2020 and int(passport['eyr']) <= 2030:\n return True\n\n return False", "def is_valid_expiration_year(expiration_year: int) -> bool:\n return expiration_year.isnumeric() and 2020 <= int(expiration_year) <= 2030", "def is_expired(self):\n\n if self._lifetime is not None and self._lifetime > 0:\n # 300 seconds waite is the tolerance !\n # The unit of lifetime is millisecond\n if (time.time() - self._create_date) * 1000 > self._lifetime + 300000:\n return True\n\n return False", "def expired(self):\n\n return self.getNotAfter() <= rpki.sundial.now()", "def cc_expire_years():\n current_year = datetime.datetime.now().year\n years = range(current_year, current_year + 12)\n return [(str(x), str(x)) for x in years]", "def is_expired(self):\n return utcnow() >= self.expires", "def _check_goauth_expiration(self, expiry):\n now = int(time.time())\n time_left = int(expiry) - now\n # 10 days\n min_time_left = 60*60*24*10\n if time_left < min_time_left:\n return False\n else:\n return True", "def validate_expiration_year(expiration_year: str) -> None:\n if not 2020 <= int(expiration_year) <= 2030:\n raise ValueError(\"Expiration year is outside permissible range\")", "def expired(self):\n return int(time.time()) > self.expires_at", "def validate_exp(self, now, leeway):\n if 'exp' in self:\n exp = self['exp']\n if not _validate_numeric_time(exp):\n raise InvalidClaimError('exp')\n if exp < (now - leeway):\n raise ExpiredTokenError()", "def is_expired(self) -> bool:\n return now() > self.expires", "def is_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return (self.date_joined + expiration_date <= datetime.datetime.now())", "def is_expired(self):\n return self.expiration_date <= self._now()", "def extended(self):\n if self.expires_at:\n return self.expires_at - self.issued_at > timedelta(days=30)\n return False", "def has_expired(self):\n self.ensure_one()\n return datetime.now() > fields.Datetime.from_string(self.expires)", "def is_access_expired(self) -> bool:\n entitlement_contract = self.cfg.entitlements.get(self.name, {})\n # TODO(No expiry per resource in MVP yet)\n expire_str = entitlement_contract.get('expires')\n if not expire_str:\n return False\n expiry = datetime.strptime(expire_str, '%Y-%m-%dT%H:%M:%S.%fZ')\n if expiry >= datetime.utcnow():\n return False\n return True", "def is_expired(self):\n if self.access_token is None:\n logging.debug('Access token not found')\n return True\n else:\n return (self.expiration <= datetime.now())", "def is_expired(self):\n delta = datetime.datetime.now() - self.created_at\n\n return delta.total_seconds() > 15*60", "def valid(self):\n return self.expiry > timezone.now()", "def is_valid_year(year):\n return 1750 <= year <= 2019", "def is_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n\n return (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def is_expired(self):\n return timeutils.utcnow_ts() > self.expire_ts", "def is_leap_year():", "def is_expired(self):\n return int(time.time()) - self.time > self.interval", "def isExpired(self):\n return True/False", "def has_expired(self) -> bool:\n raise NotImplementedError() # pragma: nocover", "def _has_expired(self):\r\n expired = False\r\n if hasattr(self, 'Expiration'):\r\n now = datetime.datetime.utcnow()\r\n expiration = datetime.datetime.strptime(self.Expiration, '%Y-%m-%dT%H:%M:%SZ')\r\n 
expired = (now >= expiration)\r\n else:\r\n raise ValueError(\"ERROR: Request for expired property, but no Expiration in HIT!\")\r\n return expired", "def duration(self):\n if self.is_valid:\n return relativedelta(self.expiry, datetime.date.today()).years\n else:\n return -1", "def expired(self): # pragma: no cover\n return self._state in (_State.EXPIRING, _State.EXPIRED)", "def valid_year(cls, new_year):\n if cls.MIN_YEAR <= new_year <= cls.MAX_YEAR:\n return True\n else:\n return False", "def test_expires(self):\n # We aren't bother going to test the actual time in expires, that\n # way lies pain with broken tests later.\n up = self.get(self.good_data)\n hdrs = dict(up.get_headers(1))\n lm = datetime(*utils.parsedate_tz(hdrs['Last-Modified'])[:7])\n exp = datetime(*utils.parsedate_tz(hdrs['Expires'])[:7])\n assert (exp - lm).seconds == 3600", "def is_expired(self):\n if not self.is_signed:\n return True\n return int(self._token_claims.get(self.__class__.exp_claim, 0)) < int(\n time.time()\n )", "def is_expired (self, now=None):\n if now is None: now = time.time()\n return self.is_idle_timed_out(now) or self.is_hard_timed_out(now)", "def expired(self) -> bool:\n if not self.use_wts:\n return False\n\n return datetime.now() > self.expire", "def activation_expired(self):\n return self.date_joined + timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS) < timezone.now()", "def clean_expiration_date(self):\n expiration_date = self.cleaned_data['expiration_date']\n if expiration_date.date() <= datetime.date.today():\n v_err('elapsed')\n return expiration_date", "def check_content_expiry_date(self):\n\n if self.expire_date < datetime.now():\n self.state = 'disabled'", "def test_date_by_lt_yr(self):\n spi_search = \"find date < 2002\"\n inv_search = 'year:0->2002'\n self._compare_searches(inv_search, spi_search)", "def is_expired(snap):\n exp_epoch = int(snap.split(\"_\")[const.VLAB_SNAP_EXPIRES])\n current_time = int(time.time())\n return exp_epoch < current_time", "def activation_key_expired(self):\n exp_date = timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.user.date_joined + exp_date <= datetime.now()", "def test_is_expired_time_based(self):\n expired_dt = datetime.now() + timedelta(hours=-1)\n good_dt = datetime.now() + timedelta(hours=1)\n expired_pass = DoorPassFactory.create(device=self.device, expires_at=expired_dt)\n good_pass = DoorPassFactory.create(device=self.device, expires_at=good_dt)\n self.assertTrue(expired_pass.is_expired())\n self.assertFalse(good_pass.is_expired())", "def expiry_date(self, today):\n three_years_ago = today + relativedelta(years=-3)\n three_years_in_the_future = today + relativedelta(years=+3)\n\n return date.fromordinal(random.randint(three_years_ago.toordinal(),\n three_years_in_the_future.toordinal()))", "def validate_issue_year(passport: map) -> bool:\n if passport.get('iyr'):\n if int(passport['iyr']) >= 2010 and int(passport['iyr']) <= 2020:\n return True\n\n return False", "def _is_expired(self):\n current_time = datetime.now()\n if (current_time > self._expires_at):\n logging.debug('token expired')\n return True\n else:\n return False", "def is_expired(self):\n\n return time.time() * 1000 - self._refreshed_on > self._expire", "def addThreeYears(self, today):\n\n # if today's date is any date except for 29th February (in a leap year), try block is executed, adding 3 years to current date\n try:\n threeYearsToday = today.replace(year = today.year + 3)\n return threeYearsToday\n\n # if today's date is 29th February (in a leap year), a 
ValueError is raised when 3 years are added because 29th Feb will only occur every 4 years\n except ValueError:\n # adds 3 years, then subtracts 1 day, so that resulting date is 28th Feb in three years, therefore expiry date will still be in the same month in 3 years time\n threeYearsToday = today.replace(year = today.year + 3, month = today.month, day = today.day - 1)\n return threeYearsToday", "def membership_valid(self):\n\n today = date.today()\n\n if self.dues_paid is None:\n return False\n\n months = 12 if self.dues_paid_year else 6\n dues_due = datetime.combine(self.dues_paid, datetime.min.time()) + relativedelta(months=+months)\n dues_due = dues_due.date()\n\n return dues_due > today", "def test_expires_soon(self):\n now = timezone.now()\n window = SparkSettings().RENEW_TOKEN_WINDOW\n cur = self.factory.build(access_token='good',\n expires_at=now + timedelta(seconds=window*2))\n exp = self.factory.build(access_token='expired',\n expires_at=now + timedelta(seconds=window/2))\n self.assertFalse(cur.expires_soon())\n self.assertTrue(exp.expires_soon())", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.user.date_joined + expiration_date <= datetime.datetime.now()", "def test_date_by_gt_yr(self):\n spi_search = \"find date > 1980\"\n inv_search = 'year:1980->9999'\n self._compare_searches(inv_search, spi_search)", "def is_expired(self) -> bool:\n if self.purpose == Purpose.password_reset:\n now = datetime.utcnow()\n expires_after = timedelta(hours=24)\n return now >= (self.created_at + expires_after)\n else:\n return False", "def test_centenary_positive():\n assert is_leap_year(2400) is True", "def is_expired(self):\n if self.status == TrainingStatus.ACCEPTED:\n if not self.training_type.valid_duration:\n return False\n else:\n return self.process_datetime + self.training_type.valid_duration < timezone.now()", "def check(self):\n validity_year = int(self.date[0:4])\n validity_month = int(self.date[5:7])\n validity_day = int(self.date[8:10])\n if datetime.today().year > validity_year:\n self.flag = False\n elif datetime.today().year == validity_year:\n if datetime.today().month > validity_month:\n self.flag = False\n elif datetime.today().month == validity_month:\n if datetime.today().day > validity_day:\n self.flag = False\n else:\n self.flag = True\n else:\n self.flag = True\n else:\n self.flag = True", "def get_age(self):\n today = datetime.now()\n return today.year \\\n - self.date_of_birth.year \\\n - ((today.month, self.date_of_birth.day) \\\n < (self.date_of_birth.month, self.date_of_birth.day))", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == RegistrationProfile.ACTIVATED or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def is_older_than_30(self):\n older = self.age >= 30\n return older", "def is_old(self):\n return self.age > self.lifespan", "def get_age(date):\n today = datetime.date.today()\n return today.year - date.year - ((today.month, today.day) < (date.month, date.day))", "def activation_key_expired(self):\r\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\r\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\r\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def isCurrentYear(self):\n t = time()\n return safegmtime(t + _tzoffset(self._tz, t))[0] == self._year", "def _check_leap(year):\n\n return ((year % 
4) == 0) and (not(((year % 100) == 0) and ((year % 400) != 0)))", "def expired(self):\n return rospy.get_rostime() - self.start_time > self.duration", "def _verify_timeout(self, doc):\n expires = doc['expires']\n if expires == 0:\n return False\n if expires >= self._time():\n return False\n return True", "def is_expired(self):\n return self._bExpired", "def __expired_timestamp(self, timestamp):\n return int(time.time()) > timestamp + self.__ttl", "def test_positive():\n assert is_leap_year(2016) is True", "def isValid( self ):\n\n assert self.issueDate\n now = int(time.time())\n\n if (now - self.issueDate) > const.SESSION_TICKET_LIFETIME:\n log.debug(\"Ticket is not valid anymore.\")\n return False\n\n return True", "def expire(self):\n Slate.expire(self)\n\n one_year = 60 * 60 * 24 * 365\n e = time.time() - one_year\n cherrypy.serving.response.cookie[self.session_cookie] = 'expired'\n cherrypy.serving.response.cookie[self.session_cookie]['expires'] = httputil.HTTPDate(e)", "def test_old_expiration(self):\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n key = Key('username', 'keyid', 'Active', created, last_used)\n key.audit(10, 11, 10, 8)\n assert key.audit_state == 'expire'", "def test_expired_course(self):\n CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1, tzinfo=UTC))\n course = CourseFactory.create(start=THREE_YEARS_AGO)\n url = course_home_url(course)\n\n for mode in [CourseMode.AUDIT, CourseMode.VERIFIED]:\n CourseModeFactory.create(course_id=course.id, mode_slug=mode)\n\n # assert that an if an expired audit user tries to access the course they are redirected to the dashboard\n audit_user = UserFactory(password=self.TEST_PASSWORD)\n self.client.login(username=audit_user.username, password=self.TEST_PASSWORD)\n audit_enrollment = CourseEnrollment.enroll(audit_user, course.id, mode=CourseMode.AUDIT)\n audit_enrollment.created = THREE_YEARS_AGO + timedelta(days=1)\n audit_enrollment.save()\n\n response = self.client.get(url)\n\n expiration_date = strftime_localized(course.start + timedelta(weeks=4) + timedelta(days=1), 'SHORT_DATE')\n expected_params = QueryDict(mutable=True)\n course_name = CourseOverview.get_from_id(course.id).display_name_with_default\n expected_params['access_response_error'] = 'Access to {run} expired on {expiration_date}'.format(\n run=course_name,\n expiration_date=expiration_date\n )\n expected_url = '{url}?{params}'.format(\n url=reverse('dashboard'),\n params=expected_params.urlencode()\n )\n self.assertRedirects(response, expected_url)", "def has_expired(self):\n if not self._initialized:\n return True\n\n expires_in = self.expires_in\n if expires_in > 0:\n return False\n else:\n return True", "def is_leap_year(self):\n\n yr = self.year\n if not yr%4 == 0:\n return False\n elif not yr%100 == 0: #if divisible by 4 and not divisible by 100\n return True\n elif not yr%400 == 0: #if divisible by 4, divisible by 100 and not divisible 400\n return False\n else:\n return True", "def ssl_expires_in(entity, serial_number, remaining, buffer_days=14):\n # if the cert expires in less than two weeks, we should reissue it\n if remaining < datetime.timedelta(days=0):\n # cert has already expired - uhoh!\n print(\"Cert %s issued to '%s' expired %s days ago!\"\n % (serial_number, entity, remaining.days))\n elif remaining < datetime.timedelta(days=buffer_days):\n # expires sooner than the buffer\n print(\"Cert %s issued to '%s' is nearly 
expired - %s more days\"\n % (serial_number, entity, remaining.days))\n else:\n # everything is fine\n print(\"Cert %s issued to '%s' is valid for %s more days\"\n % (serial_number, entity, remaining.days))", "def is_vintage(self):\n age = 2021 - self.year\n if age >= 50:\n return True\n else:\n return False", "def is_valid_year(year_range):\n\n if not year_range:\n return False\n\n if len(str(year_range)) != 8:\n return False\n\n year1 = year_range[:4]\n year2 = year_range[4:]\n\n try:\n if int(year2) - int(year1) == 1:\n if int(year1) <= int(get_current_hockey_year_start()):\n return True\n return False\n\n except Exception as e:\n print (\"inalid year passed\")\n print (str(e))\n print (traceback.print_exc())\n return False", "def test_centenary_negative():\n assert is_leap_year(2100) is False", "def has_expired(dirpath_build_cms, time_now):\n for (_, expiration, _) in _iter_expiration_files(\n os.path.join(dirpath_build_cms,\n _DIRNAME_EXPIRATION)):\n if (expiration is not None) and (expiration > time_now):\n return False\n return True", "def new_year(dacycle):\n\n this_year = dacycle['time.start'].year\n prev_year = (dacycle['time.start']-dacycle['cyclelength']).year\n\n return (this_year != prev_year)", "def check_contract_expire_soon():\n\n contract_expire_soon_list = []\n contract_expired_list = []\n\n # get user contract\n # refactoring techniques: replace temp with query\n user_role = get_user_role()\n contract_list = user_role.user_contracts\n\n for contract in contract_list:\n if contract['dateSigned'] and not contract['terminationDate']:\n\n # get expiry date and current date\n expiry_date = datetime.strptime(contract['expiryDate'][:19], \"%Y-%m-%dT%H:%M:%S\")\n current_time = datetime.now()\n \n # get the diffenrence between expiry date and current date\n difference = expiry_date - current_time\n days = divmod(difference.days, 86400)\n\n # Refactoring techniques: composing method\n contract_expire_soon = (days[1] <= 31) and (days[1] >= 0)\n contract_expired = days[0] < 0\n\n if contract_expire_soon:\n contract_expire_soon_list.append(contract)\n if contract_expired:\n contract_expired_list.append(contract)\n \n # return True if there's elem in any list, else False\n if len(contract_expire_soon_list) >= 1 or len(contract_expired_list) >= 1:\n return True, contract_expire_soon_list, contract_expired_list\n else:\n return False, contract_expire_soon_list, contract_expired_list", "def test_expired_course_in_holdback(self):\n CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1, tzinfo=UTC))\n\n course = CourseFactory.create(start=THREE_YEARS_AGO)\n url = course_home_url(course)\n\n for mode in [CourseMode.AUDIT, CourseMode.VERIFIED]:\n CourseModeFactory.create(course_id=course.id, mode_slug=mode)\n\n # assert that an if an expired audit user in the holdback tries to access the course\n # they are not redirected to the dashboard\n audit_user = UserFactory(password=self.TEST_PASSWORD)\n self.client.login(username=audit_user.username, password=self.TEST_PASSWORD)\n audit_enrollment = CourseEnrollment.enroll(audit_user, course.id, mode=CourseMode.AUDIT)\n Schedule.objects.update(start_date=THREE_YEARS_AGO)\n FBEEnrollmentExclusion.objects.create(\n enrollment=audit_enrollment\n )\n\n response = self.client.get(url)\n\n assert response.status_code == 200", "def is_vintage(self):\n return self.get_age()>=AGE", "def get_expiry():\n\n return get_or_append_details('expiry', \"Please enter your expiry date, two digits for the month and two digits for the 
year\")", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def get_age(self):\n return CURRENT_YEAR - self.year", "def get_age(self):\n return CURRENT_YEAR - self.year", "def getExpired(self, idle=365):\n cutOff = datetime.datetime.now() - datetime.timedelta(days=idle)\n return [x for x in self.xeps if x.status == \"Experimental\" and x.date < cutOff]", "def is_valid(self):\n return self.access_token is not None \\\n and time.time() < self._expiration_timestamp", "def _days_before_year(year):\n y = year - 1\n return y * 365 + y // 4 - y // 100 + y // 400", "def _about_to_expire(self, secret: Secret) -> bool:\n return secret.is_expired(datetime.now(UTC) + self.expiry_margin)", "def validate_iat(self, now, leeway):\n if 'iat' in self:\n iat = self['iat']\n if not _validate_numeric_time(iat):\n raise InvalidClaimError('iat')\n if iat > (now + leeway):\n raise InvalidTokenError(\n description='The token is not valid as it was issued in the future'\n )", "def getagefromyear(year=None):\n if year is None:\n print(\"Please enter the year to assign class to them\")\n try:\n t = datetime.datetime.today()\n b = datetime.datetime.strptime(str(year), '%Y')\n a = (t - b).days / 365\n a = int(a)\n if (a < 10) or (a > 80):\n a = None\n except:\n a = None\n return a", "def new_token_expiry_date():\n\treturn timezone.now() + datetime.timedelta(days=TOKEN_VALID_DATE)", "def test_negative():\n assert is_leap_year(2010) is False", "def token_is_expired(self):\n # type: () -> bool\n token = self.token\n if not token:\n return False\n\n return token[\"expires_at\"] < time()", "def test_unexpired_account_old_date_joined(self):\n self.user_info['date_joined'] = datetime_now(\n ) - timedelta(settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertFalse(profile.activation_key_expired())", "def valid_years():\n years_spent_watching = 0\n while years_spent_watching <= 0 or years_spent_watching > 60:\n try:\n years_spent_watching = int(input(\"How many years do you want to watch the gophers for? 
: \"))\n if years_spent_watching > 60:\n print(\"Please enter less than 60 years, you don't want to waste your life watching gophers do you?\")\n elif years_spent_watching <= 0:\n print(\"Please enter a number larger than 0 so you can actually watch the gophers.\")\n except ValueError:\n print(ValueError)\n print(\"Invalid input, please enter a number of years.\")\n return years_spent_watching", "def verify_aggPeriodOver(self):\n self.c.execute('''SELECT aggCode, aggPeriodOver FROM Agglomerations \n WHERE aggState = 1 \n AND (\n strftime('%Y', aggPeriodOver) < '1998' OR \n strftime('%Y', aggPeriodOver) >= '2023'\n )''')\n res = self.c.fetchall()\n\n if (len(res) > 0):\n return [False, \"In the agglomeration '%s' the aggPeriodOver '%s' has to be comprised between 1998 and 2023\",\n res]\n else:\n return [True]", "def LeapYear(self):\n if self.year is None:\n raise DateTimeError(\n \"Insufficient precision for leap year calculation\")\n if self.year % 4:\t\t\t# doesn't divide by 4\n return False\n elif self.year:\t\t\t# doesn't divide by 100\n return True\n elif self.century % 4: # doesn't divide by 400\n return False\n else:\n return True", "def isleapyear(yr):\n\n # TODO: MOVE all of this crap into a intelDateTime.py module. Does not belong here. JSS\n\n if yr % 400 == 0: return True\n if yr % 100 == 0: return False\n if yr % 4 == 0: return True\n return False", "def test_date_by_yr(self):\n spi_search = \"find date 2002\"\n inv_search = \"year:2002\"\n self._compare_searches(inv_search, spi_search)" ]
[ "0.69972354", "0.6889879", "0.6804469", "0.67775863", "0.67520446", "0.67160755", "0.6709071", "0.66965705", "0.6668055", "0.66046906", "0.6600447", "0.6594519", "0.65294313", "0.65037", "0.6477345", "0.647229", "0.6437253", "0.64222205", "0.64183605", "0.6384004", "0.63736963", "0.63633776", "0.63558745", "0.6350295", "0.6316659", "0.6312118", "0.6311123", "0.62814456", "0.62540513", "0.6239092", "0.6212992", "0.6207329", "0.61949086", "0.6190789", "0.6169869", "0.61671567", "0.6157718", "0.61568004", "0.6151166", "0.61502624", "0.61484987", "0.6128271", "0.6103084", "0.6096878", "0.6060056", "0.60587394", "0.60556984", "0.60468906", "0.6039146", "0.6034", "0.602962", "0.6028877", "0.6024461", "0.6020711", "0.6011749", "0.6003568", "0.6003543", "0.598814", "0.5981488", "0.5979126", "0.597442", "0.59685105", "0.5966273", "0.5964127", "0.5952935", "0.5947104", "0.5940716", "0.5936451", "0.5935674", "0.5935313", "0.59334064", "0.59175646", "0.59167045", "0.5912273", "0.59081584", "0.5904828", "0.5903274", "0.5896093", "0.58951426", "0.5878852", "0.5865946", "0.5861118", "0.58608943", "0.58604676", "0.58460385", "0.58460385", "0.5840249", "0.5837034", "0.5817724", "0.5808531", "0.5805814", "0.58056265", "0.58029604", "0.5797647", "0.57922786", "0.5789975", "0.578971", "0.57854754", "0.578464", "0.57689065", "0.57687885" ]
0.0
-1
Check expired and current years (EYear = 19)
def test_26(self): assert 'False' == Api.requestBlock('test-26')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_valid_expiration_year(expiration_year: int) -> bool:\n return expiration_year.isnumeric() and 2020 <= int(expiration_year) <= 2030", "def validate_exp_year(passport: map) -> bool:\n if passport.get('eyr'):\n if int(passport['eyr']) >= 2020 and int(passport['eyr']) <= 2030:\n return True\n\n return False", "def cc_expire_years():\n current_year = datetime.datetime.now().year\n years = range(current_year, current_year + 12)\n return [(str(x), str(x)) for x in years]", "def validate_expiration_year(expiration_year: str) -> None:\n if not 2020 <= int(expiration_year) <= 2030:\n raise ValueError(\"Expiration year is outside permissible range\")", "def expired(self):\n\n return self.getNotAfter() <= rpki.sundial.now()", "def is_expired(self):\n\n if self._lifetime is not None and self._lifetime > 0:\n # 300 seconds waite is the tolerance !\n # The unit of lifetime is millisecond\n if (time.time() - self._create_date) * 1000 > self._lifetime + 300000:\n return True\n\n return False", "def _check_goauth_expiration(self, expiry):\n now = int(time.time())\n time_left = int(expiry) - now\n # 10 days\n min_time_left = 60*60*24*10\n if time_left < min_time_left:\n return False\n else:\n return True", "def is_expired(self):\n return utcnow() >= self.expires", "def expired(self):\n return int(time.time()) > self.expires_at", "def is_expired(self) -> bool:\n return now() > self.expires", "def is_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return (self.date_joined + expiration_date <= datetime.datetime.now())", "def validate_exp(self, now, leeway):\n if 'exp' in self:\n exp = self['exp']\n if not _validate_numeric_time(exp):\n raise InvalidClaimError('exp')\n if exp < (now - leeway):\n raise ExpiredTokenError()", "def is_expired(self):\n return self.expiration_date <= self._now()", "def extended(self):\n if self.expires_at:\n return self.expires_at - self.issued_at > timedelta(days=30)\n return False", "def is_valid_year(year):\n return 1750 <= year <= 2019", "def has_expired(self):\n self.ensure_one()\n return datetime.now() > fields.Datetime.from_string(self.expires)", "def is_leap_year():", "def valid(self):\n return self.expiry > timezone.now()", "def is_access_expired(self) -> bool:\n entitlement_contract = self.cfg.entitlements.get(self.name, {})\n # TODO(No expiry per resource in MVP yet)\n expire_str = entitlement_contract.get('expires')\n if not expire_str:\n return False\n expiry = datetime.strptime(expire_str, '%Y-%m-%dT%H:%M:%S.%fZ')\n if expiry >= datetime.utcnow():\n return False\n return True", "def is_expired(self):\n if self.access_token is None:\n logging.debug('Access token not found')\n return True\n else:\n return (self.expiration <= datetime.now())", "def has_expired(self) -> bool:\n raise NotImplementedError() # pragma: nocover", "def valid_year(cls, new_year):\n if cls.MIN_YEAR <= new_year <= cls.MAX_YEAR:\n return True\n else:\n return False", "def is_expired(self):\n return timeutils.utcnow_ts() > self.expire_ts", "def is_expired(self):\n return int(time.time()) - self.time > self.interval", "def isExpired(self):\n return True/False", "def is_expired(self):\n delta = datetime.datetime.now() - self.created_at\n\n return delta.total_seconds() > 15*60", "def is_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n\n return (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def test_expires(self):\n # We aren't bother going to test the actual time in expires, 
that\n # way lies pain with broken tests later.\n up = self.get(self.good_data)\n hdrs = dict(up.get_headers(1))\n lm = datetime(*utils.parsedate_tz(hdrs['Last-Modified'])[:7])\n exp = datetime(*utils.parsedate_tz(hdrs['Expires'])[:7])\n assert (exp - lm).seconds == 3600", "def test_date_by_lt_yr(self):\n spi_search = \"find date < 2002\"\n inv_search = 'year:0->2002'\n self._compare_searches(inv_search, spi_search)", "def _has_expired(self):\r\n expired = False\r\n if hasattr(self, 'Expiration'):\r\n now = datetime.datetime.utcnow()\r\n expiration = datetime.datetime.strptime(self.Expiration, '%Y-%m-%dT%H:%M:%SZ')\r\n expired = (now >= expiration)\r\n else:\r\n raise ValueError(\"ERROR: Request for expired property, but no Expiration in HIT!\")\r\n return expired", "def expired(self): # pragma: no cover\n return self._state in (_State.EXPIRING, _State.EXPIRED)", "def duration(self):\n if self.is_valid:\n return relativedelta(self.expiry, datetime.date.today()).years\n else:\n return -1", "def test_date_by_gt_yr(self):\n spi_search = \"find date > 1980\"\n inv_search = 'year:1980->9999'\n self._compare_searches(inv_search, spi_search)", "def validate_issue_year(passport: map) -> bool:\n if passport.get('iyr'):\n if int(passport['iyr']) >= 2010 and int(passport['iyr']) <= 2020:\n return True\n\n return False", "def is_expired(self):\n if not self.is_signed:\n return True\n return int(self._token_claims.get(self.__class__.exp_claim, 0)) < int(\n time.time()\n )", "def check_content_expiry_date(self):\n\n if self.expire_date < datetime.now():\n self.state = 'disabled'", "def is_expired(snap):\n exp_epoch = int(snap.split(\"_\")[const.VLAB_SNAP_EXPIRES])\n current_time = int(time.time())\n return exp_epoch < current_time", "def expired(self) -> bool:\n if not self.use_wts:\n return False\n\n return datetime.now() > self.expire", "def activation_expired(self):\n return self.date_joined + timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS) < timezone.now()", "def clean_expiration_date(self):\n expiration_date = self.cleaned_data['expiration_date']\n if expiration_date.date() <= datetime.date.today():\n v_err('elapsed')\n return expiration_date", "def activation_key_expired(self):\n exp_date = timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.user.date_joined + exp_date <= datetime.now()", "def isCurrentYear(self):\n t = time()\n return safegmtime(t + _tzoffset(self._tz, t))[0] == self._year", "def is_expired (self, now=None):\n if now is None: now = time.time()\n return self.is_idle_timed_out(now) or self.is_hard_timed_out(now)", "def _is_expired(self):\n current_time = datetime.now()\n if (current_time > self._expires_at):\n logging.debug('token expired')\n return True\n else:\n return False", "def expiry_date(self, today):\n three_years_ago = today + relativedelta(years=-3)\n three_years_in_the_future = today + relativedelta(years=+3)\n\n return date.fromordinal(random.randint(three_years_ago.toordinal(),\n three_years_in_the_future.toordinal()))", "def test_centenary_positive():\n assert is_leap_year(2400) is True", "def check(self):\n validity_year = int(self.date[0:4])\n validity_month = int(self.date[5:7])\n validity_day = int(self.date[8:10])\n if datetime.today().year > validity_year:\n self.flag = False\n elif datetime.today().year == validity_year:\n if datetime.today().month > validity_month:\n self.flag = False\n elif datetime.today().month == validity_month:\n if datetime.today().day > validity_day:\n self.flag = False\n else:\n self.flag = True\n else:\n self.flag = 
True\n else:\n self.flag = True", "def is_expired(self):\n\n return time.time() * 1000 - self._refreshed_on > self._expire", "def is_valid_year(year_range):\n\n if not year_range:\n return False\n\n if len(str(year_range)) != 8:\n return False\n\n year1 = year_range[:4]\n year2 = year_range[4:]\n\n try:\n if int(year2) - int(year1) == 1:\n if int(year1) <= int(get_current_hockey_year_start()):\n return True\n return False\n\n except Exception as e:\n print (\"inalid year passed\")\n print (str(e))\n print (traceback.print_exc())\n return False", "def test_positive():\n assert is_leap_year(2016) is True", "def test_expires_soon(self):\n now = timezone.now()\n window = SparkSettings().RENEW_TOKEN_WINDOW\n cur = self.factory.build(access_token='good',\n expires_at=now + timedelta(seconds=window*2))\n exp = self.factory.build(access_token='expired',\n expires_at=now + timedelta(seconds=window/2))\n self.assertFalse(cur.expires_soon())\n self.assertTrue(exp.expires_soon())", "def membership_valid(self):\n\n today = date.today()\n\n if self.dues_paid is None:\n return False\n\n months = 12 if self.dues_paid_year else 6\n dues_due = datetime.combine(self.dues_paid, datetime.min.time()) + relativedelta(months=+months)\n dues_due = dues_due.date()\n\n return dues_due > today", "def _check_leap(year):\n\n return ((year % 4) == 0) and (not(((year % 100) == 0) and ((year % 400) != 0)))", "def addThreeYears(self, today):\n\n # if today's date is any date except for 29th February (in a leap year), try block is executed, adding 3 years to current date\n try:\n threeYearsToday = today.replace(year = today.year + 3)\n return threeYearsToday\n\n # if today's date is 29th February (in a leap year), a ValueError is raised when 3 years are added because 29th Feb will only occur every 4 years\n except ValueError:\n # adds 3 years, then subtracts 1 day, so that resulting date is 28th Feb in three years, therefore expiry date will still be in the same month in 3 years time\n threeYearsToday = today.replace(year = today.year + 3, month = today.month, day = today.day - 1)\n return threeYearsToday", "def test_is_expired_time_based(self):\n expired_dt = datetime.now() + timedelta(hours=-1)\n good_dt = datetime.now() + timedelta(hours=1)\n expired_pass = DoorPassFactory.create(device=self.device, expires_at=expired_dt)\n good_pass = DoorPassFactory.create(device=self.device, expires_at=good_dt)\n self.assertTrue(expired_pass.is_expired())\n self.assertFalse(good_pass.is_expired())", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.user.date_joined + expiration_date <= datetime.datetime.now()", "def _verify_timeout(self, doc):\n expires = doc['expires']\n if expires == 0:\n return False\n if expires >= self._time():\n return False\n return True", "def new_year(dacycle):\n\n this_year = dacycle['time.start'].year\n prev_year = (dacycle['time.start']-dacycle['cyclelength']).year\n\n return (this_year != prev_year)", "def is_expired(self) -> bool:\n if self.purpose == Purpose.password_reset:\n now = datetime.utcnow()\n expires_after = timedelta(hours=24)\n return now >= (self.created_at + expires_after)\n else:\n return False", "def is_expired(self):\n return self._bExpired", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == RegistrationProfile.ACTIVATED or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def 
test_centenary_negative():\n assert is_leap_year(2100) is False", "def is_leap_year(self):\n\n yr = self.year\n if not yr%4 == 0:\n return False\n elif not yr%100 == 0: #if divisible by 4 and not divisible by 100\n return True\n elif not yr%400 == 0: #if divisible by 4, divisible by 100 and not divisible 400\n return False\n else:\n return True", "def has_expired(self):\n if not self._initialized:\n return True\n\n expires_in = self.expires_in\n if expires_in > 0:\n return False\n else:\n return True", "def is_expired(self):\n if self.status == TrainingStatus.ACCEPTED:\n if not self.training_type.valid_duration:\n return False\n else:\n return self.process_datetime + self.training_type.valid_duration < timezone.now()", "def activation_key_expired(self):\r\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\r\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\r\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def get_expiry():\n\n return get_or_append_details('expiry', \"Please enter your expiry date, two digits for the month and two digits for the year\")", "def __expired_timestamp(self, timestamp):\n return int(time.time()) > timestamp + self.__ttl", "def getagefromyear(year=None):\n if year is None:\n print(\"Please enter the year to assign class to them\")\n try:\n t = datetime.datetime.today()\n b = datetime.datetime.strptime(str(year), '%Y')\n a = (t - b).days / 365\n a = int(a)\n if (a < 10) or (a > 80):\n a = None\n except:\n a = None\n return a", "def test_date_by_yr(self):\n spi_search = \"find date 2002\"\n inv_search = \"year:2002\"\n self._compare_searches(inv_search, spi_search)", "def get_age(self):\n today = datetime.now()\n return today.year \\\n - self.date_of_birth.year \\\n - ((today.month, self.date_of_birth.day) \\\n < (self.date_of_birth.month, self.date_of_birth.day))", "def test_found_all_years(self):\n ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')\n self.assertEqual(ar.years, [2008,2009])", "def expired(self):\n return rospy.get_rostime() - self.start_time > self.duration", "def verify_aggPeriodOver(self):\n self.c.execute('''SELECT aggCode, aggPeriodOver FROM Agglomerations \n WHERE aggState = 1 \n AND (\n strftime('%Y', aggPeriodOver) < '1998' OR \n strftime('%Y', aggPeriodOver) >= '2023'\n )''')\n res = self.c.fetchall()\n\n if (len(res) > 0):\n return [False, \"In the agglomeration '%s' the aggPeriodOver '%s' has to be comprised between 1998 and 2023\",\n res]\n else:\n return [True]", "def is_old(self):\n return self.age > self.lifespan", "def test_negative():\n assert is_leap_year(2010) is False", "def check_contract_expire_soon():\n\n contract_expire_soon_list = []\n contract_expired_list = []\n\n # get user contract\n # refactoring techniques: replace temp with query\n user_role = get_user_role()\n contract_list = user_role.user_contracts\n\n for contract in contract_list:\n if contract['dateSigned'] and not contract['terminationDate']:\n\n # get expiry date and current date\n expiry_date = datetime.strptime(contract['expiryDate'][:19], \"%Y-%m-%dT%H:%M:%S\")\n current_time = datetime.now()\n \n # get the diffenrence between expiry date and current date\n difference = expiry_date - current_time\n days = divmod(difference.days, 86400)\n\n # Refactoring techniques: composing method\n contract_expire_soon = (days[1] <= 31) and (days[1] >= 0)\n contract_expired = days[0] < 0\n\n if contract_expire_soon:\n contract_expire_soon_list.append(contract)\n if contract_expired:\n 
contract_expired_list.append(contract)\n \n # return True if there's elem in any list, else False\n if len(contract_expire_soon_list) >= 1 or len(contract_expired_list) >= 1:\n return True, contract_expire_soon_list, contract_expired_list\n else:\n return False, contract_expire_soon_list, contract_expired_list", "def get_age(date):\n today = datetime.date.today()\n return today.year - date.year - ((today.month, today.day) < (date.month, date.day))", "def valid_years():\n years_spent_watching = 0\n while years_spent_watching <= 0 or years_spent_watching > 60:\n try:\n years_spent_watching = int(input(\"How many years do you want to watch the gophers for? : \"))\n if years_spent_watching > 60:\n print(\"Please enter less than 60 years, you don't want to waste your life watching gophers do you?\")\n elif years_spent_watching <= 0:\n print(\"Please enter a number larger than 0 so you can actually watch the gophers.\")\n except ValueError:\n print(ValueError)\n print(\"Invalid input, please enter a number of years.\")\n return years_spent_watching", "def ssl_expires_in(entity, serial_number, remaining, buffer_days=14):\n # if the cert expires in less than two weeks, we should reissue it\n if remaining < datetime.timedelta(days=0):\n # cert has already expired - uhoh!\n print(\"Cert %s issued to '%s' expired %s days ago!\"\n % (serial_number, entity, remaining.days))\n elif remaining < datetime.timedelta(days=buffer_days):\n # expires sooner than the buffer\n print(\"Cert %s issued to '%s' is nearly expired - %s more days\"\n % (serial_number, entity, remaining.days))\n else:\n # everything is fine\n print(\"Cert %s issued to '%s' is valid for %s more days\"\n % (serial_number, entity, remaining.days))", "def has_expired(dirpath_build_cms, time_now):\n for (_, expiration, _) in _iter_expiration_files(\n os.path.join(dirpath_build_cms,\n _DIRNAME_EXPIRATION)):\n if (expiration is not None) and (expiration > time_now):\n return False\n return True", "def test_old_expiration(self):\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n key = Key('username', 'keyid', 'Active', created, last_used)\n key.audit(10, 11, 10, 8)\n assert key.audit_state == 'expire'", "def is_older_than_30(self):\n older = self.age >= 30\n return older", "def is_vintage(self):\n age = 2021 - self.year\n if age >= 50:\n return True\n else:\n return False", "def test_date_by_lt_yr_mo(self):\n spi_search = \"find date < 1978-10-21\"\n inv_search = 'year:0->1978-10-21'\n self._compare_searches(inv_search, spi_search)", "def is_valid(self):\n return self.access_token is not None \\\n and time.time() < self._expiration_timestamp", "def _days_before_year(year):\n y = year - 1\n return y * 365 + y // 4 - y // 100 + y // 400", "def isleapyear(yr):\n\n # TODO: MOVE all of this crap into a intelDateTime.py module. Does not belong here. 
JSS\n\n if yr % 400 == 0: return True\n if yr % 100 == 0: return False\n if yr % 4 == 0: return True\n return False", "def test_no_exception_when_from_year_before_1900(self):\n req = MockRequest(self.env, args={\n 'from': '1899-12-23',\n 'daysback': 7,\n })\n\n TimelineModule(self.env).process_request(req)\n\n self.assertIn('prev', req.chrome['links'])", "def isValid( self ):\n\n assert self.issueDate\n now = int(time.time())\n\n if (now - self.issueDate) > const.SESSION_TICKET_LIFETIME:\n log.debug(\"Ticket is not valid anymore.\")\n return False\n\n return True", "def expire(self):\n Slate.expire(self)\n\n one_year = 60 * 60 * 24 * 365\n e = time.time() - one_year\n cherrypy.serving.response.cookie[self.session_cookie] = 'expired'\n cherrypy.serving.response.cookie[self.session_cookie]['expires'] = httputil.HTTPDate(e)", "def LeapYear(self):\n if self.year is None:\n raise DateTimeError(\n \"Insufficient precision for leap year calculation\")\n if self.year % 4:\t\t\t# doesn't divide by 4\n return False\n elif self.year:\t\t\t# doesn't divide by 100\n return True\n elif self.century % 4: # doesn't divide by 400\n return False\n else:\n return True", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def is_vintage(self):\n return self.get_age()>=AGE", "def test_valid_year(self):\n ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')\n obj = ar[2009]\n self.assertTrue(isinstance(obj, awstats_reader.AwstatsYear))", "def validate_iat(self, now, leeway):\n if 'iat' in self:\n iat = self['iat']\n if not _validate_numeric_time(iat):\n raise InvalidClaimError('iat')\n if iat > (now + leeway):\n raise InvalidTokenError(\n description='The token is not valid as it was issued in the future'\n )", "def _about_to_expire(self, secret: Secret) -> bool:\n return secret.is_expired(datetime.now(UTC) + self.expiry_margin)", "def xbrl_years(self):\n return [year for year in self.years if year >= 2021]", "def user_have_year(self, user):\n return 'bdate' in user and len(user['bdate'].split(\".\")[-1]) == 4", "def token_is_expired(self):\n # type: () -> bool\n token = self.token\n if not token:\n return False\n\n return token[\"expires_at\"] < time()", "def get_age(self):\n return CURRENT_YEAR - self.year" ]
[ "0.69665146", "0.6962839", "0.6833878", "0.67656463", "0.6731954", "0.6701308", "0.66782004", "0.6657464", "0.6603246", "0.656257", "0.6515073", "0.6505968", "0.6495955", "0.64820564", "0.6465929", "0.6452773", "0.64282364", "0.6425703", "0.64153063", "0.6412585", "0.63467216", "0.6329601", "0.63236", "0.6311746", "0.6306664", "0.62995917", "0.62915117", "0.62628454", "0.625466", "0.6254641", "0.622415", "0.6212462", "0.62080336", "0.6191802", "0.61810756", "0.61633927", "0.61309713", "0.6128621", "0.61280435", "0.6104453", "0.60911775", "0.6089544", "0.6085563", "0.60782397", "0.6057766", "0.60432327", "0.6035589", "0.6034634", "0.60258317", "0.60192007", "0.60008264", "0.5994926", "0.5991552", "0.5989564", "0.5987413", "0.59835327", "0.5972867", "0.59660274", "0.5957165", "0.59565306", "0.5953172", "0.59511", "0.5948987", "0.5942537", "0.5934865", "0.59335184", "0.59321976", "0.59259784", "0.59137535", "0.59101313", "0.59098136", "0.5902529", "0.5901", "0.5894348", "0.5891007", "0.58883595", "0.58759004", "0.58757544", "0.5871149", "0.5860719", "0.5860037", "0.58515614", "0.58503354", "0.58474964", "0.5839751", "0.58366615", "0.58363813", "0.5833188", "0.5826599", "0.582334", "0.5821317", "0.5817389", "0.5812073", "0.5809376", "0.580633", "0.5794103", "0.579222", "0.5790651", "0.57897246", "0.5783127", "0.5780911" ]
0.0
-1
Send null value in CardHolder field
def test_27(self): assert 'False' == Api.requestBlock('test-27')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_blank_value_19(field):\n if field.null:\n return None\n else:\n return ''", "def _get_blank_value_18(field):\n if field.null:\n return None\n else:\n return field.value_to_string(None)", "def testNoneValue(self):\n objectID = uuid4()\n user = createUser(u'username', u'password', u'User',\n u'user@example.com')\n namespace = createNamespace(user, u'name')\n tag = createTag(user, namespace, u'tag')\n self.store.add(TagValue(user.id, tag.id, objectID, None))", "def get_prep_value(self, value):\n if (value is UNKNOWN) or (value is ''):\n # If Django tries to save an empty string, send the db None (NULL).\n return None\n else:\n # Otherwise, just pass the value.\n return value", "def __init__(self, char_name, char_description):\r\n super().__init__(char_name, char_description)\r\n self.gift = None\r\n self.gift_conversation = None", "def __str__(self):\n return self.card_no", "def add_nil_values(self, coverage_id=None, value=None, reason=None):", "def add_nil_values(self, coverage_id=None, value=None, reason=None):", "def add_nil_values(self, coverage_id=None, value=None, reason=None):", "def hide_card(self):\n try:\n self.hidden_card_value = self.hand[1]\n self.hand[1] = Card()\n except IndexError:\n print('The dealer does not have enough cards!')", "def wipe_empty_fields(card):\n cardB = []\n for field in card:\n if isinstance(field, basestring):\n field = field.strip()\n if field == '':\n field = None\n cardB.append(field)\n\n i = 0\n iMax = 0\n while i < len(card):\n if cardB[i] is not None:\n iMax = i\n i += 1\n return cardB[:iMax + 1]", "def none_to_empty(data):\n return data if data is not None else ''", "def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n instance.set_default()\n\n serializer = self.get_serializer(instance)\n return Response({'code': 0, 'card': serializer.data})", "def noneType(value):\r\n return ''", "def _set_if_not_none(self, field_key, value, verifier=str):\n\n\t\tif value is None:\n\t\t\treturn\n\n\t\tif verifier is not None:\n\t\t\tvalue = verifier(value)\n\n\t\tself.data[field_key] = value", "def _deserialize_null(self, *args):\n return None", "def test_update_field_to_null(self, field, field_name):\n control = factories.ControlFactory()\n\n response = self.api.put(control, control.id, {field: None})\n\n self.assert400(response)\n self.assertEqual(response.json[\"message\"],\n field_name + \" for the object is not specified\")\n control = db.session.query(all_models.Control).get(control.id)\n self.assertIsNotNone(control.external_id)", "def silent_none(value):\n if value is None:\n return ''\n return value", "def _hackBotchedCard(self, card, res):\n\t\tmat = re.match(r\"([^\\s=]*)\\s*=\\s*([^/]+)\", card.cardimage)\n\t\tif mat:\n\t\t\tres[mat.group(1)] = mat.group(2).strip()\n\t\telse: # Card beyond recognition, ignore\n\t\t\tpass", "def card(self, c=None):\n if c: self._card = c\n return self._card", "def __str__(self):\r\n # If the original value is None, represent this as 'NULL'\r\n if self.original is None:\r\n return 'NULL'\r\n return str(self.original)", "def _decode_none(value):\n return value", "def test_add_none_field(self):\n user_id = get_rand_string()\n data = get_rand_string()\n id = get_rand_string()\n\n doc = {}\n doc[\"user_id\"] = user_id\n doc[\"data\"] = data\n doc[\"id\"] = id\n doc[\"num\"] = None\n\n self.conn.add(**doc)", "def test_serialize_none(self):\n self.assertEqual(serialize(None), 'null')", "def field(self):\n return None", "def test_empty_value(self):\n avp_val = avp.AVP(0)\n 
self.assertEqual(avp_val.value, None)\n self.assertEqual(avp_val.payload, None)\n\n # We can then set its value\n avp_val.value = b''\n self.assertEqual(avp_val.value, b'')\n self.assertEqual(avp_val.payload, b'')\n\n # And unset it again\n avp_val.value = None\n self.assertEqual(avp_val.value, None)\n self.assertEqual(avp_val.payload, None)", "def test_update_risk_field_to_null(self, field, field_name):\n risk = factories.RiskFactory()\n\n response = self.api.put(risk, risk.id, {\n field: None,\n })\n\n self.assert400(response)\n self.assertEqual(response.json[\"message\"],\n field_name + \" for the object is not specified\")\n risk = db.session.query(all_models.Risk).get(risk.id)\n self.assertIsNotNone(risk.external_id)", "def demote(self):\n if self.rank != \"A\":\n raise TypeError(\"Card must be an Ace\")\n else:\n self.value = 1", "def _nullify(self, value):\n if not str(value).strip():\n return None\n else:\n return value", "def __init__(self, cvNumber='', expiryDate='', pan='', accountHolderName='', Tender=None, *args, **kw_args):\n #: The card verification number.\n self.cvNumber = cvNumber\n\n #: The date when this card expires.\n self.expiryDate = expiryDate\n\n #: The primary account number.\n self.pan = pan\n\n #: Name of account holder.\n self.accountHolderName = accountHolderName\n\n self._Tender = None\n self.Tender = Tender\n\n super(Card, self).__init__(*args, **kw_args)", "def test_get_feedback_none(self):\n result = ''\n self.xblock.credit_dict = None\n test_result = self.xblock.get_feedback_message()\n self.assertEquals(result, test_result)", "def _prepare_card(self, card_id, player_id=None):\n if player_id is None:\n player = self.p0\n else:\n player = self.game.get_player(player_id)\n player.add_mana(10, 'T')\n card, _ = player.generate(Zone.Hand, 'last', card_id)\n return card", "def test_null_field(self):\r\n problem = self.get_item_from_modulestore(self.problem_usage_key, True)\r\n self.assertIsNotNone(problem.markdown)\r\n self.client.ajax_post(\r\n self.problem_update_url,\r\n data={'nullout': ['markdown']}\r\n )\r\n problem = self.get_item_from_modulestore(self.problem_usage_key, True)\r\n self.assertIsNone(problem.markdown)", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, 
\"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def null_value(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"null_value\")", "def testNoneAssignment(self):\n class MyMessage(messages.Message):\n\n my_field = messages.StringField(1)\n\n m1 = MyMessage()\n m2 = MyMessage()\n m2.my_field = None\n self.assertEquals(m1, m2)", "def test_create_risk_with_empty_field(self, field):\n risk_body = self.generate_risk_body()\n risk_body[field] = None\n\n response = self.api.post(all_models.Risk, data=risk_body)\n\n self.assert400(response)", "def test_control_create_with_empty_field(self, field):\n request = self.prepare_control_request_body()\n request[field] = None\n\n response = self.api.post(all_models.Control, data=request)\n\n self.assert400(response)", "def get_card (self, card):\n\t\treturn self._card", "def clean(self, value):\n if self.null_option is not None and value == settings.FILTERS_NULL_CHOICE_VALUE:\n return None\n return super().clean(value)", "def clearField(self):\n self.field.setValue(self.default_val)", "def clearField(self):\n self.field.setValue(self.default_val)", "def _set_None(self):\n\n self.description = None\n self.func = None", "def _assignValue(value):\n if value == \"\":\n return None\n else:\n return value", "def get_prep_value(self, value):\r\n if value == \"\" or value is None:\r\n return None\r\n\r\n #if isinstance(value, dict):\r\n value = json.dumps(value, default=encode_object, ensure_ascii=False, separators=(',',':'))\r\n\r\n return super(JSONField, self).get_prep_value(value)", "def is_null(self):\n return self.value is None", "def _default_value(self):\n return None", "def __init__(self, cardholder_type: str=None, full_name: str=None, card_data: CardEmbossing=None, identity_document_number: str=None, other_identity_document_number: PersonalIdentityDocumentInfo=None, birth_date: str=None, nationality: str=None, gender: str=None, civil_status: str=None, contact_information: ContactInformation=None): # noqa: E501\n self.swagger_types = {\n 'cardholder_type': str,\n 'full_name': str,\n 'card_data': CardEmbossing,\n 'identity_document_number': str,\n 'other_identity_document_number': PersonalIdentityDocumentInfo,\n 'birth_date': str,\n 'nationality': str,\n 'gender': str,\n 'civil_status': str,\n 'contact_information': 
ContactInformation\n }\n\n self.attribute_map = {\n 'cardholder_type': 'cardholderType',\n 'full_name': 'fullName',\n 'card_data': 'cardData',\n 'identity_document_number': 'identityDocumentNumber',\n 'other_identity_document_number': 'otherIdentityDocumentNumber',\n 'birth_date': 'birthDate',\n 'nationality': 'nationality',\n 'gender': 'gender',\n 'civil_status': 'civilStatus',\n 'contact_information': 'contactInformation'\n }\n self._cardholder_type = cardholder_type\n self._full_name = full_name\n self._card_data = card_data\n self._identity_document_number = identity_document_number\n self._other_identity_document_number = other_identity_document_number\n self._birth_date = birth_date\n self._nationality = nationality\n self._gender = gender\n self._civil_status = civil_status\n self._contact_information = contact_information", "def _setbeneficiary_customer_no_option_59(self, val):\n self.swift_obj.BeneficiaryCustomer = val\n self.swift_obj.BeneficiaryCustomer.swiftTag = '59'", "def setNone(self):\n self.setValue([])", "def _onchange_field(self):\n if not self.secretary_contact_id:\n return\n if self.partner_type in ['dr', 'patient', 'secretary']:\n self.update({\n 'secretary_contact_id': False\n })", "def guiField(self, value):\n return None", "def prepare_value(self, value):\n if value is None and self.required:\n choices =list(self.choices)\n if len(choices) == 1:\n value = choices[0][0]\n return super(TemplateChoiceField, self).prepare_value(value)", "def get_blank(record, field_name, reason=\" in this case.\"):\n val = recordval(record, field_name)\n if val == \"\":\n return \"\"\n else:\n parser_error(\"field \"+field_name+\" must be blank\"+reason)\n return val", "def test_name_not_null(self):\n buffer = copy(self.entity1)\n buffer.name = None\n with self.assertRaises(ValidationError):\n buffer.save()\n\n transaction.rollback()", "def get_card(self):\n return self.card", "def get_card_number():\n\n return get_or_append_details('card_number', \"Please enter your credit card number\")", "def test_create_card_missing_variety(self): # pylint: disable=invalid-name\n data = {\n 'first_name': 'Ty',\n 'last_name': 'Cobb',\n }\n resp = self.app.post('cards', json=data)\n\n assert resp.status_code == 200\n\n assert data['first_name'] == resp.json['first_name']\n assert data['last_name'] == resp.json['last_name']\n assert resp.json['variety'] is None", "def clean(self):\n cleaned_data = super(AuthorizenetSurveyPurchaseForm, self).clean()\n if cleaned_data.get(\"purchase_code\"):\n return cleaned_data\n\n for f in [\"card_number\", \"card_expiry\", \"card_ccv\"]:\n if not cleaned_data.get(f):\n self.add_error(f, \"Required for card payments\")\n return cleaned_data", "def test_format_phone_none(self):\n number1 = None\n self.assertEqual(format_phone(number1), None)", "def _val_is_null(self, val):\r\n return val is None", "def test_request_channel_is_none(self):\n CanInfo.objects.filter(can_id=self.UUID).update(channel_name=None)\n self.assertFalse(send_rotate_to_can(self.USER, self.BIN_NUM))", "def get_value(self):\n return None", "def get_card_holder(self):\n\t\tif self.card_holder is not None:\n\t\t\treturn True\n\t\treturn False", "def mock_invalid_credit_card_data():\n return {\n \"CreditCardNumber\": \"1234\",\n \"CardHolder\": \"Test Name\",\n \"ExpirationDate\":\n (dt.datetime.now() + dt.timedelta(minutes=1)).isoformat(),\n \"SecurityCode\": \"1234\",\n \"Amount\": 100\n }", "def reveal_card(self):\n self.hand[1] = self.hidden_card_value\n self.hidden_card_value = Card()", "def 
__init__(self, cardname, amount):\n self.cardname = str(cardname)\n self.amount = int(amount)", "def temp_validator(cls, value, field):\n if value == \"U\":\n LOGGER.warning(\"{field.name} value is 'U'. Setting to None.\")\n return None\n return value", "def insert_exist(self, card):\n if card['card_ordinary'] is not None:\n slug = card['card_ordinary']['slug']\n card['card_type'] = slug\n\n # bug 149\n if slug == 'flyer':\n self.has_flyer = True\n if slug == 'test':\n self.has_test = True\n elif card['card_club'] is not None:\n slug = card['card_club']['slug']\n card['card_type'] = 'club'\n elif card['card_promo'] is not None:\n slug = card['card_promo']['slug']\n card['card_type'] = 'promo'\n else:\n raise\n\n record = []\n\n for name, delegate, title, action, static in MODEL_MAP_RAW:\n value = card.get(name, None)\n record.append(value)\n\n self.storage.append(record)", "def test_value_to_string(self):\r\n obj = self.rp\r\n field = self.rp._meta.get_field_by_name('body')[0]\r\n self.assertNotEqual(field.value_to_string(obj), u'') # expected\r\n self.assertEqual(field.value_to_string(None), u'') # edge case\r", "def add(self, card):\n if card != None:\n self.cards.append(card)", "def discard_card(self, card_index):\n if self.game.discard_card(self.index, card_index) == NO_CARD:\n self.know[card_index] = NO_CARD\n else:\n self.know[card_index] = \"??\"", "def setUpFormData(self):\n super(NoCAS, self).setUpFormData()\n self.formData['CAS_ID'] = ''", "def name(self):\n return 'Null'", "def test_none(self):\n self.assertEqual(self.obj.to_json_string(None), '[]')" ]
[ "0.5914556", "0.5837562", "0.55973685", "0.557087", "0.5547278", "0.5503616", "0.5483212", "0.5483212", "0.5483212", "0.5462632", "0.5440998", "0.54318625", "0.54166967", "0.53936404", "0.5383697", "0.5375955", "0.53558004", "0.53320044", "0.53124905", "0.5299287", "0.5295345", "0.52828413", "0.5278607", "0.5272467", "0.5259476", "0.5258622", "0.525431", "0.5206694", "0.5206518", "0.5186815", "0.5178666", "0.5165972", "0.5154745", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.51367015", "0.5135459", "0.5121847", "0.5097377", "0.5091792", "0.50891656", "0.5086285", "0.5086285", "0.5085121", "0.5077005", "0.50744396", "0.5067098", "0.50560904", "0.5053415", "0.5045235", "0.50441873", "0.50336426", "0.50314873", "0.5021839", "0.50206745", "0.50175554", "0.49993998", "0.4982633", "0.49823102", "0.4982046", "0.4980616", "0.49773085", "0.4964367", "0.49398217", "0.493674", "0.4932545", "0.49273765", "0.4913748", "0.49136388", "0.4908723", "0.49047843", "0.49024776", "0.48931357", "0.48859116", "0.4867961", "0.4864749" ]
0.0
-1