max_stars_count (int64, 301 to 224k) | text (string, lengths 6 to 1.05M) | token_count (int64, 3 to 727k)
---|---|---|
1,444 | package mage.cards.d;
import mage.MageInt;
import mage.abilities.Ability;
import mage.abilities.common.EntersBattlefieldTriggeredAbility;
import mage.abilities.common.delayed.OnLeaveReturnExiledToBattlefieldAbility;
import mage.abilities.effects.OneShotEffect;
import mage.abilities.effects.common.CreateDelayedTriggeredAbilityEffect;
import mage.cards.Card;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.Outcome;
import mage.constants.SubType;
import mage.constants.TargetController;
import mage.filter.FilterPermanent;
import mage.filter.common.FilterNonlandPermanent;
import mage.filter.predicate.mageobject.NamePredicate;
import mage.filter.predicate.permanent.ControllerIdPredicate;
import mage.game.Game;
import mage.game.permanent.Permanent;
import mage.players.Player;
import mage.target.TargetPermanent;
import mage.util.CardUtil;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.UUID;
/**
* @author TheElk801
*/
public final class DeputyOfDetention extends CardImpl {
private static final FilterPermanent filter = new FilterNonlandPermanent("nonland permanent an opponent controls");
static {
filter.add(TargetController.OPPONENT.getControllerPredicate());
}
public DeputyOfDetention(UUID ownerId, CardSetInfo setInfo) {
super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{1}{W}{U}");
this.subtype.add(SubType.VEDALKEN);
this.subtype.add(SubType.WIZARD);
this.power = new MageInt(1);
this.toughness = new MageInt(3);
// When Deputy of Detention enters the battlefield, exile target nonland permanent an opponent controls and all other nonland permanents that player controls with the same name as that permanent until Deputy of Detention leaves the battlefield.
Ability ability = new EntersBattlefieldTriggeredAbility(new DeputyOfDetentionExileEffect(), false);
ability.addTarget(new TargetPermanent(filter));
this.addAbility(ability);
}
private DeputyOfDetention(final DeputyOfDetention card) {
super(card);
}
@Override
public DeputyOfDetention copy() {
return new DeputyOfDetention(this);
}
}
class DeputyOfDetentionExileEffect extends OneShotEffect {
DeputyOfDetentionExileEffect() {
super(Outcome.Benefit);
this.staticText = "exile target nonland permanent an opponent controls " +
"and all other nonland permanents that player controls " +
"with the same name as that permanent until {this} leaves the battlefield";
}
private DeputyOfDetentionExileEffect(final DeputyOfDetentionExileEffect effect) {
super(effect);
}
@Override
public boolean apply(Game game, Ability source) {
Player controller = game.getPlayer(source.getControllerId());
Permanent permanent = game.getPermanent(source.getSourceId());
Permanent targeted = game.getPermanent(source.getFirstTarget());
if (permanent == null || controller == null || targeted == null) {
return false;
}
FilterPermanent filter = new FilterNonlandPermanent();
filter.add(new ControllerIdPredicate(targeted.getControllerId()));
filter.add(new NamePredicate(targeted.getName()));
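// The filter gathers the targeted permanent plus every other nonland permanent with the
// same name that the targeted player controls; per the card text, all of them are exiled
// until {this} leaves the battlefield.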
Set<Card> toExile = new LinkedHashSet<>();
for (Permanent creature : game.getBattlefield().getActivePermanents(filter, controller.getId(), game)) {
toExile.add(creature);
}
if (!toExile.isEmpty()) {
controller.moveCardsToExile(toExile, source, game, true, CardUtil.getCardExileZoneId(game, source), permanent.getIdName());
new CreateDelayedTriggeredAbilityEffect(new OnLeaveReturnExiledToBattlefieldAbility()).apply(game, source);
}
return true;
}
@Override
public DeputyOfDetentionExileEffect copy() {
return new DeputyOfDetentionExileEffect(this);
}
}
| 1,378 |
8,747 | // Copyright 2020 Espressif Systems (Shanghai) CO LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <string.h>
#include <pthread.h>
#include <stdlib.h>
#include <unistd.h>
#include "esp32_mock.h"
typedef struct esp_netif_s esp_netif_t;
typedef struct esp_netif_ip_info esp_netif_ip_info_t;
typedef struct esp_netif_dhcp_status esp_netif_dhcp_status_t;
const char * IP_EVENT = "IP_EVENT";
esp_err_t esp_netif_add_to_list(esp_netif_t *netif)
{
return ESP_OK;
}
esp_err_t esp_netif_remove_from_list(esp_netif_t *netif)
{
return ESP_ERR_NOT_FOUND;
}
esp_netif_t* esp_netif_next(esp_netif_t* netif)
{
return NULL;
}
esp_netif_t* esp_netif_next_unsafe(esp_netif_t* netif)
{
return NULL;
}
esp_netif_t *esp_netif_get_handle_from_ifkey(const char *if_key)
{
return NULL;
}
esp_err_t esp_netif_get_ip_info(esp_netif_t *esp_netif, esp_netif_ip_info_t *ip_info)
{
return ESP_ERR_NOT_SUPPORTED;
}
esp_err_t esp_netif_dhcpc_get_status(esp_netif_t *esp_netif, esp_netif_dhcp_status_t *status)
{
return ESP_ERR_NOT_SUPPORTED;
}
| 636 |
14,668 | // Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_VIEWS_THEME_COPYING_WIDGET_H_
#define CHROME_BROWSER_UI_VIEWS_THEME_COPYING_WIDGET_H_
#include "base/memory/raw_ptr.h"
#include "base/scoped_observation.h"
#include "ui/views/widget/widget.h"
#include "ui/views/widget/widget_observer.h"
// This widget uses a reference widget to provide its NativeTheme and
// ThemeProvider.
class ThemeCopyingWidget : public views::Widget, public views::WidgetObserver {
public:
explicit ThemeCopyingWidget(views::Widget* role_model);
ThemeCopyingWidget(const ThemeCopyingWidget&) = delete;
ThemeCopyingWidget& operator=(const ThemeCopyingWidget&) = delete;
~ThemeCopyingWidget() override;
// views::Widget:
const ui::ThemeProvider* GetThemeProvider() const override;
ui::ColorProviderManager::InitializerSupplier* GetCustomTheme()
const override;
const ui::NativeTheme* GetNativeTheme() const override;
// views::WidgetObserver:
void OnWidgetDestroying(Widget* widget) override;
void OnWidgetThemeChanged(Widget* widget) override;
private:
// The widget we'll copy our theme from.
raw_ptr<views::Widget> role_model_;
base::ScopedObservation<views::Widget, views::WidgetObserver>
observed_widget_{this};
};
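// Illustrative note (not part of the original header): a caller constructs this widget
// with the widget whose theme it should mirror, e.g.
//
//   views::Widget* widget = new ThemeCopyingWidget(reference_widget);  // reference_widget is hypothetical
//
// and then initializes it like any other views::Widget.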
#endif // CHROME_BROWSER_UI_VIEWS_THEME_COPYING_WIDGET_H_
| 465 |
2,114 | /*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <chrono>
#include <folly/CPortability.h>
namespace folly {
/// WaitOptions
///
/// Various synchronization primitives as well as various concurrent data
/// structures built using them have operations which might wait. This type
/// represents a set of options for controlling such waiting.
class WaitOptions {
public:
struct Defaults {
/// spin_max
///
/// If multiple threads are actively using a synchronization primitive,
/// whether indirectly via a higher-level concurrent data structure or
/// directly, where the synchronization primitive has an operation which
/// waits and another operation which wakes the waiter, it is common for
/// wait and wake events to happen almost at the same time. In this state,
/// we lose big 50% of the time if the wait blocks immediately.
///
/// We can improve our chances of being waked immediately, before blocking,
/// by spinning for a short duration, although we have to balance this
/// against the extra cpu utilization, latency reduction, power consumption,
/// and priority inversion effect if we end up blocking anyway.
///
/// We use a default maximum of 2 usec of spinning. As partial consolation,
/// since spinning as implemented in folly uses the pause instruction where
/// available, we give a small speed boost to the colocated hyperthread.
///
/// On circa-2013 devbox hardware, it costs about 7 usec to FUTEX_WAIT and
/// then be awoken. Spins on this hw take about 7 nsec, where all but 0.5
/// nsec is the pause instruction.
static constexpr std::chrono::nanoseconds spin_max =
std::chrono::microseconds(2);
};
std::chrono::nanoseconds spin_max() const {
return spin_max_;
}
WaitOptions& spin_max(std::chrono::nanoseconds dur) {
spin_max_ = dur;
return *this;
}
private:
std::chrono::nanoseconds spin_max_ = Defaults::spin_max;
};
} // namespace folly
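// Illustrative usage sketch (assumed caller code, not part of folly itself): callers
// typically adjust spin_max before handing the options to a waiting primitive, e.g.
//
//   folly::WaitOptions opt;
//   opt.spin_max(std::chrono::microseconds(0)); // disable spinning, block right away
//   // some_primitive.wait(deadline, opt);      // hypothetical primitive accepting WaitOptions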
| 722 |
1,755 | /*=========================================================================
Program: Visualization Toolkit
Module: vtkExtentRCBPartitioner.h
Copyright (c) <NAME>, <NAME>, <NAME>
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
/**
* @class vtkExtentRCBPartitioner
* @brief This method partitions a global extent to N partitions where N is a user
* supplied parameter.
*/
#ifndef vtkExtentRCBPartitioner_h
#define vtkExtentRCBPartitioner_h
#include "vtkCommonExecutionModelModule.h" // For export macro
#include "vtkObject.h"
#include <cassert> // For assert
#include <string> // For std::string
#include <vector> // For STL vector
class VTKCOMMONEXECUTIONMODEL_EXPORT vtkExtentRCBPartitioner : public vtkObject
{
public:
static vtkExtentRCBPartitioner* New();
vtkTypeMacro(vtkExtentRCBPartitioner, vtkObject);
void PrintSelf(ostream& oss, vtkIndent indent) override;
///@{
/**
* Set/Get the number of requested partitions
*/
void SetNumberOfPartitions(const int N)
{
assert("pre: Number of partitions requested must be > 0" && (N >= 0));
this->Reset();
this->NumberOfPartitions = N;
}
///@}
///@{
/**
* Set/Get the global extent array to be partitioned.
* The global extent is packed as follows:
* [imin,imax,jmin,jmax,kmin,kmax]
*/
void SetGlobalExtent(int imin, int imax, int jmin, int jmax, int kmin, int kmax)
{
this->Reset();
this->GlobalExtent[0] = imin;
this->GlobalExtent[1] = imax;
this->GlobalExtent[2] = jmin;
this->GlobalExtent[3] = jmax;
this->GlobalExtent[4] = kmin;
this->GlobalExtent[5] = kmax;
}
void SetGlobalExtent(int ext[6])
{
this->SetGlobalExtent(ext[0], ext[1], ext[2], ext[3], ext[4], ext[5]);
}
///@}
///@{
/**
* On/Off DuplicateNodes between partitions. Default is On.
*/
vtkSetMacro(DuplicateNodes, vtkTypeBool);
vtkGetMacro(DuplicateNodes, vtkTypeBool);
vtkBooleanMacro(DuplicateNodes, vtkTypeBool);
///@}
///@{
/**
* Set/Get macro for the number of ghost layers.
*/
vtkSetMacro(NumberOfGhostLayers, int);
vtkGetMacro(NumberOfGhostLayers, int);
///@}
///@{
/**
* Returns the number of extents.
*/
vtkGetMacro(NumExtents, int);
///@}
/**
* Partitions the extent
*/
void Partition();
/**
* Returns the extent of the partition corresponding to the given ID.
*/
void GetPartitionExtent(const int idx, int ext[6]);
protected:
vtkExtentRCBPartitioner();
~vtkExtentRCBPartitioner() override;
///@{
/**
* Resets the partitioner to the initial state, all previous partition
* extents are cleared.
*/
void Reset()
{
this->PartitionExtents.clear();
this->NumExtents = 0;
this->ExtentIsPartitioned = false;
}
///@}
/**
* Given an extent, this method will create ghost layers on each side of
* the boundary in each dimension. The ghost layers however will be
* restricted to the given global extent.
*/
void ExtendGhostLayers(int ext[6]);
///@{
/**
* Given an extent and the min/max of the dimension we are looking at, this
* method will produce a ghosted extent which is clamped within the given
* global extent
*/
void GetGhostedExtent(int ext[6], const int minIdx, const int maxIdx)
{
ext[minIdx] -= this->NumberOfGhostLayers;
ext[maxIdx] += this->NumberOfGhostLayers;
ext[minIdx] =
(ext[minIdx] < this->GlobalExtent[minIdx]) ? this->GlobalExtent[minIdx] : ext[minIdx];
ext[maxIdx] =
(ext[maxIdx] > this->GlobalExtent[maxIdx]) ? this->GlobalExtent[maxIdx] : ext[maxIdx];
}
///@}
/**
* Gets the structured data-description based on the given global extent
*/
void AcquireDataDescription();
/**
* Returns the extent at the position corresponding to idx.
*/
void GetExtent(const int idx, int ext[6]);
/**
* Adds the extent to the end of the list of partitioned extents
*/
void AddExtent(int ext[6]);
/**
* Replaces the extent at the position indicated by idx with the provided
* extent.
*/
void ReplaceExtent(const int idx, int ext[6]);
/**
* Splits the extent along the given dimension.
*/
void SplitExtent(int parent[6], int s1[6], int s2[6], int splitDimension);
/**
* Returns the total number of extents. It's always the 2^N where
* N is the number of subdivisions.
*/
int GetNumberOfTotalExtents();
/**
* Computes the total number of nodes for the given structured grid extent
*/
int GetNumberOfNodes(int ext[6]);
/**
* Computes the total number of cells for the given structured grid extent
*/
int GetNumberOfCells(int ext[6]);
/**
* Returns the length of the longest dimension
*/
int GetLongestDimensionLength(int ext[6]);
/**
* Returns the longest edge
*/
int GetLongestDimension(int ext[6]);
/**
* A convenience method for debugging purposes.
*/
void PrintExtent(const std::string& name, int ext[6]);
int NumberOfGhostLayers;
int DataDescription;
int GlobalExtent[6];
int NumberOfPartitions;
int NumExtents;
vtkTypeBool DuplicateNodes; // indicates whether nodes are duplicated between
// partitions, so that they are abutting. This is
// set to true by default. If disabled, the resulting
// partitions will have gaps.
bool ExtentIsPartitioned;
std::vector<int> PartitionExtents;
private:
vtkExtentRCBPartitioner(const vtkExtentRCBPartitioner&) = delete;
void operator=(const vtkExtentRCBPartitioner&) = delete;
};
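// Illustrative usage sketch (assumed caller code, not part of this header):
//
//   vtkExtentRCBPartitioner* partitioner = vtkExtentRCBPartitioner::New();
//   partitioner->SetNumberOfPartitions(4);
//   partitioner->SetGlobalExtent(0, 63, 0, 63, 0, 63);
//   partitioner->Partition();
//   int ext[6];
//   partitioner->GetPartitionExtent(0, ext); // extent assigned to partition 0
//   partitioner->Delete();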
#endif /* vtkExtentRCBPartitioner_h */
| 2,105 |
4,879 | <gh_stars>1000+
#pragma once
#include "map/catalog_headers_provider.hpp"
#include "partners_api/guides_on_map_api.hpp"
#include <memory>
class GuidesOnMapDelegate : public guides_on_map::Api::Delegate
{
public:
GuidesOnMapDelegate(std::shared_ptr<CatalogHeadersProvider> const & headersProvider);
platform::HttpClient::Headers GetHeaders() override;
private:
std::shared_ptr<CatalogHeadersProvider> m_headersProvider;
};
| 150 |
318 | <reponame>Imshuaige/zqxt
# -*- coding:utf-8 -*-
# WeizhongTu 2014.05.31
from __future__ import unicode_literals
from django.test import TestCase
from django.core.urlresolvers import reverse
from .models import Tutorial,Column
class TutorialsViews(TestCase):
def setUp(self):
# create columns:
# `test_column_1` to `test_column_100`
ColumnList = [ Column(name='test_column_%s'%i, slug='test_column_%s'%i) for i in range(1,101)]
Column.objects.bulk_create(ColumnList)
# create tutorials for test_column_1
c = Column.objects.get(slug='test_column_1')
Tutorial.objects.create(column=c, title='test tutorial 1', slug='test_index.html')
Tutorial.objects.create(column=c, title='test tutorial 2', slug='test_2.html')
Tutorial.objects.create(column=c, title='test tutorial 3', slug='test_3.html')
def test_index(self):
r = self.client.get(reverse('index'))
self.assertTrue('latest_tutorials' in r.context)
self.assertContains(r, '自强学堂')
def test_column_redirect_to_first_tutorial(self):
# test that a column with tutorials redirects permanently
response = self.client.get(reverse('column', args=('test_column_1',)))
self.assertRedirects(response,
reverse('tutorial-detail', args=('test_column_1', 'test_index.html')),
status_code=301, target_status_code=200)
def test_tutorial_basic_url(self):
import random
c,t = random.sample(xrange(1000),2)
self.assertEqual(reverse('tutorial-detail', args=('c%d' % c, 't%d.html' % t,)), '/c%d/t%d.html' % (c, t))
def test_column_and_tutorial_detail(self):
# when there are no tutorials in a column, return 404; otherwise return 200
response = self.client.get(reverse('tutorial-detail',
args=('test_column_1', 'test_index.html',)))
self.assertEqual(response.status_code, 200)
self.assertTrue('tutorial' in response.context)
#self.assertEqual(response.context['tutorial'].content, u'教程正在编写中……')
def test_error_tutorial(self):
# when visiting a tutorial under a wrong column, redirect to the right column
# there is no test_index.html in test_column_12, but there is one in test_column_1
response = self.client.get(reverse('tutorial-detail',
args=('test_column_12', 'test_index.html',)))
self.assertRedirects(response,
reverse('tutorial-detail',args=('test_column_1', 'test_index.html')),
status_code=301, target_status_code=200)
def test_column_no_tutorial(self):
# test a column with no tutorials
response = self.client.get(reverse('column', args=('test_column_100',)))
self.assertEqual(response.status_code,404)
def test_submit_error_tutorial(self):
pass
class TutorialsModels(TestCase):
pass
| 1,193 |
1,077 | <filename>Quicksilver/Code-External/NDClasses/NDResourceFork+PascalStrings.h
/*
* NDResourceFork+PascalStrings.h category
* NDResourceFork
*
* Created by <NAME> on Tue Feb 11 2003.
* Copyright (c) 2002 <NAME>. All rights reserved.
*/
/*!
@header NDResourceFork+PascalStrings
@abstract Defines the interface for a category of the class <TT>NDResourceFork</TT>.
@discussion This category was mainly added for testing purposes, since it's easier to test strings added to a resource fork than raw data, but you may find it useful
*/
#import <Cocoa/Cocoa.h>
#import "NDResourceFork.h"
/*!
@category NDResourceFork(PascalStrings)
@abstract A category of the class <TT>NDResourceFork</TT>.
@discussion This category was mainly added for testing purposes, since it's easier to test strings added to a resource fork than raw data, but you may find it useful
*/
@interface NDResourceFork (PascalStrings)
/*!
@method addString:type:Id:name:
@abstract Adds a string to the receivers resource file.
@discussion <TT>addString:type:Id:name:</TT> doesn't verify whether the resource ID you pass in the parameter anID is already assigned to another resource of the same type. It returns <TT>YES</TT> on success
@param aString An <TT>NSString</TT> object containing the string to be added as a resource to the receivers resource file.
@param aType The resource type of the resource to be added.
@param anID The resource ID of the resource to be added.
@param aName The name of the resource to be added.
@result Returns <TT>YES</TT> if the string was successfully added, otherwise it returns <TT>NO</TT>.
*/
- (BOOL)addString:(NSString *)aString type:(ResType)aType Id:(short)anID name:(NSString *)aName;
/*!
@method addString:type:name:
@abstract Adds a string to the receivers resource file.
@discussion <TT>addString:type:name:</TT> uses a unique resource ID when adding the string. It returns <TT>YES</TT> on success
@param aString An <TT>NSString</TT> object containing the string to be added as a resource to the receivers resource file.
@param aType The resource type of the resource to be added.
@param aName The name of the resource to be added.
@result Returns <TT>YES</TT> if the resource was successfully added, otherwise it returns <TT>NO</TT>.
*/
- (BOOL)addString:(NSString *)aString type:(ResType)aType name:(NSString *)aName;
/*!
@method stringForType:Id:
@abstract Gets a resource string for a resource in the receivers resource file.
@discussion <TT>stringForType:Id:</TT> searches the receivers resource file's resource map in memory for the specified resource string.
@param aType The resource type of the resource which you wish to retrieve a string.
@param anID An integer that uniquely identifies the resource which you wish to retrieve a string.
@result Returns an <TT>NSString</TT> object if successful otherwise returns nil.
*/
- (NSString *)stringForType:(ResType)aType Id:(short)anID;
/*!
@method stringForType:named:
@abstract Gets a resource string for a resource in the receivers resource file.
@discussion <TT>stringForType:named:</TT> searches the receivers resource file's resource map in memory for the specified resource string.
@param aType The resource type of the resource which you wish to retrieve a string.
@param aName A name that uniquely identifies the resource which you wish to retrieve a string. Strings passed in this parameter are case-sensitive.
@result Returns an <TT>NSString</TT> object if successful otherwise returns nil.
*/
- (NSString *)stringForType:(ResType)aType named:(NSString *)aName;
@end
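// Illustrative usage sketch (assumes an NDResourceFork instance obtained elsewhere and an
// example resource type/ID; not part of the original category):
//
//   NDResourceFork *fork = ...; // e.g. created with one of NDResourceFork's initializers
//   BOOL added = [fork addString:@"Hello" type:'STR ' Id:128 name:@"greeting"];
//   NSString *read = [fork stringForType:'STR ' Id:128];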
| 1,042 |
5,169 | <filename>Specs/1/3/7/ZendeskConnect/2.0.0/ZendeskConnect.podspec.json
{
"name": "ZendeskConnect",
"version": "2.0.0",
"summary": "Better Messages for Web and Mobile Apps",
"description": "Connect makes it easy to send email and mobile messages based on user actions, then test how much each message helps your business.",
"homepage": "https://www.zendesk.com/",
"authors": "Zendesk",
"source": {
"git": "https://github.com/zendesk/connect-ios-sdk.git",
"tag": "2.0.0"
},
"documentation_url": "https://developer.zendesk.com/embeddables/docs/outbound/ios",
"swift_version": "4.2",
"platforms": {
"ios": "9.0"
},
"requires_arc": true,
"source_files": "ZendeskConnect/ZendeskConnect/**/*.swift",
"frameworks": "UIKit",
"license": {
"type": "Apache 2.0",
"file": "LICENSE"
}
}
| 335 |
9,136 |
#include <gtest/gtest.h>
#include "Bullet3Common/b3Logging.h"
#include "Bullet3Common/b3CommandLineArgs.h"
#include "Bullet3OpenCL/Initialize/b3OpenCLUtils.h"
#include "Bullet3OpenCL/RigidBody/kernels/solverSetup.h"
#include "Bullet3OpenCL/RigidBody/kernels/solverSetup2.h"
#include "Bullet3OpenCL/RigidBody/kernels/solveContact.h"
#include "Bullet3OpenCL/RigidBody/kernels/solveFriction.h"
#include "Bullet3OpenCL/RigidBody/kernels/batchingKernels.h"
#include "Bullet3OpenCL/RigidBody/kernels/batchingKernelsNew.h"
extern int gArgc;
extern char** gArgv;
namespace
{
struct CompileBullet3PgsContactSolverKernels : public ::testing::Test
{
cl_context m_clContext;
cl_device_id m_clDevice;
cl_command_queue m_clQueue;
char* m_clDeviceName;
cl_platform_id m_platformId;
CompileBullet3PgsContactSolverKernels()
: m_clDeviceName(0),
m_clContext(0),
m_clDevice(0),
m_clQueue(0),
m_platformId(0)
{
// You can do set-up work for each test here.
b3CommandLineArgs args(gArgc, gArgv);
int preferredDeviceIndex = -1;
int preferredPlatformIndex = -1;
bool allowCpuOpenCL = false;
initCL();
}
virtual ~CompileBullet3PgsContactSolverKernels()
{
// You can do clean-up work that doesn't throw exceptions here.
exitCL();
}
// If the constructor and destructor are not enough for setting up
// and cleaning up each test, you can define the following methods:
#include "initCL.h"
virtual void SetUp()
{
// Code here will be called immediately after the constructor (right
// before each test).
}
virtual void TearDown()
{
// Code here will be called immediately after each test (right
// before the destructor).
}
};
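// Each TEST_F below follows the same pattern: compile one of the embedded OpenCL program
// strings, compile every kernel it is expected to expose, assert both steps succeed
// (CL_SUCCESS and a non-null kernel), then release the kernel and program.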
TEST_F(CompileBullet3PgsContactSolverKernels, solveFrictionCL)
{
const char* additionalMacros = "";
cl_int errNum = 0;
cl_program solveFrictionProg = b3OpenCLUtils::compileCLProgramFromString(m_clContext, m_clDevice, solveFrictionCL, &errNum, additionalMacros, 0, true);
ASSERT_EQ(CL_SUCCESS, errNum);
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solveFrictionCL, "BatchSolveKernelFriction", &errNum, solveFrictionProg, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solveFrictionCL, "solveSingleFrictionKernel", &errNum, solveFrictionProg, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
clReleaseProgram(solveFrictionProg);
}
TEST_F(CompileBullet3PgsContactSolverKernels, solverSetupCL)
{
const char* additionalMacros = "";
cl_int errNum = 0;
cl_program solverSetupProg = b3OpenCLUtils::compileCLProgramFromString(m_clContext, m_clDevice, solverSetupCL, &errNum, additionalMacros, 0, true);
ASSERT_EQ(CL_SUCCESS, errNum);
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solverSetupCL, "ContactToConstraintKernel", &errNum, solverSetupProg, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
clReleaseProgram(solverSetupProg);
}
TEST_F(CompileBullet3PgsContactSolverKernels, solverSetup2CL)
{
const char* additionalMacros = "";
cl_int errNum = 0;
cl_program solverSetup2Prog = b3OpenCLUtils::compileCLProgramFromString(m_clContext, m_clDevice, solverSetup2CL, &errNum, additionalMacros, 0, true);
ASSERT_EQ(CL_SUCCESS, errNum);
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solverSetup2CL, "SetSortDataKernel", &errNum, solverSetup2Prog, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solverSetup2CL, "SetDeterminismSortDataBodyA", &errNum, solverSetup2Prog, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solverSetup2CL, "SetDeterminismSortDataBodyB", &errNum, solverSetup2Prog, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solverSetup2CL, "SetDeterminismSortDataChildShapeA", &errNum, solverSetup2Prog, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solverSetup2CL, "SetDeterminismSortDataChildShapeB", &errNum, solverSetup2Prog, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solverSetup2CL, "ReorderContactKernel", &errNum, solverSetup2Prog, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solverSetup2CL, "CopyConstraintKernel", &errNum, solverSetup2Prog, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
clReleaseProgram(solverSetup2Prog);
}
TEST_F(CompileBullet3PgsContactSolverKernels, solveContactCL)
{
const char* additionalMacros = "";
cl_int errNum = 0;
cl_program solveContactProg = b3OpenCLUtils::compileCLProgramFromString(m_clContext, m_clDevice, solveContactCL, &errNum, additionalMacros, 0, true);
ASSERT_EQ(CL_SUCCESS, errNum);
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solveContactCL, "BatchSolveKernelContact", &errNum, solveContactProg, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, solveContactCL, "solveSingleContactKernel", &errNum, solveContactProg, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
clReleaseProgram(solveContactProg);
}
TEST_F(CompileBullet3PgsContactSolverKernels, batchingKernelsCL)
{
const char* additionalMacros = "";
cl_int errNum = 0;
cl_program batchingProg = b3OpenCLUtils::compileCLProgramFromString(m_clContext, m_clDevice, batchingKernelsCL, &errNum, additionalMacros, 0, true);
ASSERT_EQ(CL_SUCCESS, errNum);
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, batchingKernelsCL, "CreateBatches", &errNum, batchingProg, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
clReleaseProgram(batchingProg);
}
TEST_F(CompileBullet3PgsContactSolverKernels, batchingKernelsNewCL)
{
const char* additionalMacros = "";
cl_int errNum = 0;
cl_program batchingNewProg = b3OpenCLUtils::compileCLProgramFromString(m_clContext, m_clDevice, batchingKernelsNewCL, &errNum, additionalMacros, 0, true);
ASSERT_EQ(CL_SUCCESS, errNum);
{
cl_kernel k = b3OpenCLUtils::compileCLKernelFromString(m_clContext, m_clDevice, batchingKernelsNewCL, "CreateBatchesNew", &errNum, batchingNewProg, additionalMacros);
ASSERT_EQ(CL_SUCCESS, errNum);
ASSERT_FALSE(k == 0);
clReleaseKernel(k);
}
clReleaseProgram(batchingNewProg);
}
}; // namespace
| 2,951 |
629 | <gh_stars>100-1000
#include "libcompat.h"
int timer_create(clockid_t clockid CK_ATTRIBUTE_UNUSED,
struct sigevent *sevp CK_ATTRIBUTE_UNUSED,
timer_t * timerid CK_ATTRIBUTE_UNUSED)
{
/*
* The create function does nothing. timer_settime will use
* alarm to set the timer, and timer_delete will stop the
* alarm
*/
return 0;
}
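/*
 * Illustrative sketch only (the real timer_settime lives elsewhere in libcompat):
 * per the note above, a settime built on alarm() would look roughly like
 *
 *   int timer_settime(timer_t timerid, int flags,
 *                     const struct itimerspec *new_value,
 *                     struct itimerspec *old_value)
 *   {
 *       alarm(new_value->it_value.tv_sec);  // whole-second resolution only
 *       return 0;
 *   }
 */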
| 179 |
2,151 | <filename>third_party/android_tools/sdk/sources/android-25/com/android/settingslib/accessibility/AccessibilityUtils.java
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.settingslib.accessibility;
import android.accessibilityservice.AccessibilityServiceInfo;
import android.content.ComponentName;
import android.content.Context;
import android.content.pm.ResolveInfo;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.os.UserHandle;
import android.provider.Settings;
import android.text.TextUtils;
import android.util.ArraySet;
import android.view.accessibility.AccessibilityManager;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
public class AccessibilityUtils {
public static final char ENABLED_ACCESSIBILITY_SERVICES_SEPARATOR = ':';
final static TextUtils.SimpleStringSplitter sStringColonSplitter =
new TextUtils.SimpleStringSplitter(ENABLED_ACCESSIBILITY_SERVICES_SEPARATOR);
/**
* @return the set of enabled accessibility services. If there are no services,
* it returns the unmodifiable {@link Collections#emptySet()}.
*/
public static Set<ComponentName> getEnabledServicesFromSettings(Context context) {
return getEnabledServicesFromSettings(context, UserHandle.myUserId());
}
/**
* @return the set of enabled accessibility services for {@param userId}. If there are no
* services, it returns the unmodifiable {@link Collections#emptySet()}.
*/
public static Set<ComponentName> getEnabledServicesFromSettings(Context context, int userId) {
final String enabledServicesSetting = Settings.Secure.getStringForUser(
context.getContentResolver(), Settings.Secure.ENABLED_ACCESSIBILITY_SERVICES,
userId);
if (enabledServicesSetting == null) {
return Collections.emptySet();
}
final Set<ComponentName> enabledServices = new HashSet<>();
final TextUtils.SimpleStringSplitter colonSplitter = sStringColonSplitter;
colonSplitter.setString(enabledServicesSetting);
while (colonSplitter.hasNext()) {
final String componentNameString = colonSplitter.next();
final ComponentName enabledService = ComponentName.unflattenFromString(
componentNameString);
if (enabledService != null) {
enabledServices.add(enabledService);
}
}
return enabledServices;
}
/**
* @return a localized version of the text resource specified by resId
*/
public static CharSequence getTextForLocale(Context context, Locale locale, int resId) {
final Resources res = context.getResources();
final Configuration config = new Configuration(res.getConfiguration());
config.setLocale(locale);
final Context langContext = context.createConfigurationContext(config);
return langContext.getText(resId);
}
/**
* Changes an accessibility component's state.
*/
public static void setAccessibilityServiceState(Context context, ComponentName toggledService,
boolean enabled) {
setAccessibilityServiceState(context, toggledService, enabled, UserHandle.myUserId());
}
/**
* Changes an accessibility component's state for {@param userId}.
*/
public static void setAccessibilityServiceState(Context context, ComponentName toggledService,
boolean enabled, int userId) {
// Parse the enabled services.
Set<ComponentName> enabledServices = AccessibilityUtils.getEnabledServicesFromSettings(
context, userId);
if (enabledServices.isEmpty()) {
enabledServices = new ArraySet<>(1);
}
// Determine enabled services and accessibility state.
boolean accessibilityEnabled = false;
if (enabled) {
enabledServices.add(toggledService);
// Enabling at least one service enables accessibility.
accessibilityEnabled = true;
} else {
enabledServices.remove(toggledService);
// Check how many enabled and installed services are present.
Set<ComponentName> installedServices = getInstalledServices(context);
for (ComponentName enabledService : enabledServices) {
if (installedServices.contains(enabledService)) {
// Disabling the last service disables accessibility.
accessibilityEnabled = true;
break;
}
}
}
// Update the enabled services setting.
StringBuilder enabledServicesBuilder = new StringBuilder();
// Keep the enabled services even if they are not installed since we
// have no way to know whether the application restore process has
// completed. In general the system should be responsible for the
// clean up not settings.
for (ComponentName enabledService : enabledServices) {
enabledServicesBuilder.append(enabledService.flattenToString());
enabledServicesBuilder.append(
AccessibilityUtils.ENABLED_ACCESSIBILITY_SERVICES_SEPARATOR);
}
final int enabledServicesBuilderLength = enabledServicesBuilder.length();
if (enabledServicesBuilderLength > 0) {
enabledServicesBuilder.deleteCharAt(enabledServicesBuilderLength - 1);
}
Settings.Secure.putStringForUser(context.getContentResolver(),
Settings.Secure.ENABLED_ACCESSIBILITY_SERVICES,
enabledServicesBuilder.toString(), userId);
}
private static Set<ComponentName> getInstalledServices(Context context) {
final Set<ComponentName> installedServices = new HashSet<>();
installedServices.clear();
final List<AccessibilityServiceInfo> installedServiceInfos =
AccessibilityManager.getInstance(context)
.getInstalledAccessibilityServiceList();
if (installedServiceInfos == null) {
return installedServices;
}
for (final AccessibilityServiceInfo info : installedServiceInfos) {
final ResolveInfo resolveInfo = info.getResolveInfo();
final ComponentName installedService = new ComponentName(
resolveInfo.serviceInfo.packageName,
resolveInfo.serviceInfo.name);
installedServices.add(installedService);
}
return installedServices;
}
}
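// Illustrative usage sketch (assumes a valid Context and an installed accessibility
// service; the component name below is made up):
//
//   ComponentName service = new ComponentName("com.example.a11y", "com.example.a11y.MyService");
//   AccessibilityUtils.setAccessibilityServiceState(context, service, true /* enabled */);
//   boolean on = AccessibilityUtils.getEnabledServicesFromSettings(context).contains(service);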
| 2,556 |
5,129 | <reponame>Maria-philna/unilm<filename>infoxlm/src-infoxlm/infoxlm/__init__.py
import infoxlm.tasks
import infoxlm.models
import infoxlm.criterions | 65 |
1,056 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.debugger.jpda.models;
import com.sun.jdi.AbsentInformationException;
import com.sun.jdi.IncompatibleThreadStateException;
import com.sun.jdi.InvalidStackFrameException;
import com.sun.jdi.Location;
import com.sun.jdi.Method;
import com.sun.jdi.ObjectReference;
import com.sun.jdi.StackFrame;
import com.sun.jdi.StringReference;
import com.sun.jdi.ThreadGroupReference;
import com.sun.jdi.ThreadReference;
import com.sun.jdi.VMDisconnectedException;
import com.sun.jdi.VirtualMachine;
import com.sun.jdi.event.Event;
import com.sun.jdi.request.BreakpointRequest;
import com.sun.jdi.request.EventRequest;
import com.sun.jdi.request.EventRequestManager;
import com.sun.jdi.request.StepRequest;
import java.beans.Customizer;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.beans.PropertyVetoException;
import java.beans.VetoableChangeListener;
import java.beans.beancontext.BeanContext;
import java.beans.beancontext.BeanContextChild;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.netbeans.api.debugger.DebuggerManager;
import org.netbeans.api.debugger.Session;
import org.netbeans.api.debugger.jpda.CallStackFrame;
import org.netbeans.api.debugger.jpda.JPDABreakpoint;
import org.netbeans.api.debugger.jpda.JPDADebugger;
import org.netbeans.api.debugger.jpda.JPDAThread;
import org.netbeans.api.debugger.jpda.JPDAThreadGroup;
import org.netbeans.api.debugger.jpda.MonitorInfo;
import org.netbeans.api.debugger.jpda.ObjectVariable;
import org.netbeans.api.debugger.jpda.Variable;
import org.netbeans.api.debugger.jpda.event.JPDABreakpointEvent;
import org.netbeans.modules.debugger.jpda.JPDADebuggerImpl;
import org.netbeans.modules.debugger.jpda.SingleThreadWatcher;
import org.netbeans.modules.debugger.jpda.impl.StepUtils;
import org.netbeans.modules.debugger.jpda.jdi.IllegalThreadStateExceptionWrapper;
import org.netbeans.modules.debugger.jpda.jdi.InternalExceptionWrapper;
import org.netbeans.modules.debugger.jpda.jdi.InvalidRequestStateExceptionWrapper;
import org.netbeans.modules.debugger.jpda.jdi.InvalidStackFrameExceptionWrapper;
import org.netbeans.modules.debugger.jpda.jdi.LocationWrapper;
import org.netbeans.modules.debugger.jpda.jdi.MethodWrapper;
import org.netbeans.modules.debugger.jpda.jdi.MirrorWrapper;
import org.netbeans.modules.debugger.jpda.jdi.MonitorInfoWrapper;
import org.netbeans.modules.debugger.jpda.jdi.NativeMethodExceptionWrapper;
import org.netbeans.modules.debugger.jpda.jdi.ObjectCollectedExceptionWrapper;
import org.netbeans.modules.debugger.jpda.jdi.ObjectReferenceWrapper;
import org.netbeans.modules.debugger.jpda.jdi.ReferenceTypeWrapper;
import org.netbeans.modules.debugger.jpda.jdi.StackFrameWrapper;
import org.netbeans.modules.debugger.jpda.jdi.ThreadReferenceWrapper;
import org.netbeans.modules.debugger.jpda.jdi.TypeComponentWrapper;
import org.netbeans.modules.debugger.jpda.jdi.VMDisconnectedExceptionWrapper;
import org.netbeans.modules.debugger.jpda.jdi.VMOutOfMemoryExceptionWrapper;
import org.netbeans.modules.debugger.jpda.jdi.VirtualMachineWrapper;
import org.netbeans.modules.debugger.jpda.jdi.event.EventWrapper;
import org.netbeans.modules.debugger.jpda.jdi.request.BreakpointRequestWrapper;
import org.netbeans.modules.debugger.jpda.jdi.request.EventRequestManagerWrapper;
import org.netbeans.modules.debugger.jpda.jdi.request.EventRequestWrapper;
import org.netbeans.modules.debugger.jpda.jdi.request.MonitorContendedEnteredRequestWrapper;
import org.netbeans.modules.debugger.jpda.jdi.request.StepRequestWrapper;
import org.netbeans.modules.debugger.jpda.util.Executor;
import org.netbeans.modules.debugger.jpda.util.Operator;
import org.netbeans.spi.debugger.jpda.EditorContext.Operation;
import org.openide.util.Exceptions;
import org.openide.util.Mutex;
import org.openide.util.NbBundle;
import org.openide.util.Pair;
/**
* The implementation of JPDAThread.
*/
public final class JPDAThreadImpl implements JPDAThread, Customizer, BeanContextChild {
private static final String PROP_LOCKER_THREADS = "lockerThreads"; // NOI18N
private static final String PROP_STEP_SUSPENDED_BY_BREAKPOINT = "stepSuspendedByBreakpoint"; // NOI18N
/**
* Name of a property change event, that is fired when the current operation
* or last operations change. It's intentionally fired to dedicated property
* change listeners only, when added for this property specifically.
* It's intentionally fired synchronously with the operation change, under
* the thread lock.
*/
public static final String PROP_OPERATIONS_SET = "operationsSet"; // NOI18N
public static final String PROP_OPERATIONS_UPDATE = "operationsUpdate"; // NOI18N
private static final Logger logger = Logger.getLogger(JPDAThreadImpl.class.getName()); // NOI18N
private static final Logger loggerS = Logger.getLogger(JPDAThreadImpl.class.getName()+".suspend"); // NOI18N
private final ThreadReference threadReference;
private final JPDADebuggerImpl debugger;
/** Thread is suspended and everybody know about this. */
private boolean suspended;
private boolean suspendedOnAnEvent; // Suspended by an event that occured in this thread
/** Thread is suspended, but only this class knows it.
A notification about real suspend or resume is expected to come soon.
Typically just some evaluation, which will decide what's going to be done next, is just being performed.
We do not notify anyone about this, in order not to trigger unnecessary work (refreshing variables view, thread stack frames, etc.).*/
private boolean suspendedNoFire;
/** Suspend was requested while this thread was suspended, but looked like running to others. */
private boolean suspendRequested;
private boolean initiallySuspended;
private int suspendCount;
private Operation currentOperation;
private List<Operation> lastOperations;
private boolean doKeepLastOperations;
private ReturnVariableImpl returnVariable;
private PropertyChangeSupport pch = new PropertyChangeSupport(this);
private PropertyChangeSupport operationsPch = new PropertyChangeSupport(this);
// There's a caching mechanism in ThreadReferenceImpl in JDK 7
// However, the stack depth query is not synchronized, therefore we cache it
// for efficiency here.
private int stackDepth = -1;
private CallStackFrame[] cachedFrames;
private int cachedFramesFrom = -1;
private int cachedFramesTo = -1;
private final Object cachedFramesLock = new Object();
private JPDABreakpoint currentBreakpoint;
private JPDABreakpointEvent breakpointEvent;
private String threadName;
private final Object lockerThreadsLock = new Object();
//private Map<JPDAThread, Variable> lockerThreads;
//private Map<ThreadReference, ObjectReference> lockerThreads2;
private ObjectReference lockerThreadsMonitor;
private List<JPDAThread> lockerThreadsList;
private List<ThreadReference> resumedBlockingThreads;
private final Object stepBreakpointLock = new Object();
/** Step in some thread got suspended by this breakpoint hit in this thread. */
private JPDABreakpoint stepSuspendedByBreakpoint;
/** A set of threads in which a step is pending and which got suspended
* by a breakpoint hit in this thread. */
private Set<JPDAThreadImpl> suspendedSteppingThreads;
/** A set of threads which hit breakpoints that suspended a step in this thread. */
private Set<JPDAThreadImpl> steppingSuspendedByBptsInThreads;
private VirtualMachine vm;
/** Lock under which we're safe to suspend this thread for the purpose of checking for monitors. */
private final Object suspendToCheckForMonitorsLock = new Object();
private boolean canSuspendToCheckForMonitors;
public final ReadWriteLock accessLock = new ThreadReentrantReadWriteLock();
private final Object ownedMonitorsAndFramesSingleAccessLock = new Object();
public JPDAThreadImpl (
ThreadReference threadReference,
JPDADebuggerImpl debugger
) {
this.threadReference = threadReference;
this.debugger = debugger;
boolean initFailed = false;
threadName = "";
try {
vm = MirrorWrapper.virtualMachine(threadReference);
threadName = ThreadReferenceWrapper.name(threadReference);
suspended = ThreadReferenceWrapper.isSuspended(threadReference);
if (suspended) {
suspendCount = ThreadReferenceWrapper.suspendCount(threadReference);
} else {
suspendCount = 0;
}
initiallySuspended = suspended;
} catch (IllegalThreadStateExceptionWrapper itsex) {
initFailed = true;
} catch (ObjectCollectedExceptionWrapper ex) {
initFailed = true;
} catch (VMDisconnectedExceptionWrapper ex) {
initFailed = true;
} catch (InternalExceptionWrapper ex) {
initFailed = true;
}
if (initFailed) {
suspended = false;
suspendCount = 0;
}
}
@Override
public Lock getReadAccessLock() {
return accessLock.readLock();
}
/**
* Getter for the name of thread property.
*
* @return name of thread.
*/
@Override
public String getName () {
return threadName;
}
/**
* Returns parent thread group.
*
* @return parent thread group.
*/
@Override
public JPDAThreadGroup getParentThreadGroup () {
try {
ThreadGroupReference tgr = ThreadReferenceWrapper.threadGroup (threadReference);
if (tgr == null) return null;
return debugger.getThreadGroup(tgr);
} catch (IllegalThreadStateExceptionWrapper ex) {
return null; // Thrown when thread has exited
} catch (ObjectCollectedExceptionWrapper ex) {
return null;
} catch (VMDisconnectedExceptionWrapper ex) {
return null;
} catch (VMOutOfMemoryExceptionWrapper ex) {
return null;
} catch (InternalExceptionWrapper ex) {
return null;
}
}
/**
* Returns line number of the location this thread stopped at.
* The thread should be suspended at the moment this method is called.
*
* @return line number of the current location if the thread is suspended,
* contains at least one frame and the topmost frame does not
* represent a native method invocation; <CODE>-1</CODE> otherwise
* @see CallStackFrame
*/
@Override
public int getLineNumber (String stratum) {
accessLock.readLock().lock();
try {
if (suspended || suspendedNoFire) {
try {
CallStackFrame[] topStackRef = getCallStack(0, 1);
if (topStackRef.length > 0) {
return topStackRef[0].getLineNumber(stratum);
}
} catch (AbsentInformationException aiex) {}
synchronized (cachedFramesLock) {
if (stackDepth < 0) {
stackDepth = ThreadReferenceWrapper.frameCount0(threadReference);
}
if (stackDepth < 1) return -1;
}
return LocationWrapper.lineNumber(StackFrameWrapper.location(
ThreadReferenceWrapper.frame(threadReference, 0)), stratum);
}
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (InvalidStackFrameExceptionWrapper ex) {
} catch (IncompatibleThreadStateException ex) {
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (VMDisconnectedExceptionWrapper ex) {
} catch (InternalExceptionWrapper ex) {
} finally {
accessLock.readLock().unlock();
}
return -1;
}
public Method getTopMethod() {
accessLock.readLock().lock();
try {
if (suspended || suspendedNoFire) {
synchronized (cachedFramesLock) {
if (stackDepth < 0) {
stackDepth = ThreadReferenceWrapper.frameCount0(threadReference);
}
if (stackDepth < 1) return null;
}
return LocationWrapper.method(StackFrameWrapper.location(
ThreadReferenceWrapper.frame(threadReference, 0)));
}
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (InvalidStackFrameExceptionWrapper ex) {
} catch (IncompatibleThreadStateException ex) {
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (VMDisconnectedExceptionWrapper ex) {
} catch (InternalExceptionWrapper ex) {
} finally {
accessLock.readLock().unlock();
}
return null;
}
@Override
public synchronized Operation getCurrentOperation() {
return currentOperation;
}
public synchronized void setCurrentOperation(Operation operation) { // Set the current operation for the default stratum.
this.currentOperation = operation;
fireOperationsChanged(PROP_OPERATIONS_SET);
}
@Override
public synchronized List<Operation> getLastOperations() {
return lastOperations;
}
public synchronized void addLastOperation(Operation operation) {
if (lastOperations == null) {
lastOperations = new ArrayList<Operation>();
}
lastOperations.add(operation);
fireOperationsChanged(PROP_OPERATIONS_SET);
}
public synchronized void updateLastOperation(Operation operation) {
this.currentOperation = operation;
if (lastOperations == null) {
lastOperations = new ArrayList<Operation>();
}
fireOperationsChanged(PROP_OPERATIONS_UPDATE);
}
public synchronized void clearLastOperations() {
if (lastOperations != null) {
for (Operation last : lastOperations) {
last.setReturnValue(null); // reset the returned value.
// Operation might be reused, but the execution path is gone.
}
}
lastOperations = null;
fireOperationsChanged(PROP_OPERATIONS_SET);
}
public synchronized void holdLastOperations(boolean doHold) {
doKeepLastOperations = doHold;
}
@Override
public synchronized JPDABreakpoint getCurrentBreakpoint() {
if (currentBreakpoint != null && currentBreakpoint.isHidden()) {
return null;
} else {
return currentBreakpoint;
}
}
public synchronized JPDABreakpointEvent getCurrentBreakpointEvent() {
return breakpointEvent;
}
public void setCurrentBreakpoint(JPDABreakpoint currentBreakpoint, JPDABreakpointEvent breakpointEvent) {
JPDABreakpoint oldBreakpoint;
synchronized (this) {
oldBreakpoint = this.currentBreakpoint;
this.currentBreakpoint = currentBreakpoint;
this.breakpointEvent = breakpointEvent;
}
pch.firePropertyChange(JPDAThread.PROP_BREAKPOINT, oldBreakpoint, currentBreakpoint);
}
/**
* Returns current state of this thread.
*
* @return current state of this thread
*/
@Override
public int getState () {
try {
return ThreadReferenceWrapper.status (threadReference);
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (VMDisconnectedExceptionWrapper ex) {
} catch (InternalExceptionWrapper ex) {
}
return STATE_UNKNOWN;
}
/**
* Returns true if this thread is suspended by debugger.
*
* @return true if this thread is suspended by debugger
*/
@Override
public boolean isSuspended () {
return suspended;
}
/**
* Returns true if this thread is temporarily suspended by debugger to process events.
*
* @return true if this thread is suspended by debugger
*/
public boolean isSuspendedNoFire () {
return suspendedNoFire;
}
/**
* Returns true if the JPDA thread is suspended by debugger.
*
* @return true if this thread is suspended by debugger
*/
public boolean isThreadSuspended () {
try {
return ThreadReferenceWrapper.isSuspended (threadReference);
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (InternalExceptionWrapper ex) {
} catch (VMDisconnectedExceptionWrapper ex) {
}
return false;
}
/**
* Test if this thread was suspended on an event that occurred in this thread.
* This flag survives method invocations.
*
* @return true if this thread is suspended on an event.
*/
public boolean isSuspendedOnAnEvent() {
return suspendedOnAnEvent;
}
/**
* If this thread is suspended returns class name where this thread is stopped.
*
* @return class name where this thread is stopped.
*/
@Override
public String getClassName () {
accessLock.readLock().lock();
try {
if (suspended || suspendedNoFire) {
synchronized (cachedFramesLock) {
if (stackDepth < 0) {
stackDepth = ThreadReferenceWrapper.frameCount0(threadReference);
}
if (stackDepth < 1) return "";
}
return ReferenceTypeWrapper.name(LocationWrapper.declaringType(
StackFrameWrapper.location(ThreadReferenceWrapper.frame(threadReference, 0))));
}
} catch (InternalExceptionWrapper ex) {
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (InvalidStackFrameExceptionWrapper ex) {
} catch (IncompatibleThreadStateException ex) {
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (VMDisconnectedExceptionWrapper ex) {
} finally {
accessLock.readLock().unlock();
}
return "";
}
/**
* If this thread is suspended returns method name where this thread is stopped.
*
* @return method name where this thread is stopped.
*/
@Override
public String getMethodName () {
accessLock.readLock().lock();
try {
if (suspended || suspendedNoFire) {
synchronized (cachedFramesLock) {
if (stackDepth < 0) {
stackDepth = ThreadReferenceWrapper.frameCount0(threadReference);
}
if (stackDepth < 1) return "";
}
return TypeComponentWrapper.name(LocationWrapper.method(
StackFrameWrapper.location(ThreadReferenceWrapper.frame(threadReference, 0))));
}
} catch (InternalExceptionWrapper ex) {
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (InvalidStackFrameExceptionWrapper ex) {
} catch (IncompatibleThreadStateException ex) {
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (VMDisconnectedExceptionWrapper ex) {
} finally {
accessLock.readLock().unlock();
}
return "";
}
/**
* Returns name of file of this frame or null if thread has no frame.
*
* @return Returns name of file of this frame.
*/
@Override
public String getSourceName (String stratum) throws AbsentInformationException {
accessLock.readLock().lock();
try {
if (suspended || suspendedNoFire) {
synchronized (cachedFramesLock) {
if (stackDepth < 0) {
stackDepth = ThreadReferenceWrapper.frameCount0(threadReference);
}
if (stackDepth < 1) return "";
}
return LocationWrapper.sourceName(StackFrameWrapper.location(ThreadReferenceWrapper.frame(threadReference, 0)), stratum);
}
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (InvalidStackFrameExceptionWrapper ex) {
} catch (IncompatibleThreadStateException ex) {
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (VMDisconnectedExceptionWrapper ex) {
} catch (InternalExceptionWrapper ex) {
} finally {
accessLock.readLock().unlock();
}
return "";
}
/**
* Returns name of file of this frame or null if thread has no frame.
*
* @return Returns name of file of this frame.
*/
@Override
public String getSourcePath (String stratum)
throws AbsentInformationException {
accessLock.readLock().lock();
try {
if (suspended || suspendedNoFire) {
synchronized (cachedFramesLock) {
if (stackDepth < 0) {
stackDepth = ThreadReferenceWrapper.frameCount0(threadReference);
}
if (stackDepth < 1) return "";
}
return LocationWrapper.sourcePath(StackFrameWrapper.location(ThreadReferenceWrapper.frame(threadReference, 0)), stratum);
}
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (InvalidStackFrameExceptionWrapper ex) {
} catch (IncompatibleThreadStateException ex) {
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (VMDisconnectedExceptionWrapper ex) {
} catch (InternalExceptionWrapper ex) {
} finally {
accessLock.readLock().unlock();
}
return "";
}
/**
* Returns call stack for this thread.
*
* @throws AbsentInformationException if the thread is running or not able
* to return callstack. If the thread is in an incompatible state
* (e.g. running), the AbsentInformationException has
* IncompatibleThreadStateException as a cause.
* If the thread is collected, the AbsentInformationException has
* ObjectCollectedException as a cause.
* @return call stack
*/
@Override
public CallStackFrame[] getCallStack () throws AbsentInformationException {
accessLock.readLock().lock();
try {
return getCallStack (0, getStackDepth ());
} finally {
accessLock.readLock().unlock();
}
}
/**
* Returns call stack for this thread on the given indexes.
*
* @param from a from index, inclusive
* @param to a to index, exclusive
* @throws AbsentInformationException if the thread is running or not able
* to return callstack. If the thread is in an incompatible state
* (e.g. running), the AbsentInformationException has
* IncompatibleThreadStateException as a cause.
* If the thread is collected, the AbsentInformationException has
* ObjectCollectedException as a cause.
* @return call stack
*/
@Override
public CallStackFrame[] getCallStack (int from, int to)
throws AbsentInformationException {
accessLock.readLock().lock();
try {
if (!(suspended || suspendedNoFire)) {
return new CallStackFrame[0];
}
List l;
CallStackFrame[] theCachedFrames = null;
CallStackFrame[] frames;
// synchronize the whole retrieval of frames to prevent from concurrent retrieval of the same frames.
synchronized (cachedFramesLock) {
if (stackDepth < 0) {
stackDepth = ThreadReferenceWrapper.frameCount(threadReference);
}
int max = stackDepth;
if (to < 0) to = max; // Fight strange negative frame counts from http://www.netbeans.org/issues/show_bug.cgi?id=162448
from = Math.min(from, max);
to = Math.min(to, max);
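                // Serve the request from the cached frame window when possible: reuse the
                // whole cache, a sub-range of it, or at least the overlapping frames, and
                // fetch only the remaining frames from the JDI thread below.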
if (to - from > 1) {
if (from == cachedFramesFrom && to == cachedFramesTo) {
return cachedFrames;
}
if (from >= cachedFramesFrom && to <= cachedFramesTo) {
return Arrays.copyOfRange(cachedFrames, from - cachedFramesFrom, to - cachedFramesFrom);
}
if (cachedFramesFrom >= 0 && cachedFramesTo > cachedFramesFrom) {
int length = to - from;
theCachedFrames = new CallStackFrame[length];
for (int i = 0; i < length; i++) {
if (i >= cachedFramesFrom && i < cachedFramesTo) {
theCachedFrames[i] = cachedFrames[i - cachedFramesFrom];
} else {
theCachedFrames[i] = null;
}
}
}
}
if (from < 0) {
throw new IndexOutOfBoundsException("from = "+from);
}
if (from == to) {
return new CallStackFrame[0];
}
if (from >= max) {
throw new IndexOutOfBoundsException("from = "+from+" is too high, frame count = "+max);
}
int length = to - from;
if (length < 0 || (from+length) > max) {
throw new IndexOutOfBoundsException("from = "+from+", to = "+to+", frame count = "+max);
}
l = null;
try {
l = ThreadReferenceWrapper.frames (threadReference, from, length);
} catch (IndexOutOfBoundsException ioobex) {
ioobex = Exceptions.attachMessage(ioobex, "from = "+from+", to = "+to+", frame count = "+max+", length = "+length+", fresh frame count = "+ThreadReferenceWrapper.frameCount(threadReference));
// Terrible attempt to hack a magic issue
while (length > 0) {
// Try to obtain at least something...
length--;
to--;
try {
l = ThreadReferenceWrapper.frames (threadReference, from, length);
break;
} catch (IndexOutOfBoundsException ioobex2) {
}
}
ioobex = Exceptions.attachMessage(ioobex, "Finally got "+length+" frames from "+threadReference);
logger.log(Level.INFO, "Stack frames "+to+" - "+max+" can not be retrieved from thread "+threadReference, ioobex);
}
if (l == null) {
l = java.util.Collections.emptyList();
}
int n = l.size();
frames = new CallStackFrame[n];
for (int i = 0; i < n; i++) {
if (theCachedFrames != null && theCachedFrames[i] != null) {
frames[i] = theCachedFrames[i];
} else {
frames[i] = new CallStackFrameImpl(this, (StackFrame) l.get(i), from + i, debugger);
}
if (from == 0 && i == 0 && currentOperation != null) {
((CallStackFrameImpl) frames[i]).setCurrentOperation(currentOperation);
}
}
cachedFrames = frames;
cachedFramesFrom = from;
cachedFramesTo = to;
}
return frames;
} catch (IncompatibleThreadStateException ex) {
String msg = ex.getLocalizedMessage() + " " + getThreadStateLog();
AbsentInformationException aiex = new AbsentInformationException(msg);
aiex.initCause(ex);
throw aiex;
} catch (InvalidStackFrameException ex) {
AbsentInformationException aiex = new AbsentInformationException(ex.getLocalizedMessage());
aiex.initCause(ex);
throw aiex;
} catch (InvalidStackFrameExceptionWrapper ex) {
AbsentInformationException aiex = new AbsentInformationException(ex.getLocalizedMessage());
aiex.initCause(ex);
throw aiex;
} catch (ObjectCollectedExceptionWrapper ocex) {
AbsentInformationException aiex = new AbsentInformationException(ocex.getLocalizedMessage());
aiex.initCause(ocex);
throw aiex;
} catch (IllegalThreadStateExceptionWrapper itsex) {
// Thrown when thread has exited
AbsentInformationException aiex = new AbsentInformationException(itsex.getLocalizedMessage());
aiex.initCause(itsex);
throw aiex;
} catch (InternalExceptionWrapper ex) {
return new CallStackFrame [0];
} catch (VMDisconnectedExceptionWrapper ex) {
return new CallStackFrame [0];
} finally {
accessLock.readLock().unlock();
}
}
private void cleanCachedFrames() {
synchronized (cachedFramesLock) {
stackDepth = -1;
cachedFrames = null;
cachedFramesFrom = -1;
cachedFramesTo = -1;
}
}
/**
* Returns length of current call stack.
*
* @return length of current call stack
*/
@Override
public int getStackDepth () {
accessLock.readLock().lock();
try {
if (suspended || suspendedNoFire) {
synchronized (cachedFramesLock) {
if (stackDepth < 0) {
stackDepth = ThreadReferenceWrapper.frameCount0(threadReference);
}
return stackDepth;
}
}
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (IncompatibleThreadStateException e) {
} catch (InvalidStackFrameExceptionWrapper e) {
} finally {
accessLock.readLock().unlock();
}
return 0;
}
public void popFrames(StackFrame sf) throws IncompatibleThreadStateException {
try {
notifyToBeResumed();
accessLock.writeLock().lock();
try {
ThreadReferenceWrapper.popFrames(threadReference, sf);
} finally {
accessLock.writeLock().unlock();
}
cleanCachedFrames();
setReturnVariable(null); // Clear the return var
} catch (IllegalThreadStateExceptionWrapper ex) {
throw new IncompatibleThreadStateException("Thread exited.");
} catch (InvalidStackFrameExceptionWrapper ex) {
Exceptions.printStackTrace(ex);
} catch (ObjectCollectedExceptionWrapper ex) {
throw new IncompatibleThreadStateException("Thread died.");
} catch (NativeMethodExceptionWrapper nmex) {
cleanCachedFrames();
Exceptions.printStackTrace(
Exceptions.attachLocalizedMessage(nmex,
NbBundle.getMessage(JPDAThreadImpl.class, "MSG_NativeMethodPop")));
} catch (InternalExceptionWrapper iex) {
cleanCachedFrames();
} catch (VMDisconnectedExceptionWrapper ex) {
// Ignore
} finally {
notifySuspended();
}
}
/**
* Suspends thread.
*/
@Override
public void suspend () {
logger.fine("JPDAThreadImpl.suspend() called.");
Boolean suspendedToFire = null;
boolean isCurrent = debugger.getCurrentThread() == this;
accessLock.writeLock().lock();
try {
logger.fine(" write lock acquired, is suspended = "+suspended+", suspendedNoFire = "+suspendedNoFire);
if (!isSuspended ()) {
if (suspendedNoFire) {
loggerS.fine("["+threadName+"]: suspend(): SETTING suspendRequested = "+true);
// We were suspended just to process something, thus we do not want to be resumed then
suspendRequested = true;
return ;
}
logger.fine("Suspending thread "+threadName);
ThreadReferenceWrapper.suspend (threadReference);
suspendedToFire = Boolean.TRUE;
suspendCount++;
threadName = ThreadReferenceWrapper.name(threadReference);
}
//System.err.println("suspend("+getName()+") suspended = true");
suspended = true;
initiallySuspended = false;
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (InternalExceptionWrapper ex) {
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (VMDisconnectedExceptionWrapper ex) {
} finally {
accessLock.writeLock().unlock();
}
if (isCurrent) {
debugger.setStoppedState(threadReference, false);
}
if (suspendedToFire != null) {
pch.firePropertyChange(JPDAThread.PROP_SUSPENDED,
Boolean.valueOf(!suspendedToFire.booleanValue()),
suspendedToFire);
}
}
    /**
     * Resumes thread.
     */
@Override
public void resume () {
boolean can = cleanBeforeResume();
if (can) {
try {
resumeAfterClean();
setAsResumed(false);
} catch (InternalExceptionWrapper ex) {
} catch (VMDisconnectedExceptionWrapper ex) {
} finally {
fireAfterResume();
}
}
/* Original code split among 4 methods:
if (this == debugger.getCurrentThread()) {
boolean can = debugger.currentThreadToBeResumed();
if (!can) return ;
}
Boolean suspendedToFire = null;
accessLock.writeLock().lock();
try {
waitUntilMethodInvokeDone();
setReturnVariable(null); // Clear the return var on resume
setCurrentOperation(null);
currentBreakpoint = null;
if (!doKeepLastOperations) {
clearLastOperations();
}
try {
if (isSuspended ()) {
logger.fine("Resuming thread "+threadName);
int count = ThreadReferenceWrapper.suspendCount (threadReference);
while (count > 0) {
ThreadReferenceWrapper.resume (threadReference); count--;
}
suspendedToFire = Boolean.FALSE;
}
suspendCount = 0;
//System.err.println("resume("+getName()+") suspended = false");
suspended = false;
methodInvokingDisabledUntilResumed = false;
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (VMDisconnectedExceptionWrapper ex) {
} catch (InternalExceptionWrapper ex) {
}
} finally {
accessLock.writeLock().unlock();
}
JPDABreakpoint brkp = null;
synchronized (stepBreakpointLock) {
if (stepSuspendedByBreakpoint != null) {
brkp = stepSuspendedByBreakpoint;
}
}
if (brkp != null) {
pch.firePropertyChange(PROP_STEP_SUSPENDED_BY_BREAKPOINT, brkp, null);
}
cleanCachedFrames();
if (suspendedToFire != null) {
pch.firePropertyChange(PROP_SUSPENDED,
Boolean.valueOf(!suspendedToFire.booleanValue()),
suspendedToFire);
}
*/
}
private List<PropertyChangeEvent> resumeChangeEvents;
/**
* Acquires necessary locks and cleans the thread state before resume.
* This method is expected to be followed by {@link #resumeAfterClean()} and {@link #fireAfterResume()}.
     * This method MUST be followed by {@link #fireAfterResume()} even if it fails with an exception or error.
     * @return <code>true</code> if the caller can proceed with {@link #resumeAfterClean()},
* <code>false</code> when the resume should be abandoned.
*/
public boolean cleanBeforeResume() {
if (this == debugger.getCurrentThread()) {
boolean can = debugger.currentThreadToBeResumed();
if (!can) {
return false;
}
}
accessLock.writeLock().lock();
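        // Note: the write lock is intentionally left held here; it is released later
        // in fireAfterResume(), which must always follow this method.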
waitUntilMethodInvokeDone();
setReturnVariable(null); // Clear the return var on resume
setCurrentOperation(null);
synchronized (this) {
currentBreakpoint = null;
breakpointEvent = null;
}
if (!doKeepLastOperations) {
clearLastOperations();
}
cleanCachedFrames();
JPDABreakpoint brkp = removeSuspendedByBreakpoint();
PropertyChangeEvent suspEvt = new PropertyChangeEvent(this, JPDAThread.PROP_SUSPENDED, true, false);
if (brkp != null) {
PropertyChangeEvent brkpEvt = new PropertyChangeEvent(this, PROP_STEP_SUSPENDED_BY_BREAKPOINT,
brkp,
null);
if (isSuspended()) {
resumeChangeEvents = Arrays.asList(new PropertyChangeEvent[] {brkpEvt, suspEvt});
} else {
resumeChangeEvents = Collections.singletonList(brkpEvt);
}
} else {
if (isSuspended()) {
resumeChangeEvents = Collections.singletonList(suspEvt);
} else {
resumeChangeEvents = Collections.emptyList();
}
}
return true;
}
public void resumeAfterClean() throws InternalExceptionWrapper, VMDisconnectedExceptionWrapper {
logger.fine("Resuming thread "+threadName);
boolean resumed = false;
try {
int count = ThreadReferenceWrapper.suspendCount (threadReference);
while (count > 0) {
ThreadReferenceWrapper.resume (threadReference); count--;
}
resumed = true;
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (ObjectCollectedExceptionWrapper ex) {
} finally {
if (!resumed) {
// Do not fire PROP_SUSPENDED when not resumed!
for (PropertyChangeEvent pchEvt : resumeChangeEvents) {
if (JPDAThread.PROP_SUSPENDED.equals(pchEvt.getPropertyName())) {
resumeChangeEvents = new ArrayList<PropertyChangeEvent>(resumeChangeEvents);
resumeChangeEvents.remove(pchEvt);
break;
}
}
}
}
}
/** Must be called between {@link #cleanBeforeResume()} and {@link #fireAfterResume()} only. */
public boolean reduceThreadSuspendCount() throws InternalExceptionWrapper, VMDisconnectedExceptionWrapper {
boolean reduced = true;
try {
int count = ThreadReferenceWrapper.suspendCount (threadReference);
reduced = count == 1;
//if (!reduced) logger.severe("Reducing suspend count of existing "+getThreadStateLog());
while (count > 1) {
ThreadReferenceWrapper.resume (threadReference); count--;
}
reduced = true;
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (ObjectCollectedExceptionWrapper ex) {
} finally {
if (!reduced) {
// Do not fire PROP_SUSPENDED when not resumed!
for (PropertyChangeEvent pchEvt : resumeChangeEvents) {
if (JPDAThread.PROP_SUSPENDED.equals(pchEvt.getPropertyName())) {
resumeChangeEvents = new ArrayList<PropertyChangeEvent>(resumeChangeEvents);
resumeChangeEvents.remove(pchEvt);
break;
}
}
}
}
return reduced;
}
public void setAsResumed(boolean reduceSuspendCountOnly) {
if (reduceSuspendCountOnly) {
suspendCount--;
} else {
suspendCount = 0;
}
if (suspendCount == 0) {
//System.err.println("resume("+getName()+") suspended = false");
suspended = false;
suspendedOnAnEvent = false;
suspendedNoFire = false;
debugger.setCurrentSuspendedNoFireThread(null);
methodInvokingDisabledUntilResumed = false;
}
}
public void fireAfterResume() {
List<PropertyChangeEvent> evts = resumeChangeEvents;
resumeChangeEvents = null;
boolean fire = suspendCount == 0;
accessLock.writeLock().unlock();
if (fire) {
for (PropertyChangeEvent evt : evts) {
pch.firePropertyChange(evt);
}
}
}
public void notifyToBeResumed() {
//System.err.println("notifyToBeResumed("+getName()+")");
List<PropertyChangeEvent> evts = notifyToBeRunning(true, true);
for (PropertyChangeEvent evt : evts) {
pch.firePropertyChange(evt);
}
}
/**
* Call only after {@link #notifyToBeResumedNoFire()} to fire the change events.
*/
public void fireAfterNotifyToBeResumedNoFire() {
List<PropertyChangeEvent> evts = resumeChangeEvents;
resumeChangeEvents = null;
if (evts != null) {
for (PropertyChangeEvent evt : evts) {
pch.firePropertyChange(evt);
}
}
}
public boolean notifyToBeResumedNoFire() {
//System.err.println("notifyToBeResumed("+getName()+")");
logger.fine("notifyToBeResumedNoFire("+getName()+")");
accessLock.writeLock().lock();
loggerS.fine("["+threadName+"]: "+"notifyToBeResumedNoFire() suspended = "+suspended+", suspendRequested = "+suspendRequested);
try {
logger.fine(" suspendRequested = "+suspendRequested);
if (suspendRequested) {
suspendRequested = false;
return false;
}
resumeChangeEvents = notifyToBeRunning(true, true);
} finally {
accessLock.writeLock().unlock();
}
return true;
}
private List<PropertyChangeEvent> notifyToBeRunning(boolean clearVars, boolean resumed) {
if (resumed) {
// Reset the pending action when the thread is resumed.
setPendingAction(null);
}
Boolean suspendedToFire = null;
accessLock.writeLock().lock();
try {
if (resumed) {
waitUntilMethodInvokeDone();
}
//System.err.println("notifyToBeRunning("+getName()+"), resumed = "+resumed+", suspendCount = "+suspendCount+", thread's suspendCount = "+threadReference.suspendCount());
if (resumed && (--suspendCount > 0)) return Collections.emptyList();
//System.err.println(" suspendCount = 0, var suspended = "+suspended);
suspendCount = 0;
if (clearVars) {
setCurrentOperation(null);
setReturnVariable(null); // Clear the return var on resume
currentBreakpoint = null;
if (!doKeepLastOperations) {
clearLastOperations();
}
}
if (suspended) {
//System.err.println("notifyToBeRunning("+getName()+") suspended = false");
suspended = false;
suspendedToFire = Boolean.FALSE;
methodInvokingDisabledUntilResumed = false;
}
if (resumed) {
suspendedNoFire = false;
suspendedOnAnEvent = false;
debugger.setCurrentSuspendedNoFireThread(null);
}
} finally {
accessLock.writeLock().unlock();
}
cleanCachedFrames();
PropertyChangeEvent stepBrkpEvt = null;
JPDABreakpoint stepBrkp = removeSuspendedByBreakpoint();
if (stepBrkp != null) {
stepBrkpEvt = new PropertyChangeEvent(this, PROP_STEP_SUSPENDED_BY_BREAKPOINT,
stepBrkp, null);
}
if (suspendedToFire != null) {
PropertyChangeEvent suspEvt = new PropertyChangeEvent(this, JPDAThread.PROP_SUSPENDED,
Boolean.valueOf(!suspendedToFire.booleanValue()),
suspendedToFire);
if (!resumed) suspEvt.setPropagationId("methodInvoke"); // NOI18N
if (stepBrkpEvt != null) {
return Arrays.asList(new PropertyChangeEvent[] {stepBrkpEvt, suspEvt});
} else {
return Collections.singletonList(suspEvt);
}
} else {
if (stepBrkpEvt != null) {
return Collections.singletonList(stepBrkpEvt);
}
return Collections.emptyList();
}
}
public void updateSuspendCount(int suspendCount) {
accessLock.writeLock().lock();
try {
this.suspendCount = suspendCount;
} finally {
accessLock.writeLock().unlock();
}
}
public void notifySuspended() {
notifySuspended(true, false);
}
public void notifySuspendedNoFire(boolean eventInThisThread, boolean threadDied) {
//notifySuspended(false);
// Keep the thread look like running until we get a firing notification
accessLock.writeLock().lock();
try {
if (eventInThisThread) { // Do not reset to false when we do not know.
suspendedOnAnEvent = eventInThisThread;
}
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: "+"notifySuspendedNoFire() suspended = "+suspended+", suspendCount = "+suspendCount+", suspendedOnAnEvent = "+eventInThisThread);
}
if (suspended && suspendCount > 0 && !initiallySuspended) {
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: notifySuspendedNoFire(): SETTING suspendRequested = "+true);
}
suspendRequested = true; // The thread was just suspended, leave it suspended afterwards.
}
if (!threadDied) {
try {
suspendCount = ThreadReferenceWrapper.suspendCount(threadReference);
threadName = ThreadReferenceWrapper.name(threadReference);
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (ObjectCollectedExceptionWrapper ocex) {
// The thread is gone
} catch (VMDisconnectedExceptionWrapper ex) {
// The VM is gone
} catch (InternalExceptionWrapper ex) {
// Something is gone
}
} else {
suspendCount = 1; // Suppose
}
suspendedNoFire = true;
debugger.setCurrentSuspendedNoFireThread(this);
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: (notifySuspendedNoFire() END) suspended = "+suspended+", suspendedNoFire = "+suspendedNoFire+", suspendRequested = "+suspendRequested);
}
} finally {
accessLock.writeLock().unlock();
}
}
public PropertyChangeEvent notifySuspended(boolean doFire, boolean explicitelyPaused) {
return notifySuspended(doFire, explicitelyPaused, true);
}
private PropertyChangeEvent notifySuspended(boolean doFire, boolean explicitelyPaused, boolean verifyStatusAndName) {
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: "+"notifySuspended(doFire = "+doFire+", explicitelyPaused = "+explicitelyPaused+")");
}
Boolean suspendedToFire = null;
accessLock.writeLock().lock();
initiallySuspended = false;
try {
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: (notifySuspended() BEGIN) suspended = "+suspended+", suspendedNoFire = "+suspendedNoFire);
}
if (explicitelyPaused && !suspended && suspendedNoFire) {
suspendRequested = true;
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: suspendRequested = "+suspendRequested);
}
return null;
}
if (verifyStatusAndName) {
try {
suspendCount = ThreadReferenceWrapper.suspendCount(threadReference);
threadName = ThreadReferenceWrapper.name(threadReference);
} catch (IllegalThreadStateExceptionWrapper ex) {
return null; // Thrown when thread has exited
} catch (ObjectCollectedExceptionWrapper ocex) {
return null; // The thread is gone
} catch (VMDisconnectedExceptionWrapper ex) {
return null; // The VM is gone
} catch (InternalExceptionWrapper ex) {
return null; // Something is gone
}
} else {
suspendCount = 1;
}
//System.err.println("notifySuspended("+getName()+") suspendCount = "+suspendCount+", var suspended = "+suspended);
suspendedNoFire = false;
debugger.setCurrentSuspendedNoFireThread(null);
if ((!suspended || suspendedNoFire && doFire) && (!verifyStatusAndName || isThreadSuspended())) {
//System.err.println(" setting suspended = true");
suspended = true;
suspendedToFire = Boolean.TRUE;
if (doFire) {
try {
threadName = ThreadReferenceWrapper.name(threadReference);
} catch (InternalExceptionWrapper ex) {
} catch (VMDisconnectedExceptionWrapper ex) {
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (IllegalThreadStateExceptionWrapper ex) {
}
}
}
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: (notifySuspended() END) suspended = "+suspended+", suspendedNoFire = "+suspendedNoFire+", suspendRequested = "+suspendRequested);
}
} finally {
accessLock.writeLock().unlock();
}
if (doFire && suspendedToFire != null) {
pch.firePropertyChange(JPDAThread.PROP_SUSPENDED,
Boolean.valueOf(!suspendedToFire.booleanValue()),
suspendedToFire);
} else if (suspendedToFire != null) {
return new PropertyChangeEvent(this, JPDAThread.PROP_SUSPENDED,
Boolean.valueOf(!suspendedToFire.booleanValue()),
suspendedToFire);
}
return null;
}
/**
* Call ONLY with events obtained from {@link #notifySuspended()} or other
* methods that return event(s).
* @param event The event to fire.
*/
public void fireEvent(PropertyChangeEvent event) {
pch.firePropertyChange(event);
}
private SingleThreadWatcher watcher = null;
private boolean methodInvoking;
private boolean methodInvokingDisabledUntilResumed;
private boolean resumedToFinishMethodInvocation;
private boolean unsuspendedStateWhenInvoking;
private List<StepRequest> stepsDeletedDuringMethodInvoke;
public void notifyMethodInvoking() throws PropertyVetoException {
SingleThreadWatcher watcherToDestroy = null;
List<PropertyChangeEvent> evts;
accessLock.writeLock().lock();
try {
logger.log(Level.FINE, "Invoking a method in thread {0}", threadName);
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: Invoking a method, suspended = "+suspended+", suspendedNoFire = "+suspendedNoFire+", suspendRequested = "+suspendRequested);
}
if (methodInvokingDisabledUntilResumed) {
throw new PropertyVetoException(
NbBundle.getMessage(JPDAThreadImpl.class, "MSG_DisabledUntilResumed"), null);
}
if (methodInvoking) {
throw new PropertyVetoException(
NbBundle.getMessage(JPDAThreadImpl.class, "MSG_AlreadyInvoking"), null);
}
if (!(suspended || suspendedNoFire)) {
throw new PropertyVetoException(
NbBundle.getMessage(JPDAThreadImpl.class, "MSG_NoCurrentContext"), null);
}
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("Suspend count of "+this+" before notifyMethodInvoking() is: "+suspendCount);
}
try {
int tsc = ThreadReferenceWrapper.suspendCount(threadReference);
if (suspendCount != tsc) {
// Should not occur
logger.log(Level.INFO, getThreadStateLog(), new IllegalStateException(
"Different suspend counts! JPDA Thread = "+suspendCount+
", thread "+threadReference+" = "+tsc+
", thread's state: "+getThreadStateLog(threadReference)));
suspendCount = tsc;
}
                // The thread needs to be only single-suspended, otherwise it cannot invoke methods.
for (int sc = 1; sc < suspendCount; sc++) {
ThreadReferenceWrapper.resume(threadReference);
}
} catch (InternalExceptionWrapper iew) {
} catch (VMDisconnectedExceptionWrapper dew) {
} catch (VMDisconnectedException de) { // from ThreadReference.toString()
} catch (ObjectCollectedExceptionWrapper oce) {
} catch (IllegalThreadStateExceptionWrapper itse) {
}
debugger.getOperator().notifyMethodInvoking(threadReference);
if (vm != null) {
// Check if there aren't any steps submitted, which would break method invocation:
try {
EventRequestManager erm = VirtualMachineWrapper.eventRequestManager(vm);
List<StepRequest> srs = EventRequestManagerWrapper.stepRequests0(erm);
List<StepRequest> stepsToDelete = null;
for (StepRequest sr : srs) {
ThreadReference t = StepRequestWrapper.thread(sr);
if (threadReference.equals(t)) {
if (stepsToDelete == null) {
stepsToDelete = new ArrayList<StepRequest>();
}
if (checkToDisableStep(sr, t)) {
stepsToDelete.add(sr);
}
}
}
if (stepsToDelete != null) {
for (StepRequest sr : stepsToDelete) {
//debugger.getOperator().unregister(sr);
//EventRequestManagerWrapper.deleteEventRequest(erm, sr);
boolean wasEnabled = EventRequestWrapper.isEnabled0(sr);
EventRequestWrapper.putProperty(sr, "methodInvoking.wasEnabled", wasEnabled);
EventRequestWrapper.disable(sr);
if (logger.isLoggable(Level.FINE)) logger.fine("DISABLED Step Request: "+sr);
}
}
stepsDeletedDuringMethodInvoke = stepsToDelete;
} catch (InternalExceptionWrapper iew) {
} catch (VMDisconnectedExceptionWrapper dew) {
} catch (InvalidRequestStateExceptionWrapper irse) {
} catch (ObjectCollectedExceptionWrapper oce) {}
}
methodInvoking = true;
unsuspendedStateWhenInvoking = !isSuspended();
if (unsuspendedStateWhenInvoking) {
// Do not notify running state when was not suspended.
evts = Collections.emptyList();
} else {
evts = notifyToBeRunning(false, false);
}
watcherToDestroy = watcher;
watcher = new SingleThreadWatcher(this);
//logger.severe("Before method invoke: "+getThreadStateLog());
} finally {
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: unsuspendedStateWhenInvoking = "+unsuspendedStateWhenInvoking);
}
accessLock.writeLock().unlock();
}
if (watcherToDestroy != null) {
watcherToDestroy.destroy();
}
for (PropertyChangeEvent evt : evts) {
pch.firePropertyChange(evt);
}
synchronized (suspendToCheckForMonitorsLock) {
canSuspendToCheckForMonitors = true;
}
}
private static boolean checkToDisableStep(StepRequest sr, ThreadReference t) {
int stepKind;
try {
stepKind = StepRequestWrapper.depth(sr);
} catch (InternalExceptionWrapper | VMDisconnectedExceptionWrapper ex) {
// A wrong state, do nothing
return false;
}
if (stepKind == StepRequest.STEP_INTO) {
// Disable step into as method invocation will suspend on it
return true;
}
int threadDepth;
try {
threadDepth = ThreadReferenceWrapper.frameCount(t);
} catch (IllegalThreadStateExceptionWrapper | IncompatibleThreadStateException |
InternalExceptionWrapper | InvalidStackFrameExceptionWrapper ex) {
// We can not retrieve the frame depth
return true;
} catch (ObjectCollectedExceptionWrapper | VMDisconnectedExceptionWrapper ex) {
// A wrong state, do nothing
return false;
}
int stepDepth = StepUtils.getOriginalStepDepth(sr);
if (stepDepth > 0) {
// If the depth at which the step was submitted is less than the current depth,
// do not disable the step as it will not interfere with the method invocation.
// The invocation will not go up the stack.
return stepDepth >= threadDepth;
}
return true;
}
public void notifyMethodInvokeDone() {
synchronized (suspendToCheckForMonitorsLock) {
canSuspendToCheckForMonitors = false;
}
SingleThreadWatcher watcherToDestroy = null;
boolean wasUnsuspendedStateWhenInvoking;
accessLock.writeLock().lock();
try {
logger.log(Level.FINE, "Method invoke done in thread {0}", threadName);
if (loggerS.isLoggable(Level.FINE)) {
loggerS.fine("["+threadName+"]: Method invoke done, suspended = "+suspended+", suspendedNoFire = "+suspendedNoFire+", suspendRequested = "+suspendRequested+", unsuspendedStateWhenInvoking = "+unsuspendedStateWhenInvoking);
}
try {
if (resumedToFinishMethodInvocation) {
                    // HACK because of JDI: we've resumed this thread so that the method invocation could be finished.
// We need to suspend the thread immediately so that it does not continue after the invoke has finished.
ThreadReferenceWrapper.suspend(threadReference);
//System.err.println("\""+getName()+"\""+": Suspended after method invocation.");
resumedToFinishMethodInvocation = false;
}
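                // Restore the extra suspend counts that were released in
                // notifyMethodInvoking() so that the method invocation could run.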
for (int sc = 1; sc < suspendCount; sc++) {
ThreadReferenceWrapper.suspend(threadReference);
}
} catch (InternalExceptionWrapper ex) {
} catch (VMDisconnectedExceptionWrapper ex) {
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (IllegalThreadStateExceptionWrapper ex) {
}
debugger.getOperator().notifyMethodInvokeDone(threadReference);
if (stepsDeletedDuringMethodInvoke != null) {
try {
for (StepRequest sr : stepsDeletedDuringMethodInvoke) {
try {
Object wasEnabled = EventRequestWrapper.getProperty(sr, "methodInvoking.wasEnabled");
if (Boolean.TRUE.equals(wasEnabled)) {
EventRequestWrapper.enable(sr);
}
} catch (ObjectCollectedExceptionWrapper ex) {
continue;
} catch (InvalidRequestStateExceptionWrapper irse) {
continue;
}
if (logger.isLoggable(Level.FINE)) logger.fine("ENABLED Step Request: "+sr);
}
} catch (InternalExceptionWrapper iew) {
} catch (VMDisconnectedExceptionWrapper dew) {}
stepsDeletedDuringMethodInvoke = null;
}
methodInvoking = false;
wasUnsuspendedStateWhenInvoking = unsuspendedStateWhenInvoking;
unsuspendedStateWhenInvoking = false;
//logger.severe("After method invoke: "+getThreadStateLog());
synchronized (this) {
this.notifyAll();
}
watcherToDestroy = watcher;
watcher = null;
} finally {
accessLock.writeLock().unlock();
}
if (watcherToDestroy != null) {
watcherToDestroy.destroy();
}
// Do not notify suspended state when was already unsuspended when started invoking.
if (!wasUnsuspendedStateWhenInvoking) {
PropertyChangeEvent evt = notifySuspended(false, false, false);
if (evt != null) {
evt.setPropagationId("methodInvoke"); // NOI18N
pch.firePropertyChange(evt);
}
}
}
public boolean isMethodInvoking() {
return methodInvoking;
}
public void waitUntilMethodInvokeDone() {
accessLock.readLock().lock();
try {
while (methodInvoking) {
synchronized (this) {
try {
this.wait();
} catch (InterruptedException iex) {
break;
}
}
}
} finally {
accessLock.readLock().unlock();
}
}
public void waitUntilMethodInvokeDone(long timeout) throws InterruptedException {
if (!accessLock.readLock().tryLock(timeout, TimeUnit.MILLISECONDS)) {
return ;
}
try {
while (methodInvoking) {
synchronized (this) {
this.wait(timeout);
}
}
} finally {
accessLock.readLock().unlock();
}
}
public void disableMethodInvokeUntilResumed() {
accessLock.writeLock().lock();
methodInvokingDisabledUntilResumed = true;
accessLock.writeLock().unlock();
}
private boolean inStep = false;
public void setInStep(boolean inStep, EventRequest stepRequest) {
SingleThreadWatcher watcherToDestroy = null;
this.inStep = inStep;
watcherToDestroy = watcher;
if (inStep) {
boolean suspendThread;
try {
suspendThread = EventRequestWrapper.suspendPolicy(stepRequest) == StepRequest.SUSPEND_EVENT_THREAD;
} catch (InternalExceptionWrapper ex) {
suspendThread = false;
} catch (VMDisconnectedExceptionWrapper ex) {
suspendThread = false;
}
if (suspendThread) {
watcher = new SingleThreadWatcher(this);
}
} else {
watcher = null;
}
if (watcherToDestroy != null) {
watcherToDestroy.destroy();
}
}
public boolean isInStep() {
return inStep;
}
@Override
public void interrupt() {
try {
if (isSuspended ()) return;
ThreadReferenceWrapper.interrupt(threadReference);
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} catch (ObjectCollectedExceptionWrapper ex) {
} catch (InternalExceptionWrapper ex) {
} catch (VMDisconnectedExceptionWrapper ex) {
}
}
/**
* Sets this thread current.
*
* @see JPDADebugger#getCurrentThread
*/
@Override
public void makeCurrent () {
if (Mutex.EVENT.isReadAccess()) {
debugger.getRequestProcessor().post(new Runnable() {
@Override
public void run() {
doMakeCurrent();
}
});
} else {
doMakeCurrent();
}
}
private void doMakeCurrent() {
debugger.setCurrentThread (this);
Session session = debugger.getSession();
DebuggerManager manager = DebuggerManager.getDebuggerManager();
if (session != manager.getCurrentSession()) {
manager.setCurrentSession(session);
}
}
/**
* Returns monitor this thread is waiting on.
*
* @return monitor this thread is waiting on
*/
@Override
public ObjectVariable getContendedMonitor () {
if (!VirtualMachineWrapper.canGetCurrentContendedMonitor0(vm)) {
return null;
}
try {
ObjectReference or;
accessLock.readLock().lock();
try {
if (!(isSuspended() || suspendedNoFire)) return null;
try {
if ("DestroyJavaVM".equals(threadName)) {
// See defect #6474293
return null;
}
or = ThreadReferenceWrapper.currentContendedMonitor(threadReference);
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
return null;
} catch (IncompatibleThreadStateException e) {
Logger.getLogger(JPDAThreadImpl.class.getName()).log(Level.INFO, getThreadStateLog(), e);
return null;
} catch (com.sun.jdi.InternalException iex) {
Logger.getLogger(JPDAThreadImpl.class.getName()).log(Level.INFO, getThreadStateLog(), iex);
return null;
}
} finally {
accessLock.readLock().unlock();
}
if (or == null) return null;
return new ThisVariable (debugger, or, "" + ObjectReferenceWrapper.uniqueID(or));
} catch (InternalExceptionWrapper e) {
return null;
} catch (ObjectCollectedExceptionWrapper e) {
return null;
} catch (VMDisconnectedExceptionWrapper e) {
return null;
}
}
@Override
public MonitorInfo getContendedMonitorAndOwner() {
ObjectVariable monitor = getContendedMonitor();
if (monitor == null) return null;
// Search for the owner:
MonitorInfo monitorInfo = null;
JPDAThread thread = null;
List<JPDAThread> threads = debugger.getThreadsCollector().getAllThreads();
for (JPDAThread t : threads) {
if (this == t) continue;
ObjectVariable[] ms = t.getOwnedMonitors();
for (ObjectVariable m : ms) {
if (monitor.equals(m)) {
thread = t;
List<MonitorInfo> mf = t.getOwnedMonitorsAndFrames();
for (MonitorInfo mi : mf) {
if (monitor.equals(mi.getMonitor())) {
monitorInfo = mi;
break;
}
}
break;
}
}
if (thread != null) {
break;
}
}
if (monitorInfo != null) {
return monitorInfo;
}
return new MonitorInfoImpl(thread, null, monitor);
}
/**
* Returns monitors owned by this thread.
*
* @return monitors owned by this thread
*/
@Override
public ObjectVariable[] getOwnedMonitors () {
if (!VirtualMachineWrapper.canGetOwnedMonitorInfo0(vm)) {
return new ObjectVariable[0];
}
List<ObjectReference> l;
accessLock.readLock().lock();
try {
if (!(isSuspended() || suspendedNoFire)) return new ObjectVariable [0];
if ("DestroyJavaVM".equals(threadName)) {
// See defect #6474293
return new ObjectVariable[0];
}
try {
l = ThreadReferenceWrapper.ownedMonitors (threadReference);
if (l == null) l = Collections.emptyList();
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
return new ObjectVariable [0];
} catch (ObjectCollectedExceptionWrapper ex) {
return new ObjectVariable [0];
} catch (VMDisconnectedExceptionWrapper ex) {
return new ObjectVariable [0];
} catch (IncompatibleThreadStateException e) {
Logger.getLogger(JPDAThreadImpl.class.getName()).log(Level.INFO, getThreadStateLog(), e);
return new ObjectVariable [0];
} catch (InternalExceptionWrapper iex) {
Logger.getLogger(JPDAThreadImpl.class.getName()).log(Level.INFO, getThreadStateLog(), iex);
return new ObjectVariable [0];
}
} finally {
accessLock.readLock().unlock();
}
int i, k = l.size ();
try {
ObjectVariable[] vs = new ObjectVariable [k];
for (i = 0; i < k; i++) {
ObjectReference var = l.get (i);
vs[i] = new ThisVariable(debugger, var, "" + ObjectReferenceWrapper.uniqueID(var));
}
return vs;
} catch (InternalExceptionWrapper ex) {
return new ObjectVariable [0];
} catch (VMDisconnectedExceptionWrapper ex) {
return new ObjectVariable [0];
} catch (ObjectCollectedExceptionWrapper ex) {
return new ObjectVariable [0];
}
}
public ThreadReference getThreadReference () {
return threadReference;
}
public synchronized ReturnVariableImpl getReturnVariable() {
return returnVariable;
}
public synchronized void setReturnVariable(ReturnVariableImpl returnVariable) {
this.returnVariable = returnVariable;
}
@Override
public void addPropertyChangeListener(PropertyChangeListener l) {
pch.addPropertyChangeListener(l);
}
@Override
public void removePropertyChangeListener(PropertyChangeListener l) {
pch.removePropertyChangeListener(l);
}
@Override
public void addPropertyChangeListener(String name, PropertyChangeListener pcl) {
if (PROP_OPERATIONS_SET.equals(name) || PROP_OPERATIONS_UPDATE.equals(name)) {
operationsPch.addPropertyChangeListener(name, pcl);
} else {
pch.addPropertyChangeListener(name, pcl);
}
}
@Override
public void removePropertyChangeListener(String name, PropertyChangeListener pcl) {
if (PROP_OPERATIONS_SET.equals(name) || PROP_OPERATIONS_UPDATE.equals(name)) {
operationsPch.removePropertyChangeListener(name, pcl);
} else {
pch.removePropertyChangeListener(name, pcl);
}
}
private void fireSuspended(boolean suspended) {
pch.firePropertyChange(JPDAThread.PROP_SUSPENDED,
Boolean.valueOf(!suspended), Boolean.valueOf(suspended));
}
private void fireOperationsChanged(String name) {
operationsPch.firePropertyChange(name, null, null);
}
@Override
public void setObject(Object bean) {
throw new UnsupportedOperationException("Not supported, do not call. Implementing Customizer interface just because of add/remove PropertyChangeListener.");
}
@Override
public void setBeanContext(BeanContext bc) throws PropertyVetoException {
throw new UnsupportedOperationException("Not supported, do not call. Implementing BeanContextChild interface just because of add/remove PropertyChangeListener.");
}
@Override
public BeanContext getBeanContext() {
throw new UnsupportedOperationException("Not supported, do not call. Implementing BeanContextChild interface just because of add/remove PropertyChangeListener.");
}
@Override
public void addVetoableChangeListener(String name, VetoableChangeListener vcl) {
throw new UnsupportedOperationException("Not supported, do not call. Implementing BeanContextChild interface just because of add/remove PropertyChangeListener.");
}
@Override
public void removeVetoableChangeListener(String name, VetoableChangeListener vcl) {
throw new UnsupportedOperationException("Not supported, do not call. Implementing BeanContextChild interface just because of add/remove PropertyChangeListener.");
}
@Override
public List<MonitorInfo> getOwnedMonitorsAndFrames() {
if (VirtualMachineWrapper.canGetMonitorFrameInfo0(vm)) {
accessLock.readLock().lock();
try {
if (!(isSuspended() || suspendedNoFire) || getState() == ThreadReference.THREAD_STATUS_ZOMBIE) {
return Collections.emptyList();
}
synchronized (ownedMonitorsAndFramesSingleAccessLock) {
// Prevent from inconsistencies coming from unsynchronized implementation of
// com.sun.tools.jdi.ThreadReferenceImpl.ownedMonitorsAndFrames()
List<com.sun.jdi.MonitorInfo> monitorInfos = ThreadReferenceWrapper.ownedMonitorsAndFrames0(threadReference);
if (monitorInfos != null && monitorInfos.size() > 0) {
List<MonitorInfo> mis = new ArrayList<MonitorInfo>(monitorInfos.size());
for (com.sun.jdi.MonitorInfo monitorInfo : monitorInfos) {
MonitorInfo mi = createMonitorInfo(monitorInfo);
if (mi != null) {
mis.add(mi);
}
}
return Collections.unmodifiableList(mis);
}
}
} catch (IncompatibleThreadStateException ex) {
Logger.getLogger(JPDAThreadImpl.class.getName()).log(Level.INFO, getThreadStateLog(), ex);
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
} finally {
accessLock.readLock().unlock();
}
}
return Collections.emptyList();
}
    /**
     * Converts a JDI monitor info into the debugger's {@link MonitorInfo},
     * resolving the call stack frame at the reported depth.
     *
     * @param mi com.sun.jdi.MonitorInfo
     * @return monitor info, or <code>null</code> when it can not be created
     */
private MonitorInfo createMonitorInfo(com.sun.jdi.MonitorInfo mi) {
try {
int depth = MonitorInfoWrapper.stackDepth(mi);
CallStackFrame frame = null;
if (depth >= 0) {
try {
CallStackFrame[] frames = getCallStack(depth, depth + 1);
//frame = new CallStackFrameImpl(this, threadReference.frame(depth), depth, debugger);
if (frames.length > 0) {
frame = frames[0];
}
//} catch (IncompatibleThreadStateException ex) {
} catch (AbsentInformationException ex) {
Exceptions.printStackTrace(ex);
}
}
ObjectReference or = MonitorInfoWrapper.monitor(mi);
ObjectVariable monitor = new ThisVariable (debugger, or, "" + ObjectReferenceWrapper.uniqueID(or));
return new MonitorInfoImpl(this, frame, monitor);
} catch (InvalidStackFrameExceptionWrapper ex) {
Exceptions.printStackTrace(ex);
} catch (InternalExceptionWrapper ex) {
} catch (VMDisconnectedExceptionWrapper ex) {
} catch (ObjectCollectedExceptionWrapper ex) {
}
return null;
}
public boolean checkForBlockingThreads() {
try {
if (!VirtualMachineWrapper.canGetCurrentContendedMonitor0(vm) ||
!VirtualMachineWrapper.canGetMonitorInfo0(vm)) {
return false;
}
//System.err.println("\""+getName()+"\".checkForBlockingThreads()");
Map<ThreadReference, ObjectReference> lockedThreadsWithMonitors = null;
//synchronized (t.getDebugger().LOCK) { - can not synchronize on that - method invocation uses this lock.
// TODO: Need to be freed up and method invocation flag needs to be used instead.
List<JPDAThread> oldLockerThreadsList;
List<JPDAThread> newLockerThreadsList;
// Do not wait for write lock if it's not available, since no one will get read lock!
boolean locked = debugger.accessLock.writeLock().tryLock();
if (!locked) {
debugger.accessLock.readLock().lock();
try {
// We can not suspend the whole VM, but can check currently suspended threads, at least:
locked = accessLock.writeLock().tryLock();
if (!locked) {
synchronized (suspendToCheckForMonitorsLock) {
if (!canSuspendToCheckForMonitors) {
return false;
}
ThreadReferenceWrapper.suspend(threadReference);
try {
ObjectReference waitingMonitor = ThreadReferenceWrapper.currentContendedMonitor(threadReference);
if (waitingMonitor != null) {
synchronized (lockerThreadsLock) {
if (waitingMonitor.equals(lockerThreadsMonitor)) {
// We're still blocked at the monitor
return true;
}
}
lockedThreadsWithMonitors = findLockPath(vm, threadReference, false, waitingMonitor);
}
Pair<List<JPDAThread>, List<JPDAThread>> lockerThreadsLists =
updateLockerThreads(lockedThreadsWithMonitors, waitingMonitor);
oldLockerThreadsList = lockerThreadsLists.first();
newLockerThreadsList = lockerThreadsLists.second();
} catch (IncompatibleThreadStateException ex) {
return false;
} finally {
ThreadReferenceWrapper.resume(threadReference);
}
}
} else {
try {
boolean wasSuspended = false;
try {
wasSuspended = ThreadReferenceWrapper.isSuspended0(threadReference);
if (!wasSuspended) {
ThreadReferenceWrapper.suspend(threadReference);
}
ObjectReference waitingMonitor = ThreadReferenceWrapper.currentContendedMonitor(threadReference);
if (waitingMonitor != null) {
synchronized (lockerThreadsLock) {
if (waitingMonitor.equals(lockerThreadsMonitor)) {
// We're still blocked at the monitor
return true;
}
}
lockedThreadsWithMonitors = findLockPath(vm, threadReference, false, waitingMonitor);
}
Pair<List<JPDAThread>, List<JPDAThread>> lockerThreadsLists =
updateLockerThreads(lockedThreadsWithMonitors, waitingMonitor);
oldLockerThreadsList = lockerThreadsLists.first();
newLockerThreadsList = lockerThreadsLists.second();
} catch (IncompatibleThreadStateException ex) {
return false;
} finally {
if (wasSuspended) {
ThreadReferenceWrapper.resume(threadReference);
}
}
} finally {
accessLock.writeLock().unlock();
}
}
} finally {
debugger.accessLock.readLock().unlock();
}
} else {
// locked debugger access
try {
logger.fine("checkForBlockingThreads("+threadName+"): suspend all...");
VirtualMachineWrapper.suspend(vm);
try {
ObjectReference waitingMonitor = ThreadReferenceWrapper.currentContendedMonitor(threadReference);
if (waitingMonitor != null) {
synchronized (lockerThreadsLock) {
if (waitingMonitor.equals(lockerThreadsMonitor)) {
// We're still blocked at the monitor
return true;
}
}
lockedThreadsWithMonitors = findLockPath(vm, threadReference, true, waitingMonitor);
}
Pair<List<JPDAThread>, List<JPDAThread>> lockerThreadsLists =
updateLockerThreads(lockedThreadsWithMonitors, waitingMonitor);
oldLockerThreadsList = lockerThreadsLists.first();
newLockerThreadsList = lockerThreadsLists.second();
//System.err.println("Locker threads list = "+newLockerThreadsList);
} catch (IncompatibleThreadStateException ex) {
return false;
} finally {
logger.fine("checkForBlockingThreads("+threadName+"): resume all.");
VirtualMachineWrapper.resume(vm);
}
} finally {
debugger.accessLock.writeLock().unlock();
}
}
if (oldLockerThreadsList != newLockerThreadsList) { // Not fire when both null
//System.err.println("Fire lockerThreads: "+(oldLockerThreadsList == null || !oldLockerThreadsList.equals(newLockerThreadsList)));
pch.firePropertyChange(PROP_LOCKER_THREADS, oldLockerThreadsList, newLockerThreadsList); // NOI18N
}
//}
//setLockerThreads(lockedThreadsWithMonitors);
return lockedThreadsWithMonitors != null;
} catch (VMDisconnectedExceptionWrapper e) {
} catch (InternalExceptionWrapper e) {
} catch (ObjectCollectedExceptionWrapper e) {
} catch (IllegalThreadStateExceptionWrapper ex) {
// Thrown when thread has exited
}
return false;
}
private Pair<List<JPDAThread>, List<JPDAThread>> updateLockerThreads(
Map<ThreadReference, ObjectReference> lockedThreadsWithMonitors,
ObjectReference waitingMonitor) throws InternalExceptionWrapper,
VMDisconnectedExceptionWrapper,
ObjectCollectedExceptionWrapper,
IllegalThreadStateExceptionWrapper {
List<JPDAThread> oldLockerThreadsList;
List<JPDAThread> newLockerThreadsList;
synchronized (lockerThreadsLock) {
oldLockerThreadsList = lockerThreadsList;
if (lockedThreadsWithMonitors != null) {
//lockerThreads2 = lockedThreadsWithMonitors;
lockerThreadsMonitor = waitingMonitor;
if (!submitMonitorEnteredFor(waitingMonitor)) {
submitCheckForMonitorEntered(waitingMonitor);
}
lockerThreadsList = new ThreadListDelegate(debugger, new ArrayList<ThreadReference>(lockedThreadsWithMonitors.keySet()));
} else {
//lockerThreads2 = null;
lockerThreadsMonitor = null;
lockerThreadsList = null;
}
newLockerThreadsList = lockerThreadsList;
}
return Pair.of(oldLockerThreadsList, newLockerThreadsList);
}
private static Map<ThreadReference, ObjectReference> findLockPath(VirtualMachine vm, ThreadReference tr,
final boolean suspendedAll,
ObjectReference waitingMonitor) throws IncompatibleThreadStateException,
InternalExceptionWrapper,
VMDisconnectedExceptionWrapper,
ObjectCollectedExceptionWrapper,
IllegalThreadStateExceptionWrapper {
Map<ThreadReference, ObjectReference> threadsWithMonitors = new LinkedHashMap<ThreadReference, ObjectReference>();
Map<ObjectReference, ThreadReference> monitorMap = new HashMap<ObjectReference, ThreadReference>();
for (ThreadReference t : VirtualMachineWrapper.allThreads(vm)) {
if (suspendedAll || ThreadReferenceWrapper.isSuspended0(t)) {
List<ObjectReference> monitors;
try {
monitors = ThreadReferenceWrapper.ownedMonitors0(t);
} catch (IllegalThreadStateExceptionWrapper | IncompatibleThreadStateException ex) {
continue;
}
if (monitors != null) {
for (ObjectReference m : monitors) {
monitorMap.put(m, t);
}
}
}
}
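        // Walk the chain of contended monitors: find the owner of the monitor this thread
        // waits on, then the monitor that owner is waiting on, and so on, collecting the
        // blocking threads along the way.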
while (waitingMonitor != null) {
ThreadReference t = monitorMap.get(waitingMonitor);
if (t != null) {
if (ThreadReferenceWrapper.suspendCount(t) > (suspendedAll ? 1 : 0)) { // Add it if it was suspended before
threadsWithMonitors.put(t, waitingMonitor);
}
waitingMonitor = ThreadReferenceWrapper.currentContendedMonitor(t);
} else {
break;
}
}
if (threadsWithMonitors.size() > 0) {
return threadsWithMonitors;
} else {
return null;
}
}
public synchronized List<JPDAThread> getLockerThreads() {
return lockerThreadsList;
}
public JPDADebuggerImpl getDebugger() {
return debugger;
}
/*public void resumeToFreeMonitor(Variable monitor) {
synchronized (this) {
if (!isSuspended()) {
return ; // Already resumed
}
}
threadReference.virtualMachine().eventRequestManager();
}*/
public void resumeBlockingThreads() {
List<JPDAThread> blockingThreads;
synchronized (lockerThreadsLock) {
if (lockerThreadsList == null) {
return ;//false;
}
blockingThreads = new ArrayList<>(lockerThreadsList);
}
List<ThreadReference> resumedThreads = new ArrayList<ThreadReference>(blockingThreads.size());
for (JPDAThread t : blockingThreads) {
if (t.isSuspended()) {
t.resume();
resumedThreads.add(((JPDAThreadImpl) t).getThreadReference());
}
}
synchronized (lockerThreadsLock) {
this.resumedBlockingThreads = resumedThreads;
}
return ;//true;
}
private void submitMonitorEnteredRequest(EventRequest monitorEnteredRequest) throws InternalExceptionWrapper, VMDisconnectedExceptionWrapper, ObjectCollectedExceptionWrapper {
EventRequestWrapper.setSuspendPolicy(monitorEnteredRequest, EventRequest.SUSPEND_ALL);
EventRequestWrapper.putProperty(monitorEnteredRequest, Operator.SILENT_EVENT_PROPERTY, Boolean.TRUE);
debugger.getOperator().register(monitorEnteredRequest, new Executor() {
@Override
public boolean exec(Event event) {
try {
try {
//MonitorContendedEnteredEvent monitorEnteredEvent = (MonitorContendedEnteredEvent) event;
EventRequestManagerWrapper.deleteEventRequest(
VirtualMachineWrapper.eventRequestManager(vm),
EventWrapper.request(event));
} catch (InvalidRequestStateExceptionWrapper ex) {}
debugger.getOperator().unregister(EventWrapper.request(event));
} catch (InternalExceptionWrapper ex) {
return true;
} catch (VMDisconnectedExceptionWrapper ex) {
return true;
}
List<JPDAThread> oldLockerThreadsList;
List<ThreadReference> threadsToSuspend;
synchronized (lockerThreadsLock) {
oldLockerThreadsList = lockerThreadsList;
//lockerThreads2 = null;
lockerThreadsMonitor = null;
lockerThreadsList = null;
threadsToSuspend = resumedBlockingThreads;
}
pch.firePropertyChange(PROP_LOCKER_THREADS, oldLockerThreadsList, null);
//System.err.println("Monitor freed, threadsToSuspend = "+threadsToSuspend);
if (threadsToSuspend != null) {
for (ThreadReference tr : threadsToSuspend) {
try {
ThreadReferenceWrapper.suspend(tr); // Increases the suspend count to 2 so that it's not resumed by EventSet.resume()
} catch (IllegalThreadStateExceptionWrapper iex) {
// The thread is gone
} catch (InternalExceptionWrapper iex) {
// ??
} catch (ObjectCollectedExceptionWrapper ocex) {
// The thread is gone
} catch (VMDisconnectedExceptionWrapper vdex) {}
JPDAThreadImpl t = debugger.getExistingThread(tr);
if (t != null) {
t.notifySuspended();
}
//System.err.println(" Suspending "+t.getName()+" after monitor obtained.");
}
}
if (isMethodInvoking()) {
// HACK because of JDI:
                    // When invoking a method, EventSet.resume() will not resume the invocation thread.
                    // We have to do it explicitly and suspend the thread right after the invocation;
                    // the 'resumedToFinishMethodInvocation' flag is used for that.
debugger.getRequestProcessor().post(new Runnable() {
@Override
public void run() {
accessLock.writeLock().lock();
try {
logger.fine("Resuming thread "+threadName+" to finish method invoke...");
resumedToFinishMethodInvocation = true;
ThreadReferenceWrapper.resume(threadReference);
} catch (IllegalThreadStateExceptionWrapper iex) {
// The thread is gone
} catch (VMDisconnectedExceptionWrapper e) {
// Ignored
} catch (Exception e) {
Exceptions.printStackTrace(e);
} finally {
accessLock.writeLock().unlock();
}
//System.err.println(" Resuming "+getName()+" because of method invocation.");
}
}, 200);
}
return true;
}
@Override
public void removed(EventRequest eventRequest) {
}
});
try {
EventRequestWrapper.enable(monitorEnteredRequest);
} catch (InternalExceptionWrapper ex) {
debugger.getOperator().unregister(monitorEnteredRequest);
throw ex;
} catch (ObjectCollectedExceptionWrapper ocex) {
debugger.getOperator().unregister(monitorEnteredRequest);
throw ocex;
} catch (InvalidRequestStateExceptionWrapper irse) {
Exceptions.printStackTrace(irse);
}
}
private boolean submitMonitorEnteredFor(ObjectReference waitingMonitor) {
if (!VirtualMachineWrapper.canRequestMonitorEvents0(vm)) {
return false;
}
try {
com.sun.jdi.request.MonitorContendedEnteredRequest monitorEnteredRequest =
EventRequestManagerWrapper.createMonitorContendedEnteredRequest(
VirtualMachineWrapper.eventRequestManager(vm));
MonitorContendedEnteredRequestWrapper.addThreadFilter(monitorEnteredRequest, threadReference);
submitMonitorEnteredRequest(monitorEnteredRequest);
} catch (InternalExceptionWrapper e) {
return false;
} catch (ObjectCollectedExceptionWrapper e) {
return false;
} catch (VMDisconnectedExceptionWrapper e) {
return false;
}
return true;
}
private void submitCheckForMonitorEntered(ObjectReference waitingMonitor) throws InternalExceptionWrapper, VMDisconnectedExceptionWrapper, ObjectCollectedExceptionWrapper, IllegalThreadStateExceptionWrapper {
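        // Fallback for VMs that can not post MonitorContendedEntered events: a silent
        // breakpoint is submitted at the code index following the blocked location, which
        // should be reached once the thread acquires the contended monitor.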
try {
ThreadReferenceWrapper.suspend(threadReference);
logger.fine("submitCheckForMonitorEntered(): suspending "+threadName);
ObjectReference monitor = ThreadReferenceWrapper.currentContendedMonitor(threadReference);
if (monitor == null) return ;
Location loc = StackFrameWrapper.location(ThreadReferenceWrapper.frame(threadReference, 0));
loc = MethodWrapper.locationOfCodeIndex(LocationWrapper.method(loc), LocationWrapper.codeIndex(loc) + 1);
if (loc == null) return;
BreakpointRequest br = EventRequestManagerWrapper.createBreakpointRequest(
VirtualMachineWrapper.eventRequestManager(MirrorWrapper.virtualMachine(threadReference)), loc);
BreakpointRequestWrapper.addThreadFilter(br, threadReference);
submitMonitorEnteredRequest(br);
} catch (IncompatibleThreadStateException itex) {
Exceptions.printStackTrace(itex);
} catch (InvalidStackFrameExceptionWrapper isex) {
Exceptions.printStackTrace(isex);
} finally {
logger.fine("submitCheckForMonitorEntered(): resuming "+threadName);
ThreadReferenceWrapper.resume(threadReference);
}
}
public void setStepSuspendedBy(JPDABreakpoint breakpoint, boolean fire, List<JPDAThreadImpl> steppingThreads) {
synchronized (stepBreakpointLock) {
this.stepSuspendedByBreakpoint = breakpoint;
if (this.suspendedSteppingThreads == null) {
this.suspendedSteppingThreads = new HashSet<>();
}
this.suspendedSteppingThreads.addAll(steppingThreads);
if (logger.isLoggable(Level.FINER)) {
logger.finer("setStepSuspendedBy("+threadReference+"): stepSuspendedByBreakpoint = "+stepSuspendedByBreakpoint+", steppingThreads = "+steppingThreads);
}
}
for (JPDAThreadImpl st : steppingThreads) {
st.addSteppingSuspendedBy(this);
}
if (fire) {
pch.firePropertyChange(PROP_STEP_SUSPENDED_BY_BREAKPOINT, null, breakpoint);
}
}
void addSteppingSuspendedBy(JPDAThreadImpl thread) {
synchronized (stepBreakpointLock) {
if (this.steppingSuspendedByBptsInThreads == null) {
this.steppingSuspendedByBptsInThreads = new HashSet<>();
}
this.steppingSuspendedByBptsInThreads.add(thread);
}
}
public boolean unsetSteppingSuspendedByBpts() {
Set<JPDAThreadImpl> suspByThreads;
synchronized (stepBreakpointLock) {
suspByThreads = this.steppingSuspendedByBptsInThreads;
this.steppingSuspendedByBptsInThreads = null;
}
if (suspByThreads != null) {
for (JPDAThreadImpl t : suspByThreads) {
t.unsetStepSuspendedByBpIn(this);
}
}
return suspByThreads != null;
}
void unsetStepSuspendedByBpIn(JPDAThreadImpl thread) {
JPDABreakpoint oldBp;
synchronized (stepBreakpointLock) {
oldBp = this.stepSuspendedByBreakpoint;
this.stepSuspendedByBreakpoint = null;
if (logger.isLoggable(Level.FINER)) {
logger.finer("unsetStepSuspendedByBp("+threadReference+"): stepSuspendedByBreakpoint = "+stepSuspendedByBreakpoint);
}
            if (this.suspendedSteppingThreads != null) {
this.suspendedSteppingThreads.remove(thread);
if (this.suspendedSteppingThreads.isEmpty()) {
this.suspendedSteppingThreads = null;
}
}
}
if (oldBp != null) {
pch.firePropertyChange(PROP_STEP_SUSPENDED_BY_BREAKPOINT, oldBp, null);
}
//return oldBp != null;
}
private JPDABreakpoint removeSuspendedByBreakpoint() {
JPDABreakpoint brkp = null;
Set<JPDAThreadImpl> steppingThreads = null;
synchronized (stepBreakpointLock) {
if (stepSuspendedByBreakpoint != null) {
brkp = stepSuspendedByBreakpoint;
stepSuspendedByBreakpoint = null;
if (logger.isLoggable(Level.FINER)) {
logger.finer("removeSuspendedByBreakpoint("+threadReference+"): stepSuspendedByBreakpoint = "+stepSuspendedByBreakpoint);
}
}
if (this.suspendedSteppingThreads != null) {
steppingThreads = this.suspendedSteppingThreads;
this.suspendedSteppingThreads = null;
}
}
if (steppingThreads != null) {
for (JPDAThreadImpl t : steppingThreads) {
t.removeSteppingSuspendedBy(this);
}
}
return brkp;
}
private void removeSteppingSuspendedBy(JPDAThreadImpl thread) {
synchronized (stepBreakpointLock) {
if (this.steppingSuspendedByBptsInThreads != null) {
this.steppingSuspendedByBptsInThreads.remove(thread);
if (this.steppingSuspendedByBptsInThreads.isEmpty()) {
this.steppingSuspendedByBptsInThreads = null;
}
}
}
}
public String getThreadStateLog() {
return getThreadStateLog(threadReference)+", internal suspend status = "+suspended+", suspendedNoFire = "+suspendedNoFire+", suspendedOnAnEvent = "+suspendedOnAnEvent+", invoking a method = "+methodInvoking+", is in step = "+inStep;
}
public static String getThreadStateLog(ThreadReference threadReference) {
String name;
try {
name = ThreadReferenceWrapper.name(threadReference);
} catch (InternalExceptionWrapper | VMDisconnectedExceptionWrapper |
ObjectCollectedExceptionWrapper | IllegalThreadStateExceptionWrapper ex) {
Throwable t = ex.getCause();
name = "<"+t.getClass() + ":" + t.getLocalizedMessage()+">";
}
String status;
try {
status = Integer.toString(ThreadReferenceWrapper.status(threadReference));
} catch (InternalExceptionWrapper | VMDisconnectedExceptionWrapper |
ObjectCollectedExceptionWrapper | IllegalThreadStateExceptionWrapper ex) {
Throwable t = ex.getCause();
status = "<"+t.getClass() + ":" + t.getLocalizedMessage()+">";
}
String isSuspended;
try {
isSuspended = Boolean.toString(ThreadReferenceWrapper.isSuspended(threadReference));
} catch (InternalExceptionWrapper | VMDisconnectedExceptionWrapper |
ObjectCollectedExceptionWrapper | IllegalThreadStateExceptionWrapper ex) {
Throwable t = ex.getCause();
isSuspended = "<"+t.getClass() + ":" + t.getLocalizedMessage()+">";
}
String suspendCount;
try {
suspendCount = Integer.toString(ThreadReferenceWrapper.suspendCount(threadReference));
} catch (InternalExceptionWrapper | VMDisconnectedExceptionWrapper |
ObjectCollectedExceptionWrapper | IllegalThreadStateExceptionWrapper ex) {
Throwable t = ex.getCause();
suspendCount = "<"+t.getClass() + ":" + t.getLocalizedMessage()+">";
}
String isAtBreakpoint;
try {
isAtBreakpoint = Boolean.toString(ThreadReferenceWrapper.isAtBreakpoint(threadReference));
} catch (InternalExceptionWrapper | VMDisconnectedExceptionWrapper |
ObjectCollectedExceptionWrapper | IllegalThreadStateExceptionWrapper ex) {
Throwable t = ex.getCause();
isAtBreakpoint = "<"+t.getClass() + ":" + t.getLocalizedMessage()+">";
}
String msg = "Thread '"+name+
"': status = "+status+
", is suspended = "+isSuspended+
", suspend count = "+suspendCount+
", is at breakpoint = "+isAtBreakpoint;
return msg;
}
@Override
public String toString() {
return "'"+getName()+"' ("+Integer.toHexString(System.identityHashCode(this))+") from DBG("+Integer.toHexString(debugger.hashCode())+")";
}
private final Object pendingActionsLock = new Object();
private Object pendingAction;
private Variable pendingVariable;
public void setPendingAction(Object action) {
synchronized (pendingActionsLock) {
if (logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE, "{0} setPendingAction({1})", new Object[]{threadName, action});
}
this.pendingAction = action;
this.pendingVariable = null;
}
}
public Object getPendingAction() {
synchronized (pendingActionsLock) {
return pendingAction;
}
}
public String getPendingString(Object action) {
return NbBundle.getMessage(JPDAThreadImpl.class, "MSG_PendingAction", action);
}
public Variable getPendingVariable(Object action) {
Variable var;
synchronized (pendingActionsLock) {
var = (action == pendingAction) ? pendingVariable : null;
}
if (var == null) {
StringReference sr = threadReference.virtualMachine().mirrorOf(getPendingString(action));
var = new AbstractObjectVariable (debugger, sr, null);
}
synchronized (pendingActionsLock) {
if (action == pendingAction) {
pendingVariable = var;
}
}
return var;
}
private static class ThreadListDelegate extends AbstractList<JPDAThread> {
private List<ThreadReference> threads;
private JPDADebuggerImpl debugger;
public ThreadListDelegate(JPDADebuggerImpl debugger, List<ThreadReference> threads) {
this.debugger = debugger;
this.threads = threads;
}
@Override
public JPDAThread get(int index) {
return debugger.getThread(threads.get(index));
}
@Override
public int size() {
return threads.size();
}
}
private class ThreadReentrantReadWriteLock extends ReentrantReadWriteLock {
private final ReentrantReadWriteLock.ReadLock readerLock;
private final ReentrantReadWriteLock.WriteLock writerLock;
private ThreadReentrantReadWriteLock() {
super(true);
readerLock = new ThreadReadLock();
writerLock = new ThreadWriteLock();
}
@Override
public ReadLock readLock() {
return readerLock;
}
@Override
public WriteLock writeLock() {
return writerLock;
}
private class ThreadReadLock extends ReadLock {
private ThreadReadLock() {
super(ThreadReentrantReadWriteLock.this);
}
@Override
public void lock() {
debugger.accessLock.readLock().lock();
super.lock();
}
@Override
public void lockInterruptibly() throws InterruptedException {
debugger.accessLock.readLock().lockInterruptibly();
try {
super.lockInterruptibly();
} catch (InterruptedException iex) {
debugger.accessLock.readLock().unlock();
throw iex;
}
}
@Override
public boolean tryLock() {
boolean locked = debugger.accessLock.readLock().tryLock();
if (locked) {
locked = super.tryLock();
if (!locked) {
debugger.accessLock.readLock().unlock();
}
}
return locked;
}
@Override
public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
boolean locked = debugger.accessLock.readLock().tryLock(timeout, unit);
if (locked) {
locked = super.tryLock(timeout, unit);
if (!locked) {
debugger.accessLock.readLock().unlock();
}
}
return locked;
}
@Override
public void unlock() {
super.unlock();
debugger.accessLock.readLock().unlock();
}
}
private class ThreadWriteLock extends WriteLock {
private ThreadWriteLock() {
super(ThreadReentrantReadWriteLock.this);
}
@Override
public void lock() {
debugger.accessLock.readLock().lock();
super.lock();
}
@Override
public void lockInterruptibly() throws InterruptedException {
debugger.accessLock.readLock().lockInterruptibly();
try {
super.lockInterruptibly();
} catch (InterruptedException iex) {
debugger.accessLock.readLock().unlock();
throw iex;
}
}
@Override
public boolean tryLock() {
boolean locked = debugger.accessLock.readLock().tryLock();
if (locked) {
locked = super.tryLock();
if (!locked) {
debugger.accessLock.readLock().unlock();
}
}
return locked;
}
@Override
public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
boolean locked = debugger.accessLock.readLock().tryLock(timeout, unit);
if (locked) {
locked = super.tryLock(timeout, unit);
if (!locked) {
debugger.accessLock.readLock().unlock();
}
}
return locked;
}
@Override
public void unlock() {
super.unlock();
debugger.accessLock.readLock().unlock();
}
}
}
}
| 52,596 |
697 | <reponame>clinuxrulz/sodium
package pump;
public class Sale
{
public Sale(Fuel fuel, double price, double cost, double quantity) {
this.fuel = fuel;
this.price = price;
this.cost = cost;
this.quantity = quantity;
}
public final Fuel fuel;
public final double price;
public final double cost;
public final double quantity;
}
| 145 |
45,293 | <gh_stars>1000+
/*
* Copyright 2010-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "KAssert.h"
#include "TypeInfo.h"
extern "C" {
// Seeks for the specified id. In case of failure returns a valid pointer to some record, never returns nullptr.
// It is the caller's responsibility to check if the search has succeeded or not.
InterfaceTableRecord const* LookupInterfaceTableRecord(InterfaceTableRecord const* interfaceTable,
int interfaceTableSize, ClassId interfaceId) {
if (interfaceTableSize <= 8) {
// Linear search.
int i;
for (i = 0; i < interfaceTableSize - 1 && interfaceTable[i].id < interfaceId; ++i);
return interfaceTable + i;
}
int l = 0, r = interfaceTableSize - 1;
while (l < r) {
int m = (l + r) / 2;
if (interfaceTable[m].id < interfaceId)
l = m + 1;
else r = m;
}
return interfaceTable + l;
}
}
| 486 |
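The lookup above never signals failure directly: for tables of at most eight entries it does a linear scan, otherwise a lower-bound binary search, and the caller compares the id stored at the returned slot. As a rough illustration (not part of the Kotlin/Native runtime — the class and array names here are invented), the same strategy in Java might look like this:
final class InterfaceLookup {
    // Returns an index into the sorted id table; never "not found". The caller
    // must check ids[result] == wanted, mirroring the C++ contract above.
    static int lookup(int[] ids, int wanted) {
        int size = ids.length;
        if (size <= 8) {
            int i;
            for (i = 0; i < size - 1 && ids[i] < wanted; ++i) {
                // linear scan: stop at the first id >= wanted, or at the last slot
            }
            return i;
        }
        int l = 0;
        int r = size - 1;
        while (l < r) {
            int m = (l + r) >>> 1;   // unsigned shift avoids the (l + r) overflow pitfall
            if (ids[m] < wanted) {
                l = m + 1;
            } else {
                r = m;
            }
        }
        return l;
    }
    public static void main(String[] args) {
        int[] ids = {3, 7, 11, 19, 23, 31, 37, 41, 43, 53};
        int hit = lookup(ids, 19);
        int miss = lookup(ids, 20);
        System.out.println(hit + " -> " + (ids[hit] == 19));    // 3 -> true
        System.out.println(miss + " -> " + (ids[miss] == 20));  // 4 -> false
    }
}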
1,909 | package org.knowm.xchange.bitcoinde.dto.marketdata;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Arrays;
public class BitcoindeOrders {
private final BitcoindeOrder[] bids;
private final BitcoindeOrder[] asks;
public BitcoindeOrders(
@JsonProperty("bids") BitcoindeOrder[] bids, @JsonProperty("asks") BitcoindeOrder[] asks) {
this.bids = bids;
this.asks = asks;
}
public BitcoindeOrder[] getBids() {
return bids;
}
public BitcoindeOrder[] getAsks() {
return asks;
}
@Override
public String toString() {
return "BitcoindeOrders{"
+ "bids="
+ Arrays.toString(bids)
+ ", asks="
+ Arrays.toString(asks)
+ '}';
}
}
| 306 |
977 | <filename>Sources/Physics/Colliders/CapsuleCollider.cpp<gh_stars>100-1000
#include "CapsuleCollider.hpp"
#include <BulletCollision/CollisionShapes/btCapsuleShape.h>
namespace acid {
CapsuleCollider::CapsuleCollider(float radius, float height, const Transform &localTransform) :
//Collider(localTransform),
shape(std::make_unique<btCapsuleShape>(radius, height)),
radius(radius),
height(height) {
this->localTransform = localTransform;
this->localTransform.SetLocalScale({radius, height, radius});
}
CapsuleCollider::~CapsuleCollider() {
}
btCollisionShape *CapsuleCollider::GetCollisionShape() const {
return shape.get();
}
void CapsuleCollider::SetRadius(float radius) {
this->radius = radius;
shape->setImplicitShapeDimensions({radius, 0.5f * height, radius});
localTransform.SetLocalScale({radius, height, radius});
}
void CapsuleCollider::SetHeight(float height) {
this->height = height;
shape->setImplicitShapeDimensions({radius, 0.5f * height, radius});
localTransform.SetLocalScale({radius, height, radius});
}
const Node &operator>>(const Node &node, CapsuleCollider &collider) {
node["localTransform"].Get(collider.localTransform);
node["radius"].Get(collider.radius);
node["height"].Get(collider.height);
return node;
}
Node &operator<<(Node &node, const CapsuleCollider &collider) {
node["localTransform"].Set(collider.localTransform);
node["radius"].Set(collider.radius);
node["height"].Set(collider.height);
return node;
}
}
| 485 |
1,457 | <reponame>Actis92/lightning-flash
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, List, Optional, Type
from torch.utils.data import Dataset
from flash.core.data.data_module import DataModule
from flash.core.data.data_pipeline import DataPipelineState
from flash.core.data.io.input import Input
from flash.core.data.io.input_transform import InputTransform
from flash.core.utilities.stages import RunningStage
from flash.core.utilities.types import INPUT_TRANSFORM_TYPE
from flash.pointcloud.segmentation.input import PointCloudSegmentationDatasetInput, PointCloudSegmentationFoldersInput
class PointCloudSegmentationData(DataModule):
input_transform_cls = InputTransform
@classmethod
def from_folders(
cls,
train_folder: Optional[str] = None,
val_folder: Optional[str] = None,
test_folder: Optional[str] = None,
predict_folder: Optional[str] = None,
train_transform: INPUT_TRANSFORM_TYPE = InputTransform,
val_transform: INPUT_TRANSFORM_TYPE = InputTransform,
test_transform: INPUT_TRANSFORM_TYPE = InputTransform,
predict_transform: INPUT_TRANSFORM_TYPE = InputTransform,
input_cls: Type[Input] = PointCloudSegmentationFoldersInput,
transform_kwargs: Optional[Dict] = None,
**data_module_kwargs: Any,
) -> "PointCloudSegmentationData":
ds_kw = dict(
data_pipeline_state=DataPipelineState(),
transform_kwargs=transform_kwargs,
input_transforms_registry=cls.input_transforms_registry,
)
return cls(
input_cls(RunningStage.TRAINING, train_folder, transform=train_transform, **ds_kw),
input_cls(RunningStage.VALIDATING, val_folder, transform=val_transform, **ds_kw),
input_cls(RunningStage.TESTING, test_folder, transform=test_transform, **ds_kw),
input_cls(RunningStage.PREDICTING, predict_folder, transform=predict_transform, **ds_kw),
**data_module_kwargs,
)
@classmethod
def from_files(
cls,
predict_files: Optional[List[str]] = None,
predict_transform: INPUT_TRANSFORM_TYPE = InputTransform,
input_cls: Type[Input] = PointCloudSegmentationFoldersInput,
transform_kwargs: Optional[Dict] = None,
**data_module_kwargs: Any,
) -> "PointCloudSegmentationData":
ds_kw = dict(
data_pipeline_state=DataPipelineState(),
transform_kwargs=transform_kwargs,
input_transforms_registry=cls.input_transforms_registry,
)
return cls(
predict_input=input_cls(RunningStage.PREDICTING, predict_files, transform=predict_transform, **ds_kw),
**data_module_kwargs,
)
@classmethod
def from_datasets(
cls,
train_dataset: Optional[Dataset] = None,
val_dataset: Optional[Dataset] = None,
test_dataset: Optional[Dataset] = None,
predict_dataset: Optional[Dataset] = None,
train_transform: INPUT_TRANSFORM_TYPE = InputTransform,
val_transform: INPUT_TRANSFORM_TYPE = InputTransform,
test_transform: INPUT_TRANSFORM_TYPE = InputTransform,
predict_transform: INPUT_TRANSFORM_TYPE = InputTransform,
input_cls: Type[Input] = PointCloudSegmentationDatasetInput,
transform_kwargs: Optional[Dict] = None,
**data_module_kwargs: Any,
) -> "PointCloudSegmentationData":
ds_kw = dict(
data_pipeline_state=DataPipelineState(),
transform_kwargs=transform_kwargs,
input_transforms_registry=cls.input_transforms_registry,
)
return cls(
input_cls(RunningStage.TRAINING, train_dataset, transform=train_transform, **ds_kw),
input_cls(RunningStage.VALIDATING, val_dataset, transform=val_transform, **ds_kw),
input_cls(RunningStage.TESTING, test_dataset, transform=test_transform, **ds_kw),
input_cls(RunningStage.PREDICTING, predict_dataset, transform=predict_transform, **ds_kw),
**data_module_kwargs,
)
| 1,872 |
1,338 | <reponame>Kirishikesan/haiku
/*
* Copyright (C) 1999-2001 Free Software Foundation, Inc.
* This file is part of the GNU LIBICONV Library.
*
* The GNU LIBICONV Library is free software; you can redistribute it
* and/or modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* The GNU LIBICONV Library is distributed in the hope that it will be
* useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with the GNU LIBICONV Library; see the file COPYING.LIB.
* If not, write to the Free Software Foundation, Inc., 51 Franklin Street,
* Fifth Floor, Boston, MA 02110-1301, USA.
*/
/*
* GB/T 12345-1990
*/
/*
* GB/T 12345-1990 is a traditional chinese counterpart of GB 2312-1986.
* According to the unicode.org tables:
* 2146 characters have been changed to their traditional counterpart,
* 103 characters have been added, no characters have been removed.
* Therefore we use an auxiliary table, which contains only the changes.
*/
#include "gb12345ext.h"
static int
gb12345_mbtowc (conv_t conv, ucs4_t *pwc, const unsigned char *s, int n)
{
int ret;
/* The gb12345ext table overrides some entries in the gb2312 table. */
/* Try the GB12345 extensions -> Unicode table. */
ret = gb12345ext_mbtowc(conv,pwc,s,n);
if (ret != RET_ILSEQ)
return ret;
/* Try the GB2312 -> Unicode table. */
ret = gb2312_mbtowc(conv,pwc,s,n);
return ret;
}
static int
gb12345_wctomb (conv_t conv, unsigned char *r, ucs4_t wc, int n)
{
int ret;
/* The gb12345ext table overrides some entries in the gb2312 table. */
/* Try the Unicode -> GB12345 extensions table. */
ret = gb12345ext_wctomb(conv,r,wc,n);
if (ret != RET_ILUNI)
return ret;
/* Try the Unicode -> GB2312 table, and check that the resulting GB2312
byte sequence is not overridden by the GB12345 extensions table. */
ret = gb2312_wctomb(conv,r,wc,n);
if (ret == 2 && gb12345ext_mbtowc(conv,&wc,r,2) == 2)
return RET_ILUNI;
else
return ret;
}
| 767 |
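The pair of functions above realizes GB/T 12345 as a small overlay on top of GB 2312: decoding consults the table of changed code points first and falls back to the base table, while encoding refuses a base-table hit whose byte sequence the override table remaps. A minimal Java sketch of that overlay pattern, with placeholder tables rather than the real encodings:
import java.util.HashMap;
import java.util.Map;
// Illustrative only: the "auxiliary table of changes" idea from the libiconv
// code above, expressed as an overlay map. The real GB12345/GB2312 tables are
// large; these maps would be filled with generated entries.
final class OverlayCodec {
    private final Map<Integer, Integer> base = new HashMap<>();      // base table, e.g. GB2312 -> Unicode
    private final Map<Integer, Integer> overrides = new HashMap<>(); // only the GB12345 changes
    void putBase(int code, int unicode)     { base.put(code, unicode); }
    void putOverride(int code, int unicode) { overrides.put(code, unicode); }
    // Decode: try the small override table first, then fall back to the base table.
    Integer decode(int code) {
        Integer u = overrides.get(code);
        return (u != null) ? u : base.get(code);
    }
    // Encode: prefer the override table; a base-table hit only counts if the
    // override table does not remap that code (mirrors the RET_ILUNI check above).
    // A real converter would keep a reverse index instead of scanning.
    Integer encode(int unicode) {
        for (Map.Entry<Integer, Integer> e : overrides.entrySet()) {
            if (e.getValue() == unicode) {
                return e.getKey();
            }
        }
        for (Map.Entry<Integer, Integer> e : base.entrySet()) {
            if (e.getValue() == unicode && !overrides.containsKey(e.getKey())) {
                return e.getKey();
            }
        }
        return null; // not representable in this charset
    }
}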
335 | <reponame>Safal08/Hacktoberfest-1
{
"word": "Trait",
"definitions": [
"A distinguishing quality or characteristic, typically one belonging to a person.",
"A genetically determined characteristic."
],
"parts-of-speech": "Noun"
} | 96 |
571 | /*
* Copyright (c) 2020-2021 <NAME> <EMAIL>
* zlib License, see LICENSE file.
*/
#ifndef BN_HBE_PTR_H
#define BN_HBE_PTR_H
/**
* @file
* bn::hbe_ptr header file.
*
* @ingroup hblank_effect
*/
#include "bn_utility.h"
#include "bn_span_fwd.h"
#include "bn_functional.h"
namespace bn
{
/**
* @brief std::shared_ptr like smart pointer that retains shared ownership of a H-Blank effect.
*
* Several hbe_ptr objects may own the same H-Blank effect.
*
* The H-Blank effect is released when the last remaining hbe_ptr owning it is destroyed.
*
* @ingroup hblank_effect
*/
class hbe_ptr
{
public:
/**
* @brief Releases the referenced H-Blank effect if no more hbe_ptr objects reference to it.
*/
~hbe_ptr()
{
if(_id >= 0)
{
_destroy();
}
}
/**
* @brief Returns the internal id.
*/
[[nodiscard]] int id() const
{
return _id;
}
/**
* @brief Indicates if this H-Blank effect must be committed to the GBA or not.
*/
[[nodiscard]] bool visible() const;
/**
* @brief Sets if this H-Blank effect must be committed to the GBA or not.
*/
void set_visible(bool visible);
/**
* @brief Default equal operator.
*/
[[nodiscard]] friend bool operator==(const hbe_ptr& a, const hbe_ptr& b) = default;
protected:
/// @cond DO_NOT_DOCUMENT
explicit hbe_ptr(int id) :
_id(int8_t(id))
{
}
hbe_ptr(const hbe_ptr& other);
hbe_ptr& operator=(const hbe_ptr& other);
hbe_ptr(hbe_ptr&& other) noexcept :
hbe_ptr(other._id)
{
other._id = -1;
}
hbe_ptr& operator=(hbe_ptr&& other) noexcept
{
bn::swap(_id, other._id);
return *this;
}
void swap(hbe_ptr& other)
{
bn::swap(_id, other._id);
}
friend void swap(hbe_ptr& a, hbe_ptr& b)
{
bn::swap(a._id, b._id);
}
/// @endcond
private:
int8_t _id;
void _destroy();
};
/**
* @brief Hash support for hbe_ptr.
*
* @ingroup hblank_effect
* @ingroup functional
*/
template<>
struct hash<hbe_ptr>
{
/**
* @brief Returns the hash of the given hbe_ptr.
*/
[[nodiscard]] unsigned operator()(const hbe_ptr& value) const
{
return make_hash(value.id());
}
};
}
#endif
| 1,054 |
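The class comment above spells out shared-ownership semantics for an H-Blank effect slot: any number of hbe_ptr copies may refer to the same id, and the slot is freed only when the last copy is destroyed. Purely as an illustration of that contract (butano does this with C++ copy/move constructors and a destructor, not a Java-style handle), a reference-counted handle could be sketched like this:
import java.util.concurrent.atomic.AtomicInteger;
// Illustrative sketch only: the shared-ownership contract documented above,
// expressed with an explicit reference count. The println stands in for the
// engine's _destroy() call; all names here are invented.
final class SharedHandle implements AutoCloseable {
    private final int id;                 // resource id, analogous to hbe_ptr::_id
    private final AtomicInteger refs;     // shared among all copies of the handle
    private boolean closed;
    private SharedHandle(int id, AtomicInteger refs) {
        this.id = id;
        this.refs = refs;
    }
    static SharedHandle acquire(int id) {
        return new SharedHandle(id, new AtomicInteger(1));
    }
    // Analogous to the copy constructor: a new owner of the same resource.
    SharedHandle share() {
        refs.incrementAndGet();
        return new SharedHandle(id, refs);
    }
    int id() { return id; }
    // Analogous to the destructor: the resource is released only when the
    // last remaining owner is closed.
    @Override
    public void close() {
        if (closed) {
            return;
        }
        closed = true;
        if (refs.decrementAndGet() == 0) {
            System.out.println("releasing H-Blank effect slot " + id);
        }
    }
}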
401 | /*
* Hedgewars for Android. An Android port of Hedgewars, a free turn based strategy game
* Copyright (c) 2011-2012 <NAME> <<EMAIL>>
* Copyright (C) 2012 <NAME> <<EMAIL>>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.hedgewars.hedgeroid.Datastructures;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hedgewars.hedgeroid.R;
import org.hedgewars.hedgeroid.util.FileUtils;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class FrontendDataUtils {
/**
* @throws FileNotFoundException if the sdcard isn't available or the Maps directory doesn't exist
*/
public static List<MapFile> getMaps(Context c) throws FileNotFoundException {
File[] files = FileUtils.getFilesFromRelativeDir(c,"Maps");
List<MapFile> ret = new ArrayList<MapFile>();
for(File f : files) {
boolean isMission = FileUtils.hasFileWithSuffix(f, ".lua");
ret.add(new MapFile(f.getName(), isMission));
}
return ret;
}
/**
* Returns a list of all multiplayer scripts (game styles)
* @throws FileNotFoundException if the sdcard isn't available or the Scripts/Multiplayer directory doesn't exist
*/
public static List<String> getGameStyles(Context c) throws FileNotFoundException {
File[] files = FileUtils.getFilesFromRelativeDir(c, "Scripts/Multiplayer");
List<String> ret = new ArrayList<String>();
/*
* Caution: It is important that the "empty" style has this exact name, because
* it will be interpreted as "don't load a script" by the frontlib, and also by
* the QtFrontend in a netgame. This should probably be improved some time
* (maybe TODO add a dummy script called "Normal" to the MP scripts?)
*/
ret.add("Normal");
for(int i = 0; i < files.length; i++) {
String name = files[i].getName();
if(name.endsWith(".lua")){
                //replace _ by a space and remove the last four characters (.lua)
ret.add(name.replace('_', ' ').substring(0, name.length()-4));
}
}
return ret;
}
/**
* @throws FileNotFoundException if the sdcard isn't available or the Themes directory doesn't exist
*/
public static List<String> getThemes(Context c) throws FileNotFoundException {
return FileUtils.getDirsWithFileSuffix(c, "Themes", "icon.png");
}
/**
* @throws FileNotFoundException if the sdcard isn't available or the Graphics/Graves directory doesn't exist
*/
public static List<Map<String, ?>> getGraves(Context c) throws FileNotFoundException {
File gravePath = FileUtils.getDataPathFile(c, "Graphics", "Graves");
List<String> names = FileUtils.getFileNamesFromDirWithSuffix(c,"Graphics/Graves", ".png", true);
List<Map<String, ?>> data = new ArrayList<Map<String, ?>>(names.size());
for(String s : names){
HashMap<String, Object> map = new HashMap<String, Object>();
map.put("txt", s);
Bitmap b = BitmapFactory.decodeFile(new File(gravePath, s + ".png").getAbsolutePath());
int width = b.getWidth();
if(b.getHeight() > width){
// some pictures contain more 'frames' underneath each other, if so we only use the first frame
b = Bitmap.createBitmap(b, 0, 0, width, width);
}
map.put("img", b);
data.add(map);
}
return data;
}
/**
* @throws FileNotFoundException if the sdcard isn't available or the Graphics/Graves directory doesn't exist
*/
public static List<Map<String, ?>> getFlags(Context c) throws FileNotFoundException {
File flagsPath = FileUtils.getDataPathFile(c, "Graphics", "Flags");
List<String> names = FileUtils.getFileNamesFromDirWithSuffix(c, "Graphics/Flags", ".png", true);
List<Map<String, ?>> data = new ArrayList<Map<String, ?>>(names.size());
for(String s : names){
Map<String, Object> map = new HashMap<String, Object>();
map.put("txt", s);
Bitmap b = BitmapFactory.decodeFile(new File(flagsPath, s + ".png").getAbsolutePath());
map.put("img", b);
data.add(map);
}
return data;
}
/**
* @throws FileNotFoundException if the sdcard isn't available or the Sounds/voices directory doesn't exist
*/
public static List<String> getVoices(Context c) throws FileNotFoundException {
File[] files = FileUtils.getFilesFromRelativeDir(c, "Sounds/voices");
List<String> ret = new ArrayList<String>();
for(File f : files){
if(f.isDirectory()) ret.add(f.getName());
}
return ret;
}
/**
* @throws FileNotFoundException if the sdcard isn't available or the Forts directory doesn't exist
*/
public static List<String> getForts(Context c) throws FileNotFoundException {
return FileUtils.getFileNamesFromDirWithSuffix(c,"Forts", "L.png", true);
}
public static List<Map<String, ?>> getTypes(Context c){
List<Map<String, ?>> data = new ArrayList<Map<String, ?>>(6);
String[] levels = {c.getString(R.string.human), c.getString(R.string.bot5), c.getString(R.string.bot4), c.getString(R.string.bot3), c.getString(R.string.bot2), c.getString(R.string.bot1)};
int[] images = {R.drawable.human, R.drawable.bot5, R.drawable.bot4, R.drawable.bot3, R.drawable.bot2, R.drawable.bot1};
for(int i = 0; i < levels.length; i++){
Map<String, Object> map = new HashMap<String, Object>();
map.put("txt", levels[i]);
map.put("img", images[i]);
map.put("level", i);
data.add(map);
}
return data;
}
/**
* @throws FileNotFoundException if the sdcard isn't available or the Graphics/Hats directory doesn't exist
*/
public static List<Map<String, ?>> getHats(Context c) throws FileNotFoundException {
List<String> files = FileUtils.getFileNamesFromDirWithSuffix(c,"Graphics/Hats", ".png", true);
File hatsPath = FileUtils.getDataPathFile(c, "Graphics", "Hats");
int size = files.size();
List<Map<String, ?>> data = new ArrayList<Map<String, ?>>(size);
for(String s : files){
Map<String, Object> hashmap = new HashMap<String, Object>();
hashmap.put("txt", s);
Bitmap b = BitmapFactory.decodeFile(new File(hatsPath, s + ".png").getAbsolutePath());
b = Bitmap.createBitmap(b, 0,0,b.getWidth()/2, b.getWidth()/2);
hashmap.put("img", b);
data.add(hashmap);
}
return data;
}
public static List<Team> getTeams(Context c) {
List<Team> ret = new ArrayList<Team>();
File teamsDir = new File(c.getFilesDir(), Team.DIRECTORY_TEAMS);
File[] teamFileNames = teamsDir.listFiles();
if(teamFileNames != null){
for(File file : teamFileNames){
if(file.getName().endsWith(".hwt")) {
Team team = Team.load(file);
if(team != null){
ret.add(team);
}
}
}
}
return ret;
}
}
| 2,556 |
465 | <filename>Code/Chenglong/feature_wordnet_similarity.py
# -*- coding: utf-8 -*-
"""
@author: <NAME> <<EMAIL>>
@brief: wordnet similarity based features (veeerrry time consuming)
@note: in our final submission, we are only able to generate WordNet_Path_Similarity between
search_term and product_title in reasonable time.
"""
"""
http://stackoverflow.com/questions/16877517/compare-similarity-of-terms-expressions-using-nltk
http://stackoverflow.com/questions/22031968/how-to-find-distance-between-two-synset-using-python-nltk-in-wordnet-hierarchy
#----------------------------------------------------------------------------------------
Path similarity, wup_similarity and lch_similarity, all of these should work
since they are based on the distance between two synsets in the Wordnet hierarchy.
dog = wn.synset('dog.n.01')
cat = wn.synset('cat.n.01')
dog.path_similarity(cat)
dog.lch_similarity(cat)
dog.wup_similarity(cat)
#----------------------------------------------------------------------------------------
synset1.path_similarity(synset2):
Return a score denoting how similar two word senses are, based on the shortest
path that connects the senses in the is-a (hypernym/hypnoym) taxonomy. The
score is in the range 0 to 1, except in those cases where a path cannot be
found (will only be true for verbs as there are many distinct verb taxonomies),
in which case -1 is returned. A score of 1 represents identity i.e. comparing
a sense with itself will return 1.
#----------------------------------------------------------------------------------------
synset1.lch_similarity(synset2), Leacock-Chodorow Similarity:
Return a score denoting how similar two word senses are, based on the shortest
path that connects the senses (as above) and the maximum depth of the taxonomy
in which the senses occur. The relationship is given as -log(p/2d) where p is
the shortest path length and d the taxonomy depth.
#----------------------------------------------------------------------------------------
synset1.wup_similarity(synset2), Wu-Palmer Similarity:
Return a score denoting how similar two word senses are, based on the depth of the
two senses in the taxonomy and that of their Least Common Subsumer (most specific
ancestor node). Note that at this time the scores given do not always agree with
those given by Pedersen's Perl implementation of Wordnet Similarity.
"""
import string
import numpy as np
import pandas as pd
from nltk.corpus import wordnet as wn
import config
from utils import dist_utils, ngram_utils, nlp_utils, pkl_utils
from utils import logging_utils, time_utils
from feature_base import BaseEstimator, PairwiseFeatureWrapper
# tune the token pattern to get a better correlation with y_train
# token_pattern = r"(?u)\b\w\w+\b"
# token_pattern = r"\w{1,}"
# token_pattern = r"\w+"
# token_pattern = r"[\w']+"
token_pattern = " " # just split the text into tokens
class WordNet_Similarity(BaseEstimator):
"""Double aggregation features"""
def __init__(self, obs_corpus, target_corpus, metric="path", aggregation_mode_prev="", aggregation_mode=""):
super().__init__(obs_corpus, target_corpus, aggregation_mode, None, aggregation_mode_prev)
self.metric = metric
if self.metric == "path":
self.metric_func = lambda syn1, syn2: wn.path_similarity(syn1, syn2)
elif self.metric == "lch":
self.metric_func = lambda syn1, syn2: wn.lch_similarity(syn1, syn2)
elif self.metric == "wup":
self.metric_func = lambda syn1, syn2: wn.wup_similarity(syn1, syn2)
else:
raise(ValueError("Wrong similarity metric: %s, should be one of path/lch/wup."%self.metric))
def __name__(self):
feat_name = []
for m1 in self.aggregation_mode_prev:
for m in self.aggregation_mode:
n = "WordNet_%s_Similarity_%s_%s"%(
string.capwords(self.metric), string.capwords(m1), string.capwords(m))
feat_name.append(n)
return feat_name
def _maximum_similarity_for_two_synset_list(self, syn_list1, syn_list2):
s = 0.
if syn_list1 and syn_list2:
for syn1 in syn_list1:
for syn2 in syn_list2:
try:
_s = self.metric_func(syn1, syn2)
except:
_s = config.MISSING_VALUE_NUMERIC
if _s and _s > s:
s = _s
return s
def transform_one(self, obs, target, id):
obs_tokens = nlp_utils._tokenize(obs, token_pattern)
target_tokens = nlp_utils._tokenize(target, token_pattern)
obs_synset_list = [wn.synsets(obs_token) for obs_token in obs_tokens]
target_synset_list = [wn.synsets(target_token) for target_token in target_tokens]
val_list = []
for obs_synset in obs_synset_list:
_val_list = []
for target_synset in target_synset_list:
_s = self._maximum_similarity_for_two_synset_list(obs_synset, target_synset)
_val_list.append(_s)
if len(_val_list) == 0:
_val_list = [config.MISSING_VALUE_NUMERIC]
val_list.append( _val_list )
if len(val_list) == 0:
val_list = [[config.MISSING_VALUE_NUMERIC]]
return val_list
class WordNet_Path_Similarity(WordNet_Similarity):
def __init__(self, obs_corpus, target_corpus, aggregation_mode_prev="", aggregation_mode=""):
super().__init__(obs_corpus, target_corpus, "path", aggregation_mode_prev, aggregation_mode)
class WordNet_Lch_Similarity(WordNet_Similarity):
def __init__(self, obs_corpus, target_corpus, aggregation_mode_prev="", aggregation_mode=""):
super().__init__(obs_corpus, target_corpus, "lch", aggregation_mode_prev, aggregation_mode)
class WordNet_Wup_Similarity(WordNet_Similarity):
def __init__(self, obs_corpus, target_corpus, aggregation_mode_prev="", aggregation_mode=""):
super().__init__(obs_corpus, target_corpus, "wup", aggregation_mode_prev, aggregation_mode)
# ---------------------------- Main --------------------------------------
def main():
logname = "generate_feature_wordnet_similarity_%s.log"%time_utils._timestamp()
logger = logging_utils._get_logger(config.LOG_DIR, logname)
#### NOTE: use data BEFORE STEMMING
dfAll = pkl_utils._load(config.ALL_DATA_LEMMATIZED)
# WordNet_Lch_Similarity and WordNet_Wup_Similarity are not used in final submission
generators = [
WordNet_Path_Similarity,
WordNet_Lch_Similarity,
WordNet_Wup_Similarity,
][:1]
obs_fields_list = []
target_fields_list = []
# only search_term and product_title are used in final submission
obs_fields_list.append( ["search_term", "search_term_alt", "search_term_auto_corrected"][:1] )
target_fields_list.append( ["product_title", "product_description", "product_attribute"][:1] )
# double aggregation
aggregation_mode_prev = ["mean", "max", "min", "median"]
aggregation_mode = ["mean", "std", "max", "min", "median"]
for obs_fields, target_fields in zip(obs_fields_list, target_fields_list):
for generator in generators:
param_list = [aggregation_mode_prev, aggregation_mode]
pf = PairwiseFeatureWrapper(generator, dfAll, obs_fields, target_fields, param_list, config.FEAT_DIR, logger)
pf.go()
if __name__ == "__main__":
main()
| 2,889 |
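The docstring above defines path similarity in terms of the shortest is-a path between two senses; NLTK's path_similarity effectively scores 1/(1+d) for a shortest path of length d, with identity giving 1 and unreachable pairs giving -1. As a toy, self-contained Java sketch of that scoring idea (the little taxonomy below is invented and is not the real WordNet graph):
import java.util.*;
// Illustrative only: BFS shortest path over a toy hypernym graph, scored as
// 1 / (1 + distance). The real features use NLTK synsets, not this class.
final class PathSimilarity {
    private final Map<String, List<String>> edges = new HashMap<>();
    void link(String a, String b) {              // undirected is-a edge for the sketch
        edges.computeIfAbsent(a, k -> new ArrayList<>()).add(b);
        edges.computeIfAbsent(b, k -> new ArrayList<>()).add(a);
    }
    double similarity(String from, String to) {
        if (from.equals(to)) {
            return 1.0;                           // identity scores 1
        }
        Map<String, Integer> dist = new HashMap<>();
        Deque<String> queue = new ArrayDeque<>();
        dist.put(from, 0);
        queue.add(from);
        while (!queue.isEmpty()) {
            String cur = queue.poll();
            for (String next : edges.getOrDefault(cur, List.of())) {
                if (!dist.containsKey(next)) {
                    dist.put(next, dist.get(cur) + 1);
                    if (next.equals(to)) {
                        return 1.0 / (1 + dist.get(next));
                    }
                    queue.add(next);
                }
            }
        }
        return -1;                                // no connecting path
    }
    public static void main(String[] args) {
        PathSimilarity taxonomy = new PathSimilarity();
        taxonomy.link("dog", "canine");
        taxonomy.link("canine", "carnivore");
        taxonomy.link("carnivore", "feline");
        taxonomy.link("feline", "cat");
        System.out.println(taxonomy.similarity("dog", "cat"));   // 1/(1+4) = 0.2
    }
}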
1,491 | #include "fio_cli.h"
#include "http.h"
static void on_response(http_s *h);
int main(int argc, char const *argv[]) {
fio_cli_start(
argc, argv, 1, 1,
"This is an HTTP client example, use:\n"
"\n\tfioapp http://example.com/foo\n",
FIO_CLI_STRING("-unix -u Unix Socket address (has no place in url)."));
http_connect(fio_cli_unnamed(0), fio_cli_get("-u"),
.on_response = on_response);
fio_start(.threads = 1);
return 0;
}
static void on_response(http_s *h) {
if (h->status_str == FIOBJ_INVALID) {
/* first response is always empty, nothing was sent yet */
http_finish(h);
return;
}
/* Second response is actual response */
FIOBJ r = http_req2str(h);
fprintf(stderr, "%s\n", fiobj_obj2cstr(r).data);
fio_stop();
}
| 347 |
413 | <filename>python/minicaffe/craft.py<gh_stars>100-1000
# coding = utf-8
# pylint: disable=too-many-arguments, invalid-name
"""Crafter for generate prototxt string"""
class LayerCrafter(object):
"""Layer Crafter for layer prototxt generation
"""
def __init__(self, **kwargs):
"""parameters for this layer
Parameters
----------
name: string, required
name of this layer
type: string, required
type of this layer, Input, Convolution, ...
bottom: list(string), optional
list of input blob name
top: list(string), optional
list of output blob name
params: dict, optional
extra parameters
"""
assert 'name' in kwargs
assert 'type' in kwargs
self.params = kwargs
def gen(self):
"""generate prototxt for this layer
Returns
-------
prototxt: string
prototxt for this layer
"""
prototxt = self.parse_key_value('layer', self.params)
return prototxt
def parse_key_value(self, key, value, indent=''):
"""parse a key value pair to prototxt string, value can be some type
Parameters
----------
key: string
key
value: string, int, float, bool, list, dict
value to be parsed
string, int, float, bool: directly parsed
list: parse and yield every element
dict: parse and yield every key value pair
indent: string
indent for the line
Returns
-------
s: string
parsed prototxt string
"""
if isinstance(value, str):
return '%s%s: "%s"\n'%(indent, key, value)
elif isinstance(value, bool):
return '%s%s: %s\n'%(indent, key, str(value).lower())
elif isinstance(value, int):
return '%s%s: %d\n'%(indent, key, value)
elif isinstance(value, float):
return '%s%s: %f\n'%(indent, key, value)
elif isinstance(value, list):
s = ""
for v in value:
s += self.parse_key_value(key, v, indent)
return s
elif isinstance(value, dict):
s = "%s%s {\n"%(indent, key)
for key, val in list(value.items()):
s += self.parse_key_value(key, val, indent+'\t')
s += "%s}\n"%(indent)
return s
else:
raise ValueError("unsupported value: %s"%value)
| 1,216 |
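parse_key_value above is a small recursive serializer: scalars become "key: value" lines, lists repeat the key once per element, and dicts open an indented block. The same shape in Java, as a hedged sketch — the class name and the sample layer are made up and this is not tied to any Caffe API:
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
// Illustrative sketch of the recursive key/value -> prototxt serialization used
// by LayerCrafter above.
final class ProtoTextWriter {
    static String write(String key, Object value, String indent) {
        StringBuilder out = new StringBuilder();
        if (value instanceof String) {
            out.append(indent).append(key).append(": \"").append(value).append("\"\n");
        } else if (value instanceof Boolean || value instanceof Number) {
            out.append(indent).append(key).append(": ").append(value).append("\n");
        } else if (value instanceof List) {
            for (Object item : (List<?>) value) {          // repeated field: one line per element
                out.append(write(key, item, indent));
            }
        } else if (value instanceof Map) {
            out.append(indent).append(key).append(" {\n");
            for (Map.Entry<?, ?> e : ((Map<?, ?>) value).entrySet()) {
                out.append(write(String.valueOf(e.getKey()), e.getValue(), indent + "  "));
            }
            out.append(indent).append("}\n");
        } else {
            throw new IllegalArgumentException("unsupported value: " + value);
        }
        return out.toString();
    }
    public static void main(String[] args) {
        Map<String, Object> layer = new LinkedHashMap<>();
        layer.put("name", "conv1");
        layer.put("type", "Convolution");
        layer.put("bottom", List.of("data"));
        layer.put("top", List.of("conv1"));
        System.out.print(write("layer", layer, ""));
    }
}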
1,768 | // Copyright (c) 2003 Compaq Corporation. All rights reserved.
// Portions Copyright (c) 2003 Microsoft Corporation. All rights reserved.
package tla2sany.semantic;
import java.util.Hashtable;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import tla2sany.explorer.ExploreNode;
import tla2sany.explorer.ExplorerVisitor;
import tla2sany.st.TreeNode;
import tla2sany.xml.SymbolContext;
import util.UniqueString;
/**
* This node represents a string literal in the specification--for
* example "abc". The only information added to the SemanticNode
* superclass is the level information and the UniqueString
* representation of the string.
*/
public class StringNode extends ExprNode implements ExploreNode {
private UniqueString value;
public StringNode(TreeNode stn, boolean strip) {
super(StringKind, stn);
this.value = stn.getUS();
if (strip) {
// Strip off quote marks from image in stn
String str = this.value.toString();
str = str.substring(1, str.length()-1);
this.value = UniqueString.uniqueStringOf(str);
/*********************************************************************
* Setting levelChecked shouldn't be necessary. *
*********************************************************************/
// this.levelChecked = 99 ;
}
}
/**
* Returns the UniqueString representation of the string.
*/
public final UniqueString getRep() { return this.value; }
/* Level Checking */
@Override
public final boolean levelCheck(int iter) {
levelChecked = iter;
/*********************************************************************
* Set it just to show that levelCHeck was called. *
*********************************************************************/
return true;
}
// public final int getLevel() { return ConstantLevel; }
//
// public final HashSet getLevelParams() { return EmptySet; }
//
// public final SetOfLevelConstraints getLevelConstraints() {
// return EmptyLC;
// }
//
// public final SetOfArgLevelConstraints getArgLevelConstraints() {
// return EmptyALC;
// }
//
// public final HashSet getArgLevelParams() { return EmptySet; }
/**
* toString, levelDataToString, & walkGraph methods to implement
* ExploreNode interface
*/
// public final String levelDataToString() {
// return "Level: " + this.getLevel() + "\n" +
// "LevelParameters: " + this.getLevelParams() + "\n" +
// "LevelConstraints: " + this.getLevelConstraints() + "\n" +
// "ArgLevelConstraints: " + this.getArgLevelConstraints() + "\n" +
// "ArgLevelParams: " + this.getArgLevelParams() + "\n" ;
// }
@Override
public final void walkGraph(Hashtable<Integer, ExploreNode> semNodesTable, ExplorerVisitor visitor) {
Integer uid = Integer.valueOf(myUID);
if (semNodesTable.get(uid) != null) return;
semNodesTable.put(uid, this);
visitor.preVisit(this);
visitor.postVisit(this);
}
final String PrintVersion(String str) {
StringBuffer buf = new StringBuffer(str.length()) ;
for (int i = 0 ; i < str.length() ; i++) {
switch (str.charAt(i)) {
case '\"' :
buf.append("\\\"") ;
break ;
case '\\' :
buf.append("\\\\") ;
break ;
case '\t' :
buf.append("\\t") ;
break ;
case '\n' :
buf.append("\\n") ;
break ;
case '\f' :
buf.append("\\f") ;
break ;
case '\r' :
buf.append("\\r") ;
break ;
default :
buf.append(str.charAt(i)) ;
break ;
} // switch
}; // for
return buf.toString();
}
@Override
public final String toString(int depth) {
if (depth <= 0) return "";
return "\n*StringNode: " + super.toString(depth)
+ "Value: '" + PrintVersion(value.toString()) +
"'" + " Length: " + value.length();
}
@Override
protected Element getLevelElement(Document doc, SymbolContext context) {
Element e = doc.createElement("StringValue");
Node n = doc.createTextNode(value.toString());
e.appendChild(n);
return appendElement(doc, "StringNode", e);
// return appendText(doc,"StringNode",value.toString());
}
}
| 1,714 |
772 | # Copyright 2021 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import logging
from sqlalchemy.orm.session import Session
from fedlearner_webconsole.rpc.client import RpcClient
from fedlearner_webconsole.job.models import Job, JobDependency, JobState
from fedlearner_webconsole.proto import common_pb2
from fedlearner_webconsole.utils.metrics import emit_counter
class JobService:
def __init__(self, session: Session):
self._session = session
def is_ready(self, job: Job) -> bool:
deps = self._session.query(JobDependency).filter_by(
dst_job_id=job.id).all()
for dep in deps:
src_job = self._session.query(Job).get(dep.src_job_id)
assert src_job is not None, 'Job {} not found'.format(
dep.src_job_id)
if not src_job.state == JobState.COMPLETED:
return False
return True
@staticmethod
def is_peer_ready(job: Job) -> bool:
project_config = job.project.get_config()
for party in project_config.participants:
client = RpcClient(project_config, party)
resp = client.check_job_ready(job.name)
if resp.status.code != common_pb2.STATUS_SUCCESS:
emit_counter('check_peer_ready_failed', 1)
return True
if not resp.is_ready:
return False
return True
def update_running_state(self, job_name):
job = self._session.query(Job).filter_by(name=job_name).first()
if job is None:
emit_counter('[JobService]job_not_found', 1)
return
if not job.state == JobState.STARTED:
emit_counter('[JobService]wrong_job_state', 1)
return
if job.is_flapp_complete():
job.complete()
logging.debug('[JobService]change job %s state to %s',
job.name, JobState(job.state))
elif job.is_flapp_failed():
job.fail()
logging.debug('[JobService]change job %s state to %s',
job.name, JobState(job.state))
| 1,112 |
947 | <filename>simpleimage.core/src/main/java/com/alibaba/simpleimage/codec/jpeg/HuffmanTable.java
/*
* Copyright 1999-2101 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.simpleimage.codec.jpeg;
import java.io.IOException;
import com.alibaba.simpleimage.io.ImageInputStream;
public class HuffmanTable {
public static final int TYPE_DC = 0;
public static final int TYPE_AC = 1;
// raw data
private int Lh; // Huffman
private int Tc; // Table
private int Th; // Huffman
private int[] L; // bits
private int[] V; // huffval
// general data
private int[] mincode = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 };
private int[] maxcode = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 };
private int[] valptr = new int[16 + 1];
private int[] huffcode = new int[256 + 1];
private int[] huffsize = new int[256 + 1];
public HuffmanTable(int Tc, int Th, int[] bits, int[] huffval){
this.Tc = Tc;
this.Th = Th;
this.L = bits;
this.V = huffval;
init();
}
/**
*TODO JPEG Specification 112
*/
private void init() {
generalHuffsize();
generalHuffcode();
int i = 0, j = 0;
for (;;) {
i++;
if (i > 16) {
break;
}
if (L[i] == 0) {
maxcode[i] = -1;
continue;
} else {
valptr[i] = j;
mincode[i] = huffcode[j];
j = j + L[i] - 1;
maxcode[i] = huffcode[j];
j++;
}
}
maxcode[17] = 0xFFFFFF;
}
private void generalHuffcode() {
int k = 0, code = 0, si = huffsize[0];
for (;;) {
do {
huffcode[k] = code;
code++;
k++;
} while (huffsize[k] == si);
if (huffsize[k] == 0) {
break;
}
do {
code = code << 1;
si++;
} while (huffsize[k] != si);
}
}
private void generalHuffsize() {
int k = 0, i = 1, j = 1;
do {
while (j <= L[i]) {
huffsize[k] = i;
k++;
j++;
}
i++;
j = 1;
} while (i <= 16);
huffsize[k] = 0;
}
public int decode(ImageInputStream in) throws IOException, JPEGMarkerException {
int i = 1, j = 0, code = 0, value = 0;
code = in.readBit();
for (;;) {
if (code > maxcode[i]) {
i++;
code = (code << 1) | in.readBit();
} else {
break;
}
}
j = valptr[i];
j = j + code - mincode[i];
value = V[j];
return value;
}
public int extend(int diff, int t) {
int Vt = 1 << (t - 1); // source logic is Vt = Math.pow(2, t-1);
if (diff < Vt) {
Vt = (-1 << t) + 1;
diff = diff + Vt;
}
return diff;
}
public int getLh() {
return Lh;
}
public void setLh(int lh) {
Lh = lh;
}
public int getTc() {
return Tc;
}
public void setTc(int tc) {
Tc = tc;
}
public int getTh() {
return Th;
}
public void setTh(int th) {
Th = th;
}
public int[] getL() {
return L;
}
public void setL(int[] l) {
L = l;
}
public int[] getV() {
return V;
}
public void setV(int[] v) {
V = v;
}
}
| 2,971 |
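HuffmanTable.init() above is the JPEG spec's canonical-code construction: expand the BITS array into a flat list of code lengths (huffsize), assign consecutive codes within each length and shift left when the length grows (huffcode), then derive mincode/maxcode/valptr for decoding. A standalone Java sketch of just the code-assignment step, using a made-up BITS table:
// Illustrative, standalone sketch (not part of the simpleimage codec): canonical
// Huffman code assignment from a BITS-style table of code lengths, the same
// construction generalHuffsize()/generalHuffcode() perform above.
final class CanonicalCodes {
    // bits[len] = number of codes of length len, for len = 1..16, as in a DHT segment.
    static int[] assign(int[] bits) {
        java.util.List<Integer> sizes = new java.util.ArrayList<>();   // huffsize
        for (int len = 1; len <= 16; len++) {
            for (int n = 0; n < bits[len]; n++) {
                sizes.add(len);
            }
        }
        int[] codes = new int[sizes.size()];                           // huffcode
        int code = 0;
        int prevLen = sizes.isEmpty() ? 0 : sizes.get(0);
        for (int k = 0; k < sizes.size(); k++) {
            while (sizes.get(k) > prevLen) {
                code <<= 1;        // moving to a longer code length: append a zero bit
                prevLen++;
            }
            codes[k] = code++;     // consecutive codes within one length
        }
        return codes;
    }
    public static void main(String[] args) {
        int[] bits = new int[17]; // hypothetical table: two 2-bit codes and one 3-bit code
        bits[2] = 2;
        bits[3] = 1;
        for (int c : assign(bits)) {
            System.out.println(Integer.toBinaryString(c));
        }
        // Prints 0, 1, 100 — i.e. the canonical codes 00, 01 and 100.
    }
}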
1,664 | <reponame>likenamehaojie/Apache-Ambari-ZH<filename>contrib/views/capacity-scheduler/src/main/resources/ui/app/assets/data/resourcemanager.json
{
"href" : "http://c6401.ambari.apache.org:8080/api/v1/clusters/MyCluster/services/YARN/components/RESOURCEMANAGER?fields=host_components",
"ServiceComponentInfo" : {
"cluster_name" : "MyCluster",
"component_name" : "RESOURCEMANAGER",
"service_name" : "YARN"
},
"host_components" : [
{
"href" : "http://c6401.ambari.apache.org:8080/api/v1/clusters/MyCluster/hosts/c6401.ambari.apache.org/host_components/RESOURCEMANAGER",
"HostRoles" : {
"cluster_name" : "MyCluster",
"component_name" : "RESOURCEMANAGER",
"host_name" : "c6401.ambari.apache.org"
}
}
]
} | 354 |
458 | <reponame>minond/ffi-overhead
extern "C" {
#include <stdio.h>
#include "newplus/plus.h"
}
#include <string>
void run(int count)
{
long long start = current_timestamp();
int x = 0;
while (x < count)
x = plusone(x);
printf("%lld\n", current_timestamp() - start);
}
int main(int argc, char** argv)
{
if (argc == 1) {
printf("First arg (0 - 2000000000) is required.\n");
return 1;
}
int count = std::stoi(argv[1]);
if (count <= 0 || count > 2000000000) {
printf("Must be a positive number not exceeding 2 billion.\n");
return 1;
}
// start immediately
run(count);
return 0;
}
| 308 |
1,402 | <reponame>kvmanohar22/gtsam<gh_stars>1000+
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: <NAME>, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file JacobianFactor.h
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @date Dec 8, 2010
*/
#pragma once
#include <gtsam/linear/linearExceptions.h>
namespace gtsam {
/* ************************************************************************* */
template<typename TERMS>
JacobianFactor::JacobianFactor(const TERMS&terms, const Vector &b, const SharedDiagonal& model)
{
fillTerms(terms, b, model);
}
/* ************************************************************************* */
template<typename KEYS>
JacobianFactor::JacobianFactor(
const KEYS& keys, const VerticalBlockMatrix& augmentedMatrix, const SharedDiagonal& model) :
Base(keys), Ab_(augmentedMatrix)
{
// Check noise model dimension
if(model && (DenseIndex)model->dim() != augmentedMatrix.rows())
throw InvalidNoiseModel(augmentedMatrix.rows(), model->dim());
// Check number of variables
if((DenseIndex)Base::keys_.size() != augmentedMatrix.nBlocks() - 1)
throw std::invalid_argument(
"Error in JacobianFactor constructor input. Number of provided keys plus\n"
"one for the RHS vector must equal the number of provided matrix blocks.");
// Check RHS dimension
if(augmentedMatrix(augmentedMatrix.nBlocks() - 1).cols() != 1)
throw std::invalid_argument(
"Error in JacobianFactor constructor input. The last provided matrix block\n"
"must be the RHS vector, but the last provided block had more than one column.");
// Take noise model
model_ = model;
}
/* ************************************************************************* */
template<typename TERMS>
void JacobianFactor::fillTerms(const TERMS& terms, const Vector& b, const SharedDiagonal& noiseModel)
{
// Check noise model dimension
if(noiseModel && (DenseIndex)noiseModel->dim() != b.size())
throw InvalidNoiseModel(b.size(), noiseModel->dim());
// Resize base class key vector
Base::keys_.resize(terms.size());
// Get dimensions of matrices
std::vector<size_t> dimensions;
dimensions.reserve(terms.size());
for(typename TERMS::const_iterator it = terms.begin(); it != terms.end(); ++it) {
const std::pair<Key, Matrix>& term = *it;
const Matrix& Ai = term.second;
dimensions.push_back(Ai.cols());
}
// Construct block matrix
Ab_ = VerticalBlockMatrix(dimensions, b.size(), true);
// Check and add terms
DenseIndex i = 0; // For block index
for(typename TERMS::const_iterator it = terms.begin(); it != terms.end(); ++it) {
const std::pair<Key, Matrix>& term = *it;
Key key = term.first;
const Matrix& Ai = term.second;
// Check block rows
if(Ai.rows() != Ab_.rows())
throw InvalidMatrixBlock(Ab_.rows(), Ai.rows());
// Assign key and matrix
Base::keys_[i] = key;
Ab_(i) = Ai;
// Increment block index
++ i;
}
// Assign RHS vector
getb() = b;
// Assign noise model
model_ = noiseModel;
}
} // gtsam
| 1,136 |
636 | <filename>fluent-mybatis-test/src/test/java/cn/org/atool/fluent/mybatis/generator/shared2/mix/HomeAddressTableMix.java
package cn.org.atool.fluent.mybatis.generator.shared2.mix;
import cn.org.atool.fluent.mybatis.generator.shared2.dm.HomeAddressDataMap;
import org.test4j.hamcrest.matcher.modes.EqMode;
import org.test4j.module.spec.IMix;
import org.test4j.module.spec.annotations.Step;
/**
 * Common helpers for preparing and verifying data in the [home_address] database table.
*
* @author Powered By Test4J
*/
@SuppressWarnings({"unused", "rawtypes", "UnusedReturnValue"})
public class HomeAddressTableMix implements IMix {
@Step("清空表[home_address]数据")
public HomeAddressTableMix cleanHomeAddressTable() {
db.table("home_address").clean();
return this;
}
@Step("准备表[home_address]数据{1}")
public HomeAddressTableMix readyHomeAddressTable(HomeAddressDataMap data) {
db.table("home_address").insert(data);
return this;
}
@Step("验证表[home_address]有全表数据{1}")
public HomeAddressTableMix checkHomeAddressTable(HomeAddressDataMap data, EqMode... modes) {
db.table("home_address").query().eqDataMap(data, modes);
return this;
}
@Step("验证表[home_address]有符合条件{1}的数据{2}")
public HomeAddressTableMix checkHomeAddressTable(String where, HomeAddressDataMap data,
EqMode... modes) {
db.table("home_address").queryWhere(where).eqDataMap(data, modes);
return this;
}
@Step("验证表[home_address]有符合条件{1}的数据{2}")
public HomeAddressTableMix checkHomeAddressTable(HomeAddressDataMap where,
HomeAddressDataMap data, EqMode... modes) {
db.table("home_address").queryWhere(where).eqDataMap(data, modes);
return this;
}
@Step("验证表[home_address]有{1}条符合条件{2}的数据")
public HomeAddressTableMix countHomeAddressTable(int count, HomeAddressDataMap where) {
db.table("home_address").queryWhere(where).sizeEq(count);
return this;
}
@Step("验证表[home_address]有{1}条符合条件{2}的数据")
public HomeAddressTableMix countHomeAddressTable(int count, String where) {
db.table("home_address").queryWhere(where).sizeEq(count);
return this;
}
@Step("验证表[home_address]有{1}条数据")
public HomeAddressTableMix countHomeAddressTable(int count) {
db.table("home_address").query().sizeEq(count);
return this;
}
}
| 962 |
938 | <reponame>karakufire/TinkersConstruct
package slimeknights.tconstruct.library.client.modifiers;
import com.google.common.collect.ImmutableList;
import com.google.gson.JsonObject;
import lombok.RequiredArgsConstructor;
import net.minecraft.client.renderer.model.BakedQuad;
import net.minecraft.client.renderer.model.RenderMaterial;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.util.JSONUtils;
import net.minecraft.util.math.vector.TransformationMatrix;
import slimeknights.mantle.client.model.util.MantleItemLayerModel;
import slimeknights.mantle.util.ItemLayerPixels;
import slimeknights.mantle.util.JsonHelper;
import slimeknights.tconstruct.library.modifiers.ModifierEntry;
import slimeknights.tconstruct.library.tools.nbt.IModifierToolStack;
import javax.annotation.Nullable;
import java.util.function.Function;
/**
* Default modifier model loader, loads a single texture from the standard path
*/
public class NormalModifierModel implements IBakedModifierModel {
/** Constant unbaked model instance, as they are all the same */
public static final IUnbakedModifierModel UNBAKED_INSTANCE = new Unbaked(-1, 0);
/** Textures to show */
private final RenderMaterial[] textures;
/** Color to apply to the texture */
private final int color;
/** Luminosity to apply to the texture */
private final int luminosity;
public NormalModifierModel(@Nullable RenderMaterial smallTexture, @Nullable RenderMaterial largeTexture, int color, int luminosity) {
this.color = color;
this.luminosity = luminosity;
this.textures = new RenderMaterial[]{ smallTexture, largeTexture };
}
public NormalModifierModel(@Nullable RenderMaterial smallTexture, @Nullable RenderMaterial largeTexture) {
this(smallTexture, largeTexture, -1, 0);
}
@Deprecated
@Override
public ImmutableList<BakedQuad> getQuads(IModifierToolStack tool, ModifierEntry entry, Function<RenderMaterial,TextureAtlasSprite> spriteGetter, TransformationMatrix transforms, boolean isLarge) {
return getQuads(tool, entry, spriteGetter, transforms, isLarge, -1, null);
}
@Override
public ImmutableList<BakedQuad> getQuads(IModifierToolStack tool, ModifierEntry entry, Function<RenderMaterial,TextureAtlasSprite> spriteGetter, TransformationMatrix transforms, boolean isLarge, int startTintIndex, @Nullable ItemLayerPixels pixels) {
int index = isLarge ? 1 : 0;
return MantleItemLayerModel.getQuadsForSprite(color, -1, spriteGetter.apply(textures[index]), transforms, luminosity, pixels);
}
@RequiredArgsConstructor
private static class Unbaked implements IUnbakedModifierModel {
private final int color;
private final int luminosity;
@Nullable
@Override
public IBakedModifierModel forTool(Function<String,RenderMaterial> smallGetter, Function<String,RenderMaterial> largeGetter) {
RenderMaterial smallTexture = smallGetter.apply("");
RenderMaterial largeTexture = largeGetter.apply("");
if (smallTexture != null || largeTexture != null) {
return new NormalModifierModel(smallTexture, largeTexture, color, luminosity);
}
return null;
}
@Override
public IUnbakedModifierModel configure(JsonObject data) {
// parse the two keys, if we ended up with something new create an instance
int color = JsonHelper.parseColor(JSONUtils.getString(data, "color", ""));
int luminosity = JSONUtils.getInt(data, "luminosity");
if (color != this.color || luminosity != this.luminosity) {
return new Unbaked(color, luminosity);
}
return this;
}
}
}
| 1,107 |
1,444 | <reponame>GabrielSturtevant/mage
package mage.cards.a;
import mage.abilities.effects.common.SacrificeEffect;
import mage.abilities.effects.keyword.BolsterEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.filter.StaticFilters;
import mage.target.TargetPlayer;
import java.util.UUID;
/**
*
* @author emerald000
*/
public final class AbzanAdvantage extends CardImpl {
public AbzanAdvantage(UUID ownerId, CardSetInfo setInfo) {
super(ownerId,setInfo,new CardType[]{CardType.INSTANT},"{1}{W}");
// Target player sacrifices an enchantment. Bolster 1.
this.getSpellAbility().addEffect(new SacrificeEffect(StaticFilters.FILTER_ENCHANTMENT_PERMANENT, 1, "Target player"));
this.getSpellAbility().addEffect(new BolsterEffect(1));
this.getSpellAbility().addTarget(new TargetPlayer());
}
private AbzanAdvantage(final AbzanAdvantage card) {
super(card);
}
@Override
public AbzanAdvantage copy() {
return new AbzanAdvantage(this);
}
}
| 383 |
2,151 | <reponame>chlorm-forks/gyp
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure that manual rules on Windows override the built in ones.
"""
import sys
import TestGyp
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'src'
test.run_gyp('override.gyp', chdir=CHDIR)
test.build('override.gyp', test.ALL, chdir=CHDIR)
expect = """\
Hello from program.c
Got 42.
"""
test.run_built_executable('program', chdir=CHDIR, stdout=expect)
test.pass_test()
| 232 |
831 | <reponame>phpc0de/idea-android
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.npw.assetstudio;
import static com.google.common.truth.Truth.assertThat;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.jetbrains.android.AndroidTestBase.getTestDataPath;
import static org.junit.Assert.fail;
import com.android.ide.common.util.AssetUtil;
import com.android.ide.common.util.PathString;
import com.android.ide.common.vectordrawable.VdIcon;
import com.android.resources.Density;
import com.android.tools.idea.npw.assetstudio.IconGenerator.IconOptions;
import com.android.tools.idea.npw.assetstudio.assets.ImageAsset;
import com.google.common.collect.ImmutableList;
import com.google.common.io.CharStreams;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.StreamUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.imageio.ImageIO;
import org.jetbrains.annotations.NotNull;
/**
* Shared test infrastructure for bitmap generator.
*/
public final class IconGeneratorTestUtil {
public enum SourceType { CLIPART, PNG, SVG }
static void checkGraphic(@NotNull IconGenerator generator,
@NotNull SourceType sourceType,
@NotNull String baseName,
int paddingPercent,
@NotNull List<String> expectedFolders,
@NotNull String goldenFileFolderName) throws IOException {
ImageAsset imageAsset = new ImageAsset();
if (sourceType == SourceType.CLIPART) {
imageAsset.setClipart(true);
imageAsset.color().setValue(new Color(0xA4C639)); // Android green.
}
File sourceFile = getSourceFile(sourceType);
imageAsset.imagePath().setValue(sourceFile);
imageAsset.paddingPercent().set(paddingPercent);
try {
checkGraphic(generator, imageAsset, baseName, expectedFolders, goldenFileFolderName);
} finally {
if (sourceType != SourceType.SVG) {
// Delete the temporary PNG file created by the test.
//noinspection ResultOfMethodCallIgnored
sourceFile.delete();
}
}
}
private static void checkGraphic(@NotNull IconGenerator generator,
@NotNull ImageAsset imageAsset,
@NotNull String baseName,
@NotNull List<String> expectedFolders,
@NotNull String goldenFileFolderName) throws IOException {
generator.sourceAsset().setValue(imageAsset);
generator.outputName().set(baseName);
IconOptions options = generator.createOptions(false);
Collection<GeneratedIcon> icons = generator.generateIcons(options).getIcons();
List<String> errors = new ArrayList<>();
List<String> actualFolders = new ArrayList<>(icons.size());
String testDataDir = getTestDataPath();
Path goldenRoot = Paths.get(testDataDir, "images", goldenFileFolderName);
for (GeneratedIcon generatedIcon : icons) {
PathString relativePath = generatedIcon.getOutputPath();
PathString folder = relativePath.getParent();
actualFolders.add(folder == null ? "" : folder.getFileName());
Path goldenBase = generatedIcon.getCategory() == IconCategory.PLAY_STORE ? goldenRoot : goldenRoot.resolve("res");
File goldenFile = goldenBase.resolve(relativePath.getNativePath()).toFile();
try (InputStream is = new BufferedInputStream(new FileInputStream(goldenFile))) {
if (generatedIcon instanceof GeneratedImageIcon) {
BufferedImage image = ((GeneratedImageIcon)generatedIcon).getImage();
BufferedImage goldenImage = ImageIO.read(is);
Density density = ((GeneratedImageIcon)generatedIcon).getDensity();
double maxDiffPercent = density == Density.NODPI ? 0.5 : 2.5 * Density.XXXHIGH.getDpiValue() / density.getDpiValue();
assertImageSimilar(relativePath, goldenImage, image, maxDiffPercent);
}
else if (generatedIcon instanceof GeneratedXmlResource) {
String text = ((GeneratedXmlResource)generatedIcon).getXmlText();
String goldenText = CharStreams.toString(new InputStreamReader(is, UTF_8));
assertThat(text.replace("\r\n", "\n")).isEqualTo(goldenText.replace("\r\n", "\n"));
}
} catch (FileNotFoundException e) {
if (generatedIcon instanceof GeneratedImageIcon) {
BufferedImage image = ((GeneratedImageIcon)generatedIcon).getImage();
generateGoldenImage(image, goldenFile);
errors.add("File did not exist, created " + goldenFile);
}
else if (generatedIcon instanceof GeneratedXmlResource) {
String text = ((GeneratedXmlResource)generatedIcon).getXmlText();
generateGoldenText(text, goldenFile);
errors.add("File did not exist, created " + goldenFile);
}
}
}
assertThat(errors).isEmpty();
assertThat(actualFolders).containsAllIn(expectedFolders);
}
@NotNull
private static File getSourceFile(@NotNull SourceType sourceType) throws IOException {
switch (sourceType) {
case CLIPART:
VirtualFile inputFile = VfsUtil.findFileByIoFile(getTestIconFile(), false);
File file = FileUtil.createTempFile("clipart", ".xml");
try (InputStream input = inputFile.getInputStream(); OutputStream output = new BufferedOutputStream(new FileOutputStream(file))) {
StreamUtil.copy(input, output);
}
return file;
case PNG: {
VdIcon androidIcon = new VdIcon(getTestIconFile().toURI().toURL());
BufferedImage sourceImage = androidIcon.renderIcon(512, 512);
File pngFile = FileUtil.createTempFile("android", ".png");
BufferedImage coloredImage = AssetUtil.filledImage(sourceImage, new Color(0xA4C639));
ImageIO.write(coloredImage, "PNG", pngFile);
return pngFile;
}
case SVG:
return new File(getTestDataPath(), "images/svg/android.svg");
default:
throw new IllegalArgumentException("Unrecognized source type: " + sourceType.toString());
}
}
private static void generateGoldenImage(@NotNull BufferedImage goldenImage, @NotNull File goldenFile) throws IOException {
assert !goldenFile.exists();
//noinspection ResultOfMethodCallIgnored
goldenFile.getParentFile().mkdirs();
ImageIO.write(goldenImage, "PNG", goldenFile);
}
private static void generateGoldenText(@NotNull String goldenText, @NotNull File goldenFile) throws IOException {
assert !goldenFile.exists();
//noinspection ResultOfMethodCallIgnored
goldenFile.getParentFile().mkdirs();
Files.write(goldenFile.toPath(), ImmutableList.of(goldenText), UTF_8);
}
@SuppressWarnings("SameParameterValue")
private static void assertImageSimilar(@NotNull PathString imagePath,
@NotNull BufferedImage goldenImage,
@NotNull BufferedImage image,
double maxPercentDifferent) throws IOException {
assertThat(Math.abs(goldenImage.getWidth() - image.getWidth()))
.named("difference in " + imagePath + " width")
.isLessThan(2);
assertThat(Math.abs(goldenImage.getHeight() - image.getHeight()))
.named("difference in " + imagePath + " height")
.isLessThan(2);
assertThat(image.getType()).isEqualTo(BufferedImage.TYPE_INT_ARGB);
if (goldenImage.getType() != BufferedImage.TYPE_INT_ARGB) {
BufferedImage temp = AssetUtil.newArgbBufferedImage(goldenImage.getWidth(), goldenImage.getHeight());
temp.getGraphics().drawImage(goldenImage, 0, 0, null);
goldenImage = temp;
}
assertThat(goldenImage.getType()).isEqualTo(BufferedImage.TYPE_INT_ARGB);
int imageWidth = Math.min(goldenImage.getWidth(), image.getWidth());
int imageHeight = Math.min(goldenImage.getHeight(), image.getHeight());
// Blur the images to account for the scenarios where there are pixel differences
// in where a sharp edge occurs.
// goldenImage = blur(goldenImage, 6);
// image = blur(image, 6);
BufferedImage deltaImage = AssetUtil.newArgbBufferedImage(3 * imageWidth, imageHeight);
Graphics g = deltaImage.getGraphics();
// Compute delta map
long delta = 0;
for (int y = 0; y < imageHeight; y++) {
for (int x = 0; x < imageWidth; x++) {
int goldenRgb = goldenImage.getRGB(x, y);
int rgb = image.getRGB(x, y);
if (goldenRgb == rgb) {
deltaImage.setRGB(imageWidth + x, y, 0x00808080);
continue;
}
// If the pixels have no opacity, don't delta colors at all.
if (((goldenRgb & 0xFF000000) == 0) && (rgb & 0xFF000000) == 0) {
deltaImage.setRGB(imageWidth + x, y, 0x00808080);
continue;
}
int deltaR = ((rgb & 0xFF0000) >>> 16) - ((goldenRgb & 0xFF0000) >>> 16);
int newR = 128 + deltaR & 0xFF;
int deltaG = ((rgb & 0x00FF00) >>> 8) - ((goldenRgb & 0x00FF00) >>> 8);
int newG = 128 + deltaG & 0xFF;
int deltaB = (rgb & 0x0000FF) - (goldenRgb & 0x0000FF);
int newB = 128 + deltaB & 0xFF;
int avgAlpha = ((((goldenRgb & 0xFF000000) >>> 24) + ((rgb & 0xFF000000) >>> 24)) / 2) << 24;
int newRGB = avgAlpha | newR << 16 | newG << 8 | newB;
deltaImage.setRGB(imageWidth + x, y, newRGB);
delta += Math.abs(deltaR);
delta += Math.abs(deltaG);
delta += Math.abs(deltaB);
}
}
// 3 different colors, 256 color levels.
long total = imageHeight * imageWidth * 3L * 256L;
float percentDifference = (float) (delta * 100 / (double) total);
if (percentDifference > maxPercentDifferent) {
// Expected on the left, golden on the right.
g.drawImage(goldenImage, 0, 0, null);
g.drawImage(image, 2 * imageWidth, 0, null);
// Labels
if (imageWidth > 80) {
g.setColor(Color.RED);
g.drawString("Expected", 10, 20);
g.drawString("Actual", 2 * imageWidth + 10, 20);
}
File output = new File(getTempDir(), "delta-" + imagePath.getRawPath().replace(File.separatorChar, '_'));
if (output.exists()) {
//noinspection ResultOfMethodCallIgnored
output.delete();
}
ImageIO.write(deltaImage, "PNG", output);
String message = String.format("Images differ (by %.1f%%) - see details in %s", percentDifference, output);
fail(message);
}
g.dispose();
}
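  // Editorial note (not part of the original class): with the normalization above, a
  // single pixel that is off by one in one channel of a 100x100 image contributes
  // 1 * 100 / (100 * 100 * 3 * 256) ~= 0.000013% difference, so the maxDiffPercent
  // thresholds computed in checkGraphic (0.5%, or 2.5% scaled by density) tolerate a
  // fair amount of per-pixel noise.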
private static File getTempDir() {
if (System.getProperty("os.name").equals("Mac OS X")) {
return new File("/tmp");
}
return new File(System.getProperty("java.io.tmpdir"));
}
private static File getTestIconFile() {
return new File(getTestDataPath(), "images/vd/ic_android_black_24dp.xml");
}
private IconGeneratorTestUtil() {}
}
| 4,672 |
852 | #include "FWCore/Framework/interface/Frameworkfwd.h"
#include "FWCore/Framework/interface/stream/EDProducer.h"
#include "FWCore/Framework/interface/Event.h"
#include "FWCore/Framework/interface/MakerMacros.h"
#include "FWCore/Framework/interface/makeRefToBaseProdFrom.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "FWCore/Utilities/interface/StreamID.h"
#include "DataFormats/BTauReco/interface/JetTag.h"
#include "DataFormats/BTauReco/interface/DeepFlavourTagInfo.h"
#include "PhysicsTools/ONNXRuntime/interface/ONNXRuntime.h"
#include "RecoBTag/ONNXRuntime/interface/tensor_fillers.h"
#include "RecoBTag/ONNXRuntime/interface/tensor_configs.h"
using namespace cms::Ort;
class DeepVertexONNXJetTagsProducer : public edm::stream::EDProducer<edm::GlobalCache<ONNXRuntime>> {
public:
explicit DeepVertexONNXJetTagsProducer(const edm::ParameterSet&, const ONNXRuntime*);
~DeepVertexONNXJetTagsProducer() override;
static void fillDescriptions(edm::ConfigurationDescriptions&);
static std::unique_ptr<ONNXRuntime> initializeGlobalCache(const edm::ParameterSet&);
static void globalEndJob(const ONNXRuntime*);
private:
typedef std::vector<reco::DeepFlavourTagInfo> TagInfoCollection;
typedef reco::JetTagCollection JetTagCollection;
void produce(edm::Event&, const edm::EventSetup&) override;
void make_inputs(unsigned i_jet, const reco::DeepFlavourTagInfo& taginfo);
const edm::EDGetTokenT<TagInfoCollection> src_;
std::vector<std::string> flav_names_;
std::vector<std::string> input_names_;
std::vector<std::string> output_names_;
const double min_jet_pt_;
const double max_jet_eta_;
enum InputIndexes { kGlobal = 0, kSeedingTracks = 1, kNeighbourTracks = 2 };
const static unsigned n_features_global_ = deepvertex::n_features_global;
const static unsigned n_seed_ = deepvertex::n_seed;
const static unsigned n_features_seed_ = deepvertex::n_features_seed;
const static unsigned n_neighbor_ = deepvertex::n_neighbor;
const static unsigned n_features_neighbor_ = deepvertex::n_features_neighbor;
const static std::vector<unsigned> input_sizes_;
// hold the input data
FloatArrays data_;
};
const std::vector<unsigned> DeepVertexONNXJetTagsProducer::input_sizes_{n_features_global_,
n_seed_* n_features_seed_,
n_neighbor_* n_features_neighbor_,
n_neighbor_* n_features_neighbor_,
n_neighbor_* n_features_neighbor_,
n_neighbor_* n_features_neighbor_,
n_neighbor_* n_features_neighbor_,
n_neighbor_* n_features_neighbor_,
n_neighbor_* n_features_neighbor_,
n_neighbor_* n_features_neighbor_,
n_neighbor_* n_features_neighbor_,
n_neighbor_* n_features_neighbor_};
DeepVertexONNXJetTagsProducer::DeepVertexONNXJetTagsProducer(const edm::ParameterSet& iConfig, const ONNXRuntime* cache)
: src_(consumes<TagInfoCollection>(iConfig.getParameter<edm::InputTag>("src"))),
flav_names_(iConfig.getParameter<std::vector<std::string>>("flav_names")),
input_names_(iConfig.getParameter<std::vector<std::string>>("input_names")),
output_names_(iConfig.getParameter<std::vector<std::string>>("output_names")),
min_jet_pt_(iConfig.getParameter<double>("min_jet_pt")),
max_jet_eta_(iConfig.getParameter<double>("max_jet_eta")) {
// get output names from flav_names
for (const auto& flav_name : flav_names_) {
produces<JetTagCollection>(flav_name);
}
assert(input_names_.size() == input_sizes_.size());
}
DeepVertexONNXJetTagsProducer::~DeepVertexONNXJetTagsProducer() {}
void DeepVertexONNXJetTagsProducer::fillDescriptions(edm::ConfigurationDescriptions& descriptions) {
// pfDeepFlavourJetTags
edm::ParameterSetDescription desc;
desc.add<edm::InputTag>("src", edm::InputTag("pfDeepFlavourTagInfos"));
desc.add<std::vector<std::string>>("input_names",
{"input_1",
"input_2",
"input_3",
"input_4",
"input_5",
"input_6",
"input_7",
"input_8",
"input_9",
"input_10",
"input_11",
"input_12"});
desc.add<edm::FileInPath>("model_path", edm::FileInPath("RecoBTag/Combined/data/DeepVertex/phase1_deepvertex.onnx"));
desc.add<std::vector<std::string>>("output_names", {"dense_6"});
desc.add<std::vector<std::string>>("flav_names", std::vector<std::string>{"probb", "probc", "probuds", "probg"});
desc.add<double>("min_jet_pt", 15.0);
desc.add<double>("max_jet_eta", 2.5);
descriptions.add("pfDeepVertexJetTags", desc);
}
std::unique_ptr<ONNXRuntime> DeepVertexONNXJetTagsProducer::initializeGlobalCache(const edm::ParameterSet& iConfig) {
return std::make_unique<ONNXRuntime>(iConfig.getParameter<edm::FileInPath>("model_path").fullPath());
}
void DeepVertexONNXJetTagsProducer::globalEndJob(const ONNXRuntime* cache) {}
void DeepVertexONNXJetTagsProducer::produce(edm::Event& iEvent, const edm::EventSetup& iSetup) {
edm::Handle<TagInfoCollection> tag_infos;
iEvent.getByToken(src_, tag_infos);
data_.clear();
std::vector<std::unique_ptr<JetTagCollection>> output_tags;
if (!tag_infos->empty()) {
unsigned good_taginfo_count = 0;
std::vector<bool> good_taginfo_jets(tag_infos->size(), false);
for (unsigned jet_n = 0; jet_n < tag_infos->size(); ++jet_n) {
const auto& jet_ref = (*tag_infos)[jet_n].jet();
if (jet_ref->pt() > min_jet_pt_ && std::fabs(jet_ref->eta()) < max_jet_eta_) {
good_taginfo_count++;
good_taginfo_jets[jet_n] = true;
}
}
// init data storage w correct size
for (const auto& len : input_sizes_) {
data_.emplace_back(good_taginfo_count * len, 0);
}
// initialize output collection
auto jet_ref = tag_infos->begin()->jet();
auto ref2prod = edm::makeRefToBaseProdFrom(jet_ref, iEvent);
for (std::size_t i = 0; i < flav_names_.size(); i++) {
output_tags.emplace_back(std::make_unique<JetTagCollection>(ref2prod));
}
// convert inputs
unsigned inputs_done_count = 0;
for (unsigned jet_n = 0; jet_n < tag_infos->size(); ++jet_n) {
if (good_taginfo_jets[jet_n]) {
const auto& taginfo = (*tag_infos)[jet_n];
make_inputs(inputs_done_count, taginfo);
inputs_done_count++;
}
}
// run prediction
assert(inputs_done_count == good_taginfo_count);
const auto outputs = globalCache()->run(input_names_, data_, {}, output_names_, good_taginfo_count)[0];
assert(outputs.size() == flav_names_.size() * good_taginfo_count);
// get the outputs
unsigned i_output = 0;
for (unsigned jet_n = 0; jet_n < tag_infos->size(); ++jet_n) {
const auto& jet_ref = (*tag_infos)[jet_n].jet();
for (std::size_t flav_n = 0; flav_n < flav_names_.size(); flav_n++) {
if (good_taginfo_jets[jet_n]) {
(*(output_tags[flav_n]))[jet_ref] = outputs[i_output];
++i_output;
} else {
(*(output_tags[flav_n]))[jet_ref] = -2;
}
}
}
} else {
// create empty output collection
for (std::size_t i = 0; i < flav_names_.size(); i++) {
output_tags.emplace_back(std::make_unique<JetTagCollection>());
}
}
// put into the event
for (std::size_t flav_n = 0; flav_n < flav_names_.size(); ++flav_n) {
iEvent.put(std::move(output_tags[flav_n]), flav_names_[flav_n]);
}
}
void DeepVertexONNXJetTagsProducer::make_inputs(unsigned i_jet, const reco::DeepFlavourTagInfo& taginfo) {
const auto& features = taginfo.features();
float* ptr = nullptr;
const float* start = nullptr;
unsigned offset = 0;
// jet variables
offset = i_jet * input_sizes_[kGlobal];
const auto& jet_features = features.jet_features;
ptr = &data_[kGlobal][offset];
start = ptr;
jet4vec_tensor_filler(ptr, jet_features);
assert(start + n_features_global_ - 1 == ptr);
// seeds
auto max_seed_n = std::min(features.seed_features.size(), (std::size_t)n_seed_);
offset = i_jet * input_sizes_[kSeedingTracks];
for (std::size_t seed_n = 0; seed_n < max_seed_n; seed_n++) {
const auto& seed_features = features.seed_features[seed_n];
ptr = &data_[kSeedingTracks][offset + seed_n * n_features_seed_];
start = ptr;
seedTrack_tensor_filler(ptr, seed_features);
assert(start + n_features_seed_ - 1 == ptr);
}
// neighbours
offset = i_jet * input_sizes_[kNeighbourTracks];
for (std::size_t seed_n = 0; seed_n < max_seed_n; seed_n++) {
const auto& neighbourTracks_features = features.seed_features[seed_n].nearTracks;
auto max_neighbour_n = std::min(neighbourTracks_features.size(), (std::size_t)n_neighbor_);
for (std::size_t neighbour_n = 0; neighbour_n < max_neighbour_n; neighbour_n++) {
ptr = &data_[kNeighbourTracks + seed_n][offset + neighbour_n * n_features_neighbor_];
start = ptr;
neighbourTrack_tensor_filler(ptr, neighbourTracks_features[neighbour_n]);
assert(start + n_features_neighbor_ - 1 == ptr);
}
}
}
//define this as a plug-in
DEFINE_FWK_MODULE(DeepVertexONNXJetTagsProducer);
| 4,698 |
1,830 | /*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under
* one or more contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright ownership.
* Licensed under the Zeebe Community License 1.1. You may not use this file
* except in compliance with the Zeebe Community License 1.1.
*/
package io.camunda.zeebe.gateway.interceptors;
import io.camunda.zeebe.gateway.query.QueryApi;
import io.grpc.Context;
import io.grpc.Context.Key;
/** A set of utilities which interceptor authors can use in their interceptors. */
public final class InterceptorUtil {
private static final Key<QueryApi> QUERY_API_KEY = Context.key("zeebe-query-api");
private InterceptorUtil() {}
/**
* Returns an instance of {@link QueryApi} usable in an interceptor. Note that, as per the gRPC
* documentation, it's perfectly fine to block in a call and/or listener, which may greatly
* simplify the usage of the API in your code.
*
* <p>If you use the API asynchronously, there are a few gotchas to remember:
*
* <ul>
* <li>if your interceptor is loaded via an external JAR, and it uses directly or indirectly the
* {@link Thread#getContextClassLoader()} to load classes, you will need to make sure to set
* the appropriate context class loader in your callbacks, otherwise you may run into {@link
* ClassNotFoundException} errors
* <li>your callback may be executed on a different thread than the initial call, so you will
* have to deal with thread safety; using a {@link io.grpc.internal.SerializingExecutor} or
* similar may help
* <li>since your callback may be executed on a different thread, the {@link Context#current()}
 *       may be different; if you want to use the same original context, you will need to close
 *       over it in your callback, or extract what you need from it beforehand and close over that
* </ul>
*
* <p>Example usage:
*
* <pre>{@code
* final Context context = Context.current();
* final QueryApi api = InterceptorUtil.getQueryApiKey().get(context);
* final String processId;
*
* try {
* processId = queryApi.getBpmnProcessIdForProcess(processKey).toCompletableFuture().join();
* } catch(final Exception e) {
* // close the call on error
* return;
* }
*
* // do something with the processId
* }</pre>
*
* @return the context key associated with the current query API
*/
public static Key<QueryApi> getQueryApiKey() {
return QUERY_API_KEY;
}
}
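// Editorial note (not part of the original source): a hedged sketch of how an
// interceptor might use the key above. It assumes the gateway has already bound a
// QueryApi instance to getQueryApiKey() before the interceptor runs; the class and
// variable names below are illustrative only.
//
//   final class ProcessIdLoggingInterceptor implements io.grpc.ServerInterceptor {
//     @Override
//     public <ReqT, RespT> io.grpc.ServerCall.Listener<ReqT> interceptCall(
//         final io.grpc.ServerCall<ReqT, RespT> call,
//         final io.grpc.Metadata headers,
//         final io.grpc.ServerCallHandler<ReqT, RespT> next) {
//       final QueryApi api = InterceptorUtil.getQueryApiKey().get(Context.current());
//       // Blocking here is acceptable, as described in the javadoc above.
//       return next.startCall(call, headers);
//     }
//   }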
| 825 |
634 | <filename>backends-common/cassandra/src/main/java/org/apache/james/backends/cassandra/init/CassandraZonedDateTimeModule.java
/****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.backends.cassandra.init;
import static com.datastax.driver.core.DataType.text;
import static com.datastax.driver.core.DataType.timestamp;
import java.time.ZonedDateTime;
import java.util.Optional;
import org.apache.james.backends.cassandra.components.CassandraModule;
import org.apache.james.backends.cassandra.utils.ZonedDateTimeRepresentation;
import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;
public interface CassandraZonedDateTimeModule {
String ZONED_DATE_TIME = "zonedDateTime";
String DATE = "date";
String TIME_ZONE = "timeZone";
CassandraModule MODULE = CassandraModule.type(ZONED_DATE_TIME)
.statement(statement -> statement
.addColumn(DATE, timestamp())
.addColumn(TIME_ZONE, text()))
.build();
static UDTValue toUDT(UserType zonedDateTimeUserType, ZonedDateTime zonedDateTime) {
ZonedDateTimeRepresentation representation = ZonedDateTimeRepresentation.fromZonedDateTime(zonedDateTime);
return zonedDateTimeUserType.newValue()
.setTimestamp(CassandraZonedDateTimeModule.DATE, representation.getDate())
.setString(CassandraZonedDateTimeModule.TIME_ZONE, representation.getSerializedZoneId());
}
static Optional<UDTValue> toUDT(UserType zonedDateTimeUserType, Optional<ZonedDateTime> zonedDateTimeOptional) {
return zonedDateTimeOptional.map(zonedDateTime -> toUDT(zonedDateTimeUserType, zonedDateTime));
}
static Optional<ZonedDateTime> fromUDTOptional(UDTValue value) {
return Optional.ofNullable(value).map(CassandraZonedDateTimeModule::fromUDT);
}
static ZonedDateTime fromUDT(UDTValue udtValue) {
return ZonedDateTimeRepresentation.fromDate(
udtValue.getTimestamp(CassandraZonedDateTimeModule.DATE),
udtValue.getString(CassandraZonedDateTimeModule.TIME_ZONE))
.getZonedDateTime();
}
}
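// Editorial note (not part of the original file): a hedged round-trip sketch.
// Assuming a UserType named zonedDateTimeUserType has already been resolved from the
// cluster metadata (that lookup is driver-specific and not shown):
//
//   UDTValue udt = CassandraZonedDateTimeModule.toUDT(zonedDateTimeUserType, ZonedDateTime.now());
//   Optional<ZonedDateTime> restored = CassandraZonedDateTimeModule.fromUDTOptional(udt);
//
// restored then carries the same instant and zone id that were serialized into the UDT.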
| 1,231 |
746 | <reponame>xizi/mmdeploy<gh_stars>100-1000
# Copyright (c) OpenMMLab. All rights reserved.
import glob
import os.path as osp
def get_ops_path() -> str:
"""Get path of the torchscript extension library.
Returns:
str: A path of the torchscript extension library.
"""
wildcard = osp.abspath(
osp.join(
osp.dirname(__file__),
'../../../build/lib/libmmdeploy_torchscript_ops.so'))
paths = glob.glob(wildcard)
lib_path = paths[0] if len(paths) > 0 else ''
return lib_path
def ops_available() -> bool:
"""Return whether ops are available.
Returns:
bool: Whether ops are available.
"""
return osp.exists(get_ops_path())
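# Editorial note (not part of the upstream module): a minimal, hypothetical usage
# sketch. Loading the extension with torch.ops.load_library is one common pattern,
# but whether callers of this module do so is an assumption here.
#
#   if ops_available():
#       import torch
#       torch.ops.load_library(get_ops_path())
#   else:
#       print('mmdeploy torchscript ops not built; custom ops will be unavailable')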
| 294 |
3,586 | <gh_stars>1000+
package com.linkedin.datahub.graphql.resolvers.load;
import com.linkedin.datahub.graphql.UsageStatsKey;
import com.linkedin.datahub.graphql.types.LoadableType;
import com.linkedin.pegasus2avro.usage.UsageQueryResult;
import com.linkedin.usage.UsageTimeRange;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.util.concurrent.CompletableFuture;
import org.dataloader.DataLoader;
/**
 * GraphQL resolver responsible for
 *
 *    1. Reading the "resource" and "range" arguments of the query.
 *    2. Resolving the corresponding {@link UsageQueryResult} through a {@link DataLoader}.
 *
 * Note that this resolver expects a {@link DataLoader} to have been registered
 * under the name "UsageQueryResult" (see the loader lookup below).
 *
 */
public class UsageTypeResolver implements DataFetcher<CompletableFuture<UsageQueryResult>> {
@Override
public CompletableFuture<UsageQueryResult> get(DataFetchingEnvironment environment) {
final DataLoader<UsageStatsKey, UsageQueryResult> loader = environment.getDataLoaderRegistry().getDataLoader("UsageQueryResult");
String resource = environment.getArgument("resource");
UsageTimeRange duration = UsageTimeRange.valueOf(environment.getArgument("range"));
UsageStatsKey key = new UsageStatsKey(resource, duration);
return loader.load(key);
}
}
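// Editorial note (not part of the original source): the resolver above looks up a
// DataLoader registered under the name "UsageQueryResult". A hedged sketch of that
// registration, assuming java-dataloader's DataLoaderRegistry and an existing
// BatchLoader<UsageStatsKey, UsageQueryResult> named usageBatchLoader (both
// assumptions; the project's actual wiring may differ):
//
//   DataLoaderRegistry registry = new DataLoaderRegistry();
//   registry.register("UsageQueryResult", DataLoader.newDataLoader(usageBatchLoader));
//
// The registry is then attached to the GraphQL execution input so that
// environment.getDataLoaderRegistry() can resolve it.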
| 436 |
790 | <filename>AppServer/google/appengine/tools/devappserver2/thread_executor.py
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A thread-based executor that creates a new thread per work submission.
The advantage of this approach over ThreadPoolExecutor is:
1. there is no upper bound to the number of threads.
2. threads exit as soon as their work is done.
The disadvantage is the cost of one thread creation per work submission so
ThreadExecutor is only efficient when the cost of creating a thread is small
compared to the work being done.
"""
import threading
import google
from concurrent import futures
def _worker(future, fn, args, kwargs):
if not future.set_running_or_notify_cancel():
return
try:
result = fn(*args, **kwargs)
except BaseException as e:
future.set_exception(e)
else:
future.set_result(result)
class ThreadExecutor(futures.Executor):
"""A thread-based executor that creates a new thread per work submission."""
def __init__(self):
"""Initializes a new ThreadExecutor instance."""
self._shutdown = False
self._shutdown_lock = threading.Lock()
def submit(self, fn, *args, **kwargs):
with self._shutdown_lock:
if self._shutdown:
raise RuntimeError('cannot schedule new futures after shutdown')
f = futures.Future()
t = threading.Thread(target=_worker, args=(f, fn, args, kwargs))
t.start()
return f
submit.__doc__ = futures.Executor.submit.__doc__
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown = True
shutdown.__doc__ = futures.Executor.shutdown.__doc__
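# Editorial note (not part of the original module): a minimal usage sketch of the
# executor defined above.
#
#   executor = ThreadExecutor()
#   future = executor.submit(pow, 2, 10)
#   assert future.result() == 1024
#   executor.shutdown()
#
# Each submit() call starts a dedicated thread that exits as soon as the callable
# returns, which is the trade-off described in the module docstring.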
| 662 |
667 | //
// Copyright (c) 2013 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
#include "compiler/translator/TranslatorGLSL.h"
//
// This function must be provided to create the actual
// compile object used by higher level code. It returns
// a subclass of TCompiler.
//
TCompiler* ConstructCompiler(
sh::GLenum type, ShShaderSpec spec, ShShaderOutput output)
{
switch (output) {
case SH_GLSL_COMPATIBILITY_OUTPUT:
return new TranslatorGLSL(type, spec, output);
default:
// Unknown format. Return NULL per the ShConstructCompiler API.
return NULL;
}
}
//
// Delete the compiler made by ConstructCompiler
//
void DeleteCompiler(TCompiler* compiler)
{
delete compiler;
}
| 279 |
2,261 | <reponame>XRay3D/compile-time-regular-expressions<gh_stars>1000+
#ifndef CTLL__TYPE_STACK__HPP
#define CTLL__TYPE_STACK__HPP
#include "utilities.hpp"
namespace ctll {
template <typename... Ts> struct list { };
struct _nothing { };
using empty_list = list<>;
// calculate size of list content
template <typename... Ts> constexpr auto size(list<Ts...>) noexcept { return sizeof...(Ts); }
// check if the list is empty
template <typename... Ts> constexpr bool empty(list<Ts...>) noexcept { return false; }
constexpr bool empty(empty_list) { return true; }
// concat two lists together left to right
template <typename... As, typename... Bs> constexpr auto concat(list<As...>, list<Bs...>) noexcept -> list<As..., Bs...> { return {}; }
// push something to the front of a list
template <typename T, typename... As> constexpr auto push_front(T, list<As...>) noexcept -> list<T, As...> { return {}; }
// pop element from the front of a list
template <typename T, typename... As> constexpr auto pop_front(list<T, As...>) noexcept -> list<As...> { return {}; }
constexpr auto pop_front(empty_list) -> empty_list;
// pop element from the front of a list and return new typelist too
template <typename Front, typename List> struct list_pop_pair {
Front front{};
List list{};
constexpr list_pop_pair() = default;
};
template <typename Head, typename... As, typename T = _nothing> constexpr auto pop_and_get_front(list<Head, As...>, T = T()) noexcept -> list_pop_pair<Head, list<As...>> { return {}; }
template <typename T = _nothing> constexpr auto pop_and_get_front(empty_list, T = T()) noexcept -> list_pop_pair<T, empty_list> { return {}; }
// return front of the list
template <typename Head, typename... As, typename T = _nothing> constexpr auto front(list<Head, As...>, T = T()) noexcept -> Head { return {}; }
template <typename T = _nothing> constexpr auto front(empty_list, T = T()) noexcept -> T { return {}; }
// set operations
template <typename T> struct item_matcher {
struct not_selected {
template <typename... Ts> friend constexpr auto operator+(list<Ts...>, not_selected) -> list<Ts...>;
};
template <typename Y> struct wrapper {
template <typename... Ts> friend constexpr auto operator+(list<Ts...>, wrapper<Y>) -> list<Ts...,Y>;
};
static constexpr auto check(T) { return std::true_type{}; }
static constexpr auto check(...) { return std::false_type{}; }
static constexpr auto select(T) { return not_selected{}; }
template <typename Y> static constexpr auto select(Y) { return wrapper<Y>{}; }
};
template <typename T, typename... Ts> constexpr bool exists_in(T, list<Ts...>) noexcept {
return (item_matcher<T>::check(Ts{}) || ... || false);
}
template <typename T, typename... Ts> constexpr auto add_item(T item, list<Ts...> l) noexcept {
if constexpr (exists_in(item, l)) {
return l;
} else {
return list<Ts..., T>{};
}
}
template <typename T, typename... Ts> constexpr auto remove_item(T, list<Ts...>) noexcept {
item_matcher<T> matcher;
return decltype((list<>{} + ... + matcher.select(Ts{}))){};
}
}
#endif
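// Editorial note (not part of the original header): a hedged sketch of how the list
// utilities above compose at compile time. All names come from this header; the
// asserts are illustrative only.
//
//   using ab = ctll::list<int, char>;
//   static_assert(ctll::size(ab{}) == 2);
//   static_assert(!ctll::empty(ab{}));
//   static_assert(ctll::exists_in(int{}, ab{}));
//   // push_front returns a new list type:
//   //   decltype(ctll::push_front(long{}, ab{})) is ctll::list<long, int, char>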
| 1,046 |
695 | <reponame>xenron/sandbox-dev-lintcode
// Time: O(n)
// Space: O(n)
class Solution {
public:
/**
* @param expression: a vector of strings;
* @return: an integer
*/
int evaluateExpression(vector<string> &expression) {
stack<int> operands;
stack<string> operators;
if (expression.empty()) {
return 0;
}
for (int i = expression.size() - 1; i >= 0; --i) {
if (isdigit(expression[i][0])) {
operands.emplace(stoi(expression[i]));
} else if (expression[i] == ")" || expression[i] == "*" ||
expression[i] == "/") {
operators.emplace(expression[i]);
} else if (expression[i] == "+" || expression[i] == "-") {
while (!operators.empty() && (operators.top() == "*" ||
operators.top() == "/")) {
compute(operands, operators);
}
operators.emplace(expression[i]);
} else if (expression[i] == "(") {
// operators at least one element, i.e. ")".
while (operators.top() != ")") {
compute(operands, operators);
}
operators.pop();
}
}
while (!operators.empty()) {
compute(operands, operators);
}
return operands.top();
}
void compute(stack<int>& operands, stack<string>& operators) {
const int left = operands.top();
operands.pop();
const int right = operands.top();
operands.pop();
const string op = operators.top();
operators.pop();
if (op == "+") {
operands.emplace(left + right);
} else if (op == "-") {
operands.emplace(left - right);
} else if (op == "*") {
operands.emplace(left * right);
} else if (op == "/") {
operands.emplace(left / right);
}
}
};
// Time: O(n)
// Space: O(n)
class Solution2 {
public:
/**
* @param expression: a vector of strings;
* @return: an integer
*/
int evaluateExpression(vector<string> &expression) {
if (expression.empty()) {
return 0;
}
vector<string> postfix;
infixToPostfix(expression, postfix);
return evaluatePostfixExpression(postfix);
}
// Evaluate Postfix Expression.
int evaluatePostfixExpression(const vector<string> &postfix) {
if (postfix.empty()) {
return 0;
}
stack<string> s;
for (const auto& tok : postfix) {
if (!is_operator(tok)) {
s.emplace(tok);
} else {
int y = stoi(s.top());
s.pop();
int x = stoi(s.top());
s.pop();
if (tok[0] == '+') {
x += y;
} else if (tok[0] == '-') {
x -= y;
} else if (tok[0] == '*') {
x *= y;
} else {
x /= y;
}
s.emplace(to_string(x));
}
}
return stoi(s.top());
}
bool is_operator(const string &op) {
return op.length() == 1 && string("+-*/").find(op) != string::npos;
}
// Convert Infix to Postfix Expression.
void infixToPostfix(vector<string>& infix, vector<string>& postfix) {
stack<string> s;
for (auto tok : infix) {
            // Numbers go directly into the postfix output.
            if (isdigit(tok[0])) {
postfix.emplace_back(tok);
} else if (tok == "(") {
s.emplace(tok);
} else if (tok == ")") {
// Meet ")", then pop until "(".
while (!s.empty()) {
tok = s.top();
s.pop();
if (tok == "(") {
break;
}
postfix.emplace_back(tok);
}
} else {
                // Operators on the stack are kept in strictly increasing precedence
                // order, e.g. "(-*"; a new operator is only pushed on top of lower-precedence ones.
while (!s.empty() && precedence(tok) <= precedence(s.top())) {
postfix.emplace_back(s.top());
s.pop();
}
s.emplace(tok);
}
}
// Pop the remaining token and add them to the postfix.
while (!s.empty()) {
postfix.emplace_back(s.top());
s.pop();
}
}
int precedence(string x) {
if (x == "(") { // The least precedence.
return 0;
} else if (x == "+" || x == "-") {
return 1;
} else if (x == "*" || x == "/") {
return 2;
}
return 3;
}
};
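// Editorial note (not part of the original solution): a short worked trace of the
// infix-to-postfix conversion in Solution2 for the tokens ["2", "*", "(", "3", "+", "4", ")"]:
//   "2" -> postfix: 2
//   "*" -> stack:   *
//   "(" -> stack:   * (
//   "3" -> postfix: 2 3
//   "+" -> stack:   * ( +
//   "4" -> postfix: 2 3 4
//   ")" -> pop to "(", postfix: 2 3 4 +
//   end -> pop the rest, postfix: 2 3 4 + *
// Evaluating the postfix "2 3 4 + *" then yields 14.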
| 2,704 |
554 | <filename>puput/models.py
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.template.defaultfilters import slugify
from django.utils.translation import gettext_lazy as _
from wagtail.core.models import Page
from wagtail.admin.edit_handlers import FieldPanel
from wagtail.snippets.models import register_snippet
from wagtail.search import index
from taggit.models import TaggedItemBase, Tag as TaggitTag
from modelcluster.fields import ParentalKey
from .abstracts import EntryAbstract, BlogAbstract
from .utils import import_model
from .routes import BlogRoutes
from .managers import TagManager, CategoryManager, BlogManager
Entry = import_model(getattr(settings, 'PUPUT_ENTRY_MODEL', EntryAbstract))
Blog = import_model(getattr(settings, 'PUPUT_BLOG_MODEL', BlogAbstract))
class BlogPage(BlogRoutes, Page, Blog):
extra = BlogManager()
content_panels = Page.content_panels + getattr(Blog, 'content_panels', [])
settings_panels = Page.settings_panels + getattr(Blog, 'settings_panels', [])
subpage_types = ['puput.EntryPage']
def get_entries(self):
return EntryPage.objects.descendant_of(self).live().order_by('-date').select_related('owner')
def get_context(self, request, *args, **kwargs):
context = super(BlogPage, self).get_context(request, *args, **kwargs)
context['entries'] = self.entries
context['blog_page'] = self
context['search_type'] = getattr(self, 'search_type', "")
context['search_term'] = getattr(self, 'search_term', "")
return context
@property
def last_url_part(self):
"""
Get the BlogPage url without the domain
"""
return self.get_url_parts()[-1]
class Meta:
verbose_name = _('Blog')
@register_snippet
class Category(models.Model):
name = models.CharField(max_length=80, unique=True, verbose_name=_('Category name'))
slug = models.SlugField(unique=True, max_length=80)
parent = models.ForeignKey(
'self',
blank=True,
null=True,
related_name="children",
verbose_name=_('Parent category'),
on_delete=models.SET_NULL
)
description = models.CharField(max_length=500, blank=True, verbose_name=_('Description'))
objects = CategoryManager()
def __str__(self):
return self.name
def clean(self):
if self.parent:
parent = self.parent
if self.parent == self:
raise ValidationError(_('Parent category cannot be self.'))
if parent.parent and parent.parent == self:
raise ValidationError(_('Cannot have circular Parents.'))
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.name)
return super(Category, self).save(*args, **kwargs)
class Meta:
ordering = ['name']
verbose_name = _("Category")
verbose_name_plural = _("Categories")
class CategoryEntryPage(models.Model):
category = models.ForeignKey(Category, related_name="+", verbose_name=_('Category'), on_delete=models.CASCADE)
page = ParentalKey('EntryPage', related_name='entry_categories')
panels = [
FieldPanel('category')
]
def __str__(self):
return str(self.category)
class TagEntryPage(TaggedItemBase):
content_object = ParentalKey('EntryPage', related_name='entry_tags')
@register_snippet
class Tag(TaggitTag):
objects = TagManager()
class Meta:
proxy = True
class EntryPageRelated(models.Model):
entrypage_from = ParentalKey('EntryPage', verbose_name=_("Entry"), related_name='related_entrypage_from')
entrypage_to = ParentalKey('EntryPage', verbose_name=_("Entry"), related_name='related_entrypage_to')
def __str__(self):
return str(self.entrypage_to)
class EntryPage(Entry, Page):
# Search
search_fields = Page.search_fields + [
index.SearchField('body'),
index.SearchField('excerpt'),
index.FilterField('page_ptr_id')
]
# Panels
content_panels = getattr(Entry, 'content_panels', [])
promote_panels = Page.promote_panels + getattr(Entry, 'promote_panels', [])
settings_panels = Page.settings_panels + [
FieldPanel('date'), FieldPanel('owner'),
] + getattr(Entry, 'settings_panels', [])
# Parent and child settings
parent_page_types = ['puput.BlogPage']
subpage_types = []
def get_sitemap_urls(self, request=None):
from .urls import get_entry_url
root_url = self.get_url_parts()[1]
entry_url = get_entry_url(self, self.blog_page.page_ptr, root_url)
return [
{
'location': root_url + entry_url,
# fall back on latest_revision_created_at if last_published_at is null
# (for backwards compatibility from before last_published_at was added)
'lastmod': (self.last_published_at or self.latest_revision_created_at),
}
]
@property
def blog_page(self):
return self.get_parent().specific
@property
def related(self):
return [related.entrypage_to for related in self.related_entrypage_from.all()]
@property
def has_related(self):
return self.related_entrypage_from.count() > 0
def get_absolute_url(self):
return self.full_url
def get_context(self, request, *args, **kwargs):
context = super(EntryPage, self).get_context(request, *args, **kwargs)
context['blog_page'] = self.blog_page
return context
class Meta:
verbose_name = _('Entry')
verbose_name_plural = _('Entries')
| 2,243 |
3,402 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.cube.model;
import org.apache.commons.lang.StringUtils;
import org.apache.kylin.dimension.DateDimEnc;
import org.apache.kylin.dimension.DictionaryDimEnc;
import org.apache.kylin.dimension.DimensionEncoding;
import org.apache.kylin.dimension.DimensionEncodingFactory;
import org.apache.kylin.dimension.FixedLenDimEnc;
import org.apache.kylin.dimension.TimeDimEnc;
import org.apache.kylin.metadata.datatype.DataType;
import org.apache.kylin.metadata.model.TblColRef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.kylin.shaded.com.google.common.base.MoreObjects;
import org.apache.kylin.shaded.com.google.common.base.Preconditions;
/**
* @author yangli9
*
*/
@JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
public class RowKeyColDesc implements java.io.Serializable {
private static final Logger logger = LoggerFactory.getLogger(RowKeyColDesc.class);
public static boolean isDateDimEnc(RowKeyColDesc rowKeyColDesc) {
return DateDimEnc.ENCODING_NAME.equals(rowKeyColDesc.getEncodingName());
}
public static boolean isTimeDimEnc(RowKeyColDesc rowKeyColDesc) {
return TimeDimEnc.ENCODING_NAME.equals(rowKeyColDesc.getEncodingName());
}
@JsonProperty("column")
private String column;
@JsonProperty("encoding")
private String encoding;
@JsonProperty("encoding_version")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
private int encodingVersion = 1;
@JsonProperty("isShardBy")
    private boolean isShardBy; // usually an ultra-high-cardinality column; sharding by it reduces the aggregation cache needed on each shard
@JsonProperty("index")
@JsonInclude(JsonInclude.Include.NON_NULL)
private String index;
// computed
private String encodingName;
private String[] encodingArgs;
private int bitIndex;
private TblColRef colRef;
public void init(int index, CubeDesc cubeDesc) {
bitIndex = index;
        colRef = cubeDesc.getModel().findColumn(column);
        Preconditions.checkArgument(colRef != null, "Cannot find rowkey column %s in cube %s", column, cubeDesc);
        column = colRef.getIdentity();
Preconditions.checkState(StringUtils.isNotEmpty(this.encoding));
Object[] encodingConf = DimensionEncoding.parseEncodingConf(this.encoding);
encodingName = (String) encodingConf[0];
encodingArgs = (String[]) encodingConf[1];
if (!DimensionEncodingFactory.isValidEncoding(this.encodingName))
throw new IllegalArgumentException("Not supported row key col encoding: '" + this.encoding + "'");
// convert date/time dictionary on date/time column to DimensionEncoding implicitly
// however date/time dictionary on varchar column is still required
DataType type = colRef.getType();
if (DictionaryDimEnc.ENCODING_NAME.equals(encodingName) && cubeDesc.getConfig().isRowKeyEncodingAutoConvert()) {
if (type.isDate()) {
encoding = encodingName = DateDimEnc.ENCODING_NAME;
logger.info("Implicitly convert encoding to {}", encodingName);
}
if (type.isTimeFamily()) {
encoding = encodingName = TimeDimEnc.ENCODING_NAME;
logger.info("Implicitly convert encoding to {}", encodingName);
}
}
encodingArgs = DateDimEnc.replaceEncodingArgs(encoding, encodingArgs, encodingName, type);
if (encodingName.startsWith(FixedLenDimEnc.ENCODING_NAME) && (type.isIntegerFamily() || type.isNumberFamily())) {
logger.warn(colRef + " type is " + type + " and cannot apply fixed_length encoding");
}
}
public String getEncoding() {
return encoding;
}
public void setEncoding(String encoding) {
this.encoding = encoding;
}
public String getColumn() {
return column;
}
public void setColumn(String column) {
this.column = column;
}
public boolean isShardBy() {
return isShardBy;
}
public void setShardBy(boolean shardBy) {
isShardBy = shardBy;
}
public String getEncodingName() {
return encodingName;
}
public String[] getEncodingArgs() {
return encodingArgs;
}
public boolean isUsingDictionary() {
return DictionaryDimEnc.ENCODING_NAME.equals(encodingName);
}
public int getBitIndex() {
return bitIndex;
}
public TblColRef getColRef() {
return colRef;
}
public String getIndex() {
return index;
}
public void setIndex(String index) {
this.index = index;
}
public int getEncodingVersion() {
return encodingVersion;
}
public void setEncodingVersion(int encodingVersion) {
this.encodingVersion = encodingVersion;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((column == null) ? 0 : column.hashCode());
return result;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this).add("column", column).add("encoding", encoding).toString();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
RowKeyColDesc that = (RowKeyColDesc) o;
if (column != null ? !column.equals(that.column) : that.column != null) {
return false;
}
return true;
}
}
| 2,481 |
2,690 | <gh_stars>1000+
#ifndef SM_NUMERICAL_DIFF_HPP
#define SM_NUMERICAL_DIFF_HPP
#include <Eigen/Core>
#include <sm/assert_macros.hpp>
#include <boost/function.hpp>
#include <boost/bind.hpp>
namespace sm { namespace eigen {
template<typename RESULT_VEC_T, typename INPUT_VEC_T, typename JACOBIAN_T = Eigen::MatrixXd>
struct NumericalDiffFunctor
{
typedef RESULT_VEC_T value_t;
typedef typename value_t::Scalar scalar_t;
typedef INPUT_VEC_T input_t;
typedef JACOBIAN_T jacobian_t;
NumericalDiffFunctor( boost::function< value_t(input_t) > f) : _f(f){}
value_t operator()(const input_t & x) { return _f(x); }
input_t update(const input_t & x, int c, scalar_t delta) { input_t xnew = x; xnew[c] += delta; return xnew; }
boost::function<value_t(input_t)> _f;
};
// A simple implementation of central differences to estimate a Jacobian matrix
template<typename FUNCTOR_T>
struct NumericalDiff
{
typedef FUNCTOR_T functor_t;
typedef typename functor_t::input_t input_t;
typedef typename functor_t::value_t value_t;
typedef typename functor_t::scalar_t scalar_t;
typedef typename functor_t::jacobian_t jacobian_t;
NumericalDiff(functor_t f, scalar_t eps = sqrt(std::numeric_limits<scalar_t>::epsilon())) : functor(f), eps(eps) {}
jacobian_t estimateJacobian(input_t const & x0)
{
// evaluate the function at the operating point:
value_t fx0 = functor(x0);
size_t N = x0.size();
size_t M = fx0.size();
//std::cout << "Size: " << M << ", " << N << std::endl;
jacobian_t J;
J.resize(M, N);
SM_ASSERT_EQ(std::runtime_error,x0.size(),J.cols(),"Unexpected number of columns for input size");
SM_ASSERT_EQ(std::runtime_error,fx0.size(),J.rows(),"Unexpected number of columns for output size");
for(unsigned c = 0; c < N; c++) {
// Calculate a central difference.
// This step size was stolen from cminpack: temp = eps * fabs(x[j]);
scalar_t rcEps = std::max(static_cast<scalar_t>(fabs(x0(c))) * eps,eps);
value_t fxp = functor(functor.update(x0,c,rcEps));
value_t fxm = functor(functor.update(x0,c,-rcEps));
J.block(0, c, M, 1) = (fxp - fxm).template cast<typename jacobian_t::Scalar>()/(typename jacobian_t::Scalar)(rcEps*(scalar_t)2.0);
}
return J;
}
functor_t functor;
scalar_t eps;
};
template < typename ValueType_, typename InputType_>
Eigen::MatrixXd numericalDiff(std::function<ValueType_ (const InputType_ &) > function, InputType_ const & input, double eps = sqrt(std::numeric_limits<typename NumericalDiffFunctor<ValueType_, InputType_>::scalar_t>::epsilon())){
typedef NumericalDiffFunctor<ValueType_, InputType_> Functor;
NumericalDiff<Functor> numDiff(Functor(function), eps);
return numDiff.estimateJacobian(input);
}
}}
#endif /* SM_NUMERICAL_DIFF_HPP */
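// Editorial note (not part of the original header): a hedged usage sketch of the free
// function numericalDiff above, for f(x) = [x0*x1, x0+x1] at x = (2, 3). The analytic
// Jacobian there is [[3, 2], [1, 1]], which the central-difference estimate should
// reproduce up to the chosen eps.
//
//   Eigen::Vector2d x(2.0, 3.0);
//   auto f = [](const Eigen::Vector2d& in) -> Eigen::Vector2d {
//     return Eigen::Vector2d(in(0) * in(1), in(0) + in(1));
//   };
//   Eigen::MatrixXd J = sm::eigen::numericalDiff<Eigen::Vector2d, Eigen::Vector2d>(f, x);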
| 1,381 |
362 | <gh_stars>100-1000
// Copyright (c) 2018-2020, <NAME>. For more information see 'LICENSE'
#pragma once
#include "scene/Renderer/ScenePreRender.h"
#include "scene/Renderer/RenderQueue.h"
#include "scene/Shaders/ShaderCache.h"
namespace FG
{
//
// Render Technique interface
//
class IRenderTechnique
{
// types
public:
using GraphicsPipelineInfo = ShaderCache::GraphicsPipelineInfo;
using RayTracingPipelineInfo = ShaderCache::RayTracingPipelineInfo;
using ComputePipelineInfo = ShaderCache::ComputePipelineInfo;
protected:
class RenderQueueImpl final : public RenderQueue
{
public:
RenderQueueImpl () {}
void Create (const CommandBuffer &cmd, const CameraInfo &camera) { return _Create( cmd, camera ); }
ND_ Task Submit (ArrayView<Task> dependsOn = Default) { return _Submit( dependsOn ); }
void AddLayer (ERenderLayer layer, LogicalPassID passId,
const PipelineResources& pplnRes, StringView dbgName = Default) { return _AddLayer( layer, passId, &pplnRes, dbgName ); }
void AddLayer (ERenderLayer layer, const RenderPassDesc &desc,
const PipelineResources& pplnRes, StringView dbgName = Default) { return _AddLayer( layer, desc, &pplnRes, dbgName ); }
};
using CameraData_t = ScenePreRender::CameraData;
// interface
public:
virtual void Destroy () = 0;
virtual bool Render (const ScenePreRender &) = 0;
virtual bool GetPipeline (ERenderLayer, INOUT GraphicsPipelineInfo &, OUT RawGPipelineID &) = 0;
virtual bool GetPipeline (ERenderLayer, INOUT GraphicsPipelineInfo &, OUT RawMPipelineID &) = 0;
virtual bool GetPipeline (ERenderLayer, INOUT RayTracingPipelineInfo &, OUT RawRTPipelineID &) = 0;
virtual bool GetPipeline (ERenderLayer, INOUT ComputePipelineInfo &, OUT RawCPipelineID &) = 0;
ND_ virtual Ptr<ShaderCache> GetShaderBuilder () = 0;
ND_ virtual FrameGraph GetFrameGraph () = 0;
protected:
ND_ static ScenePreRender::CameraArray_t const& _GetCameras (const ScenePreRender &preRender) { return preRender._cameras; }
};
} // FG
| 730 |
791 | import tensorflow as tf
from tensorflow.python.ops import variables
from tensorflow.python.ops import array_ops
from tensorflow.python.framework import ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tf_extended import math as tfe_math
import util
def _create_local(name, shape, collections=None, validate_shape=True,
dtype=tf.float32):
"""Creates a new local variable.
Args:
name: The name of the new or existing variable.
shape: Shape of the new or existing variable.
collections: A list of collection names to which the Variable will be added.
validate_shape: Whether to validate the shape of the variable.
dtype: Data type of the variables.
Returns:
The created variable.
"""
# Make sure local variables are added to tf.GraphKeys.LOCAL_VARIABLES
collections = list(collections or [])
collections += [ops.GraphKeys.LOCAL_VARIABLES]
return variables.Variable(
initial_value=array_ops.zeros(shape, dtype=dtype),
name=name,
trainable=False,
collections=collections,
validate_shape=validate_shape)
def streaming_tp_fp_arrays(num_gbboxes, tp, fp,
metrics_collections=None,
updates_collections=None,
name=None):
"""Streaming computation of True and False Positive arrays.
"""
with variable_scope.variable_scope(name, 'streaming_tp_fp',
[num_gbboxes, tp, fp]):
num_gbboxes = tf.cast(num_gbboxes, tf.int32)
tp = tf.cast(tp, tf.bool)
fp = tf.cast(fp, tf.bool)
# Reshape TP and FP tensors and clean away 0 class values.
tp = tf.reshape(tp, [-1])
fp = tf.reshape(fp, [-1])
# Local variables accumlating information over batches.
v_num_objects = _create_local('v_num_gbboxes', shape=[], dtype=tf.int32)
v_tp = _create_local('v_tp', shape=[0, ], dtype=tf.bool)
v_fp = _create_local('v_fp', shape=[0, ], dtype=tf.bool)
# Update operations.
num_objects_op = state_ops.assign_add(v_num_objects,
tf.reduce_sum(num_gbboxes))
tp_op = state_ops.assign(v_tp, tf.concat([v_tp, tp], axis=0),
validate_shape=False)
fp_op = state_ops.assign(v_fp, tf.concat([v_fp, fp], axis=0),
validate_shape=False)
# Value and update ops.
val = (v_num_objects, v_tp, v_fp)
with ops.control_dependencies([num_objects_op, tp_op, fp_op]):
update_op = (num_objects_op, tp_op, fp_op)
return val, update_op
def precision_recall(num_gbboxes, tp, fp, scope=None):
"""Compute precision and recall from true positives and false
positives booleans arrays
"""
# Sort by score.
with tf.name_scope(scope, 'precision_recall'):
# Computer recall and precision.
tp = tf.reduce_sum(tf.cast(tp, tf.float32), axis=0)
fp = tf.reduce_sum(tf.cast(fp, tf.float32), axis=0)
recall = tfe_math.safe_divide(tp, tf.cast(num_gbboxes, tf.float32), 'recall')
precision = tfe_math.safe_divide(tp, tp + fp, 'precision')
return tf.tuple([precision, recall])
def fmean(pre, rec):
"""Compute f-mean with precision and recall
"""
def zero():
return tf.zeros([])
def not_zero():
return 2 * pre * rec / (pre + rec)
return tf.cond(pre + rec > 0, not_zero, zero)
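# Editorial note (not part of the original module): fmean above is the usual F1 score,
# e.g. precision 0.5 and recall 1.0 give 2 * 0.5 * 1.0 / (0.5 + 1.0) = 2/3. A hedged
# sketch of wiring the three helpers together inside a graph:
#
#   val, update_op = streaming_tp_fp_arrays(num_gbboxes, tp, fp)
#   n_gt, v_tp, v_fp = val
#   precision, recall = precision_recall(n_gt, v_tp, v_fp)
#   f1 = fmean(precision, recall)
#
# update_op is run once per batch; precision, recall and f1 are evaluated after the
# last batch has been accumulated.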
| 1,662 |
854 | __________________________________________________________________________________________________
sample 0 ms submission
/*
* @lc app=leetcode id=367 lang=java
*
* [367] Valid Perfect Square
*/
class Solution {
public boolean isPerfectSquare(int num) {
if (num == 1) {
return true;
}
long start = 1, end = num;
while (start < end - 1) {
long middle = start + (end - start) / 2;
long temp = middle * middle;
if (temp == (long)num) {
return true;
} else if (temp > (long)num) {
end = middle;
} else {
start = middle;
}
}
return start * start == num || end * end == num;
}
}
__________________________________________________________________________________________________
sample 31424 kb submission
class Solution {
public boolean isPerfectSquare(int num) {
int low=1, high=num, mid;
while(low<=high){
mid=low+(high-low)/2;
if(num/mid < mid){
high=mid-1;
}
else{
if(mid*mid==num){
return true;
}
low=mid+1;
}
}
return false;
}
}
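// Editorial note (not part of either submission): the guard num/mid < mid in the second
// solution is what keeps mid*mid from overflowing int. Whenever the multiplication is
// reached, mid <= num/mid, so mid*mid <= num <= Integer.MAX_VALUE; the largest perfect
// square representable as an int is 46340 * 46340 = 2,147,395,600.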
__________________________________________________________________________________________________
| 627 |
2,134 | <reponame>dendisuhubdy/opentrack<filename>pose-widget/export.hpp
// generates export.hpp for each module from compat/linkage.hpp
#pragma once
#include "compat/linkage-macros.hpp"
#ifdef BUILD_POSE_WIDGET
# define OTR_POSE_WIDGET_EXPORT OTR_GENERIC_EXPORT
#else
# define OTR_POSE_WIDGET_EXPORT OTR_GENERIC_IMPORT
#endif
| 138 |
938 | package com.susion.rabbit.base.entities;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.Generated;
import org.greenrobot.greendao.annotation.Keep;
/**
* susionwang at 2020-01-14
*/
@Entity
public class RabbitAppPerformanceInfo implements RabbitInfoProtocol{
@Id(autoincrement = true)
public Long id;
public Long time;
public String fpsIds;
public String memoryIds;
public String appStartId;
public String pageSpeedIds;
public String blockIds;
public String slowMethodIds;
public Long endTime;
public boolean isRunning;
@Keep
public RabbitAppPerformanceInfo(Long id, Long time, String fpsIds,
String memoryIds, String appStartId, String pageSpeedIds,
String blockIds, String slowMethodIds, Long endTime,
boolean isRunning) {
this.id = id;
this.time = time;
this.fpsIds = fpsIds;
this.memoryIds = memoryIds;
this.appStartId = appStartId;
this.pageSpeedIds = pageSpeedIds;
this.blockIds = blockIds;
this.slowMethodIds = slowMethodIds;
this.endTime = endTime;
this.isRunning = isRunning;
}
@Generated(hash = 2095757822)
public RabbitAppPerformanceInfo() {
}
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
public Long getTime() {
return this.time;
}
@Override
public String getPageName() {
return "";
}
public void setTime(Long time) {
this.time = time;
}
public String getFpsIds() {
return this.fpsIds;
}
public void setFpsIds(String fpsIds) {
this.fpsIds = fpsIds;
}
public String getMemoryIds() {
return this.memoryIds;
}
public void setMemoryIds(String memoryIds) {
this.memoryIds = memoryIds;
}
public String getAppStartId() {
return this.appStartId;
}
public void setAppStartId(String appStartId) {
this.appStartId = appStartId;
}
public String getPageSpeedIds() {
return this.pageSpeedIds;
}
public void setPageSpeedIds(String pageSpeedIds) {
this.pageSpeedIds = pageSpeedIds;
}
public String getBlockIds() {
return this.blockIds;
}
public void setBlockIds(String blockIds) {
this.blockIds = blockIds;
}
public String getSlowMethodIds() {
return this.slowMethodIds;
}
public void setSlowMethodIds(String slowMethodIds) {
this.slowMethodIds = slowMethodIds;
}
public Long getEndTime() {
return this.endTime;
}
public void setEndTime(Long endTime) {
this.endTime = endTime;
}
public boolean getIsRunning() {
return this.isRunning;
}
public void setIsRunning(boolean isRunning) {
this.isRunning = isRunning;
}
}
| 1,345 |
575 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_ARC_SESSION_ARC_CONTAINER_CLIENT_ADAPTER_H_
#define COMPONENTS_ARC_SESSION_ARC_CONTAINER_CLIENT_ADAPTER_H_
#include <memory>
#include "components/arc/session/arc_client_adapter.h"
namespace arc {
// Returns an adapter for talking to session_manager via D-Bus.
std::unique_ptr<ArcClientAdapter> CreateArcContainerClientAdapter();
} // namespace arc
#endif // COMPONENTS_ARC_SESSION_ARC_CONTAINER_CLIENT_ADAPTER_H_
| 200 |
2,151 | <reponame>zipated/src
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/search_engines/template_url_prepopulate_data.h"
#if defined(OS_POSIX) && !defined(OS_MACOSX)
#include <locale.h>
#endif
#include "base/logging.h"
#include "base/macros.h"
#include "base/strings/string16.h"
#include "base/strings/string_piece.h"
#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "build/build_config.h"
#include "components/google/core/browser/google_util.h"
#include "components/pref_registry/pref_registry_syncable.h"
#include "components/prefs/pref_service.h"
#include "components/search_engines/prepopulated_engines.h"
#include "components/search_engines/search_engines_pref_names.h"
#include "components/search_engines/template_url_data.h"
#include "components/search_engines/template_url_data_util.h"
#include "net/base/registry_controlled_domains/registry_controlled_domain.h"
#include "url/gurl.h"
#if defined(OS_WIN)
#include <windows.h>
#undef IN // On Windows, windef.h defines this, which screws up "India" cases.
#elif defined(OS_MACOSX)
#include "base/mac/scoped_cftyperef.h"
#endif
#if defined(OS_ANDROID)
#include "base/android/locale_utils.h"
#endif
namespace TemplateURLPrepopulateData {
// Helpers --------------------------------------------------------------------
namespace {
// NOTE: You should probably not change the data in this file without changing
// |kCurrentDataVersion| in prepopulated_engines.json. See comments in
// GetDataVersion() below!
// Put the engines within each country in order with most interesting/important
// first. The default will be the first engine.
// Default (for countries with no better engine set)
const PrepopulatedEngine* const engines_default[] = {
&kiwi, &google, &google_en, &bing, &yahoo, &duckduckgo, &qwant, &liteqwant, &startpage, &yandex_ru
};
// A list of all the engines that we know about.
const PrepopulatedEngine* const kAllEngines[] = {
// Prepopulated engines:
&kiwi, &google, &google_en, &bing, &yahoo, &duckduckgo, &qwant, &liteqwant, &startpage, &yandex_ru
};
// Please refer to ISO 3166-1 for information about the two-character country
// codes; http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2 is useful. In the
// following (C++) code, we pack the two letters of the country code into an int
// value we call the CountryID.
const int kCountryIDUnknown = -1;
inline int CountryCharsToCountryID(char c1, char c2) {
return c1 << 8 | c2;
}
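// Illustrative example: CountryCharsToCountryID('U', 'S') yields
// ('U' << 8) | 'S' = 0x5553; the original characters can be recovered by
// shifting and masking.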
int CountryCharsToCountryIDWithUpdate(char c1, char c2) {
// SPECIAL CASE: In 2003, Yugoslavia renamed itself to Serbia and Montenegro.
// Serbia and Montenegro dissolved their union in June 2006. Yugoslavia was
// ISO 'YU' and Serbia and Montenegro were ISO 'CS'. Serbia was subsequently
// issued 'RS' and Montenegro 'ME'. Windows XP and Mac OS X Leopard still use
// the value 'YU'. If we get a value of 'YU' or 'CS' we will map it to 'RS'.
if ((c1 == 'Y' && c2 == 'U') ||
(c1 == 'C' && c2 == 'S')) {
c1 = 'R';
c2 = 'S';
}
// SPECIAL CASE: Timor-Leste changed from 'TP' to 'TL' in 2002. Windows XP
// predates this; we therefore map this value.
if (c1 == 'T' && c2 == 'P')
c2 = 'L';
return CountryCharsToCountryID(c1, c2);
}
#if !defined(OS_WIN) && !defined(OS_MACOSX)
int CountryStringToCountryID(const std::string& country) {
return (country.length() == 2)
? CountryCharsToCountryIDWithUpdate(country[0], country[1])
: kCountryIDUnknown;
}
#endif
#if defined(OS_WIN)
// For reference, a list of GeoIDs can be found at
// http://msdn.microsoft.com/en-us/library/dd374073.aspx .
int GeoIDToCountryID(GEOID geo_id) {
const int kISOBufferSize = 3; // Two plus one for the terminator.
wchar_t isobuf[kISOBufferSize] = { 0 };
int retval = GetGeoInfo(geo_id, GEO_ISO2, isobuf, kISOBufferSize, 0);
if (retval == kISOBufferSize &&
!(isobuf[0] == L'X' && isobuf[1] == L'X'))
return CountryCharsToCountryIDWithUpdate(static_cast<char>(isobuf[0]),
static_cast<char>(isobuf[1]));
// Various locations have ISO codes that Windows does not return.
switch (geo_id) {
case 0x144: // Guernsey
return CountryCharsToCountryID('G', 'G');
case 0x148: // Jersey
return CountryCharsToCountryID('J', 'E');
case 0x3B16: // Isle of Man
return CountryCharsToCountryID('I', 'M');
// 'UM' (U.S. Minor Outlying Islands)
case 0x7F: // <NAME>
case 0x102: // Wake Island
case 0x131: // Baker Island
case 0x146: // Howland Island
case 0x147: // Jarvis Island
case 0x149: // Kingman Reef
case 0x152: // Palmyra Atoll
case 0x52FA: // Midway Islands
return CountryCharsToCountryID('U', 'M');
// 'SH' (Saint Helena)
case 0x12F: // Ascension Island
case 0x15C: // Tristan da Cunha
return CountryCharsToCountryID('S', 'H');
// 'IO' (British Indian Ocean Territory)
case 0x13A: // <NAME>
return CountryCharsToCountryID('I', 'O');
// Other cases where there is no ISO country code; we assign countries that
// can serve as reasonable defaults.
case 0x154: // Rota Island
case 0x155: // Saipan
case 0x15A: // Tinian Island
return CountryCharsToCountryID('U', 'S');
case 0x134: // Channel Islands
return CountryCharsToCountryID('G', 'B');
case 0x143: // Guantanamo Bay
default:
return kCountryIDUnknown;
}
}
#endif // defined(OS_WIN)
int GetCountryIDFromPrefs(PrefService* prefs) {
if (!prefs)
return GetCurrentCountryID();
// Cache first run Country ID value in prefs, and use it afterwards. This
// ensures that just because the user moves around, we won't automatically
// make major changes to their available search providers, which would feel
// surprising.
if (!prefs->HasPrefPath(prefs::kCountryIDAtInstall)) {
prefs->SetInteger(prefs::kCountryIDAtInstall, GetCurrentCountryID());
}
return prefs->GetInteger(prefs::kCountryIDAtInstall);
}
std::vector<std::unique_ptr<TemplateURLData>> GetPrepopulationSetFromCountryID(
int country_id) {
const PrepopulatedEngine* const* engines;
size_t num_engines;
// If you add a new country make sure to update the unit test for coverage.
switch (country_id) {
#define CHAR_A 'A'
#define CHAR_B 'B'
#define CHAR_C 'C'
#define CHAR_D 'D'
#define CHAR_E 'E'
#define CHAR_F 'F'
#define CHAR_G 'G'
#define CHAR_H 'H'
#define CHAR_I 'I'
#define CHAR_J 'J'
#define CHAR_K 'K'
#define CHAR_L 'L'
#define CHAR_M 'M'
#define CHAR_N 'N'
#define CHAR_O 'O'
#define CHAR_P 'P'
#define CHAR_Q 'Q'
#define CHAR_R 'R'
#define CHAR_S 'S'
#define CHAR_T 'T'
#define CHAR_U 'U'
#define CHAR_V 'V'
#define CHAR_W 'W'
#define CHAR_X 'X'
#define CHAR_Y 'Y'
#define CHAR_Z 'Z'
#define CHAR(ch) CHAR_##ch
#define CODE_TO_ID(code1, code2)\
(CHAR(code1) << 8 | CHAR(code2))
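// e.g. CODE_TO_ID(U, S) expands to ('U' << 8 | 'S'), the same value produced
// by CountryCharsToCountryID('U', 'S') above.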
#define UNHANDLED_COUNTRY(code1, code2)\
case CODE_TO_ID(code1, code2):
#define END_UNHANDLED_COUNTRIES(code1, code2)\
engines = engines_##code1##code2;\
num_engines = arraysize(engines_##code1##code2);\
break;
#define DECLARE_COUNTRY(code1, code2)\
UNHANDLED_COUNTRY(code1, code2)\
END_UNHANDLED_COUNTRIES(code1, code2)
// Countries with their own, dedicated engine set.
UNHANDLED_COUNTRY(A, E) // United Arab Emirates
UNHANDLED_COUNTRY(A, L) // Albania
UNHANDLED_COUNTRY(A, R) // Argentina
UNHANDLED_COUNTRY(A, T) // Austria
UNHANDLED_COUNTRY(A, U) // Australia
UNHANDLED_COUNTRY(B, A) // Bosnia and Herzegovina
UNHANDLED_COUNTRY(B, E) // Belgium
UNHANDLED_COUNTRY(B, G) // Bulgaria
UNHANDLED_COUNTRY(B, H) // Bahrain
UNHANDLED_COUNTRY(B, I) // Burundi
UNHANDLED_COUNTRY(B, N) // Brunei
UNHANDLED_COUNTRY(B, O) // Bolivia
UNHANDLED_COUNTRY(B, R) // Brazil
UNHANDLED_COUNTRY(B, Y) // Belarus
UNHANDLED_COUNTRY(B, Z) // Belize
UNHANDLED_COUNTRY(C, A) // Canada
UNHANDLED_COUNTRY(C, H) // Switzerland
UNHANDLED_COUNTRY(C, L) // Chile
UNHANDLED_COUNTRY(C, N) // China
UNHANDLED_COUNTRY(C, O) // Colombia
UNHANDLED_COUNTRY(C, R) // Costa Rica
UNHANDLED_COUNTRY(C, Z) // Czech Republic
UNHANDLED_COUNTRY(D, E) // Germany
UNHANDLED_COUNTRY(D, K) // Denmark
UNHANDLED_COUNTRY(D, O) // Dominican Republic
UNHANDLED_COUNTRY(D, Z) // Algeria
UNHANDLED_COUNTRY(E, C) // Ecuador
UNHANDLED_COUNTRY(E, E) // Estonia
UNHANDLED_COUNTRY(E, G) // Egypt
UNHANDLED_COUNTRY(E, S) // Spain
UNHANDLED_COUNTRY(F, I) // Finland
UNHANDLED_COUNTRY(F, O) // Faroe Islands
UNHANDLED_COUNTRY(F, R) // France
UNHANDLED_COUNTRY(G, B) // United Kingdom
UNHANDLED_COUNTRY(G, R) // Greece
UNHANDLED_COUNTRY(G, T) // Guatemala
UNHANDLED_COUNTRY(H, K) // Hong Kong
UNHANDLED_COUNTRY(H, N) // Honduras
UNHANDLED_COUNTRY(H, R) // Croatia
UNHANDLED_COUNTRY(H, U) // Hungary
UNHANDLED_COUNTRY(I, D) // Indonesia
UNHANDLED_COUNTRY(I, E) // Ireland
UNHANDLED_COUNTRY(I, L) // Israel
UNHANDLED_COUNTRY(I, N) // India
UNHANDLED_COUNTRY(I, Q) // Iraq
UNHANDLED_COUNTRY(I, R) // Iran
UNHANDLED_COUNTRY(I, S) // Iceland
UNHANDLED_COUNTRY(I, T) // Italy
UNHANDLED_COUNTRY(J, M) // Jamaica
UNHANDLED_COUNTRY(J, O) // Jordan
UNHANDLED_COUNTRY(J, P) // Japan
UNHANDLED_COUNTRY(K, E) // Kenya
UNHANDLED_COUNTRY(K, R) // South Korea
UNHANDLED_COUNTRY(K, W) // Kuwait
UNHANDLED_COUNTRY(K, Z) // Kazakhstan
UNHANDLED_COUNTRY(L, B) // Lebanon
UNHANDLED_COUNTRY(L, I) // Liechtenstein
UNHANDLED_COUNTRY(L, T) // Lithuania
UNHANDLED_COUNTRY(L, U) // Luxembourg
UNHANDLED_COUNTRY(L, V) // Latvia
UNHANDLED_COUNTRY(L, Y) // Libya
UNHANDLED_COUNTRY(M, A) // Morocco
UNHANDLED_COUNTRY(M, C) // Monaco
UNHANDLED_COUNTRY(M, D) // Moldova
UNHANDLED_COUNTRY(M, E) // Montenegro
UNHANDLED_COUNTRY(M, K) // Macedonia
UNHANDLED_COUNTRY(M, X) // Mexico
UNHANDLED_COUNTRY(M, Y) // Malaysia
UNHANDLED_COUNTRY(N, I) // Nicaragua
UNHANDLED_COUNTRY(N, L) // Netherlands
UNHANDLED_COUNTRY(N, O) // Norway
UNHANDLED_COUNTRY(N, Z) // New Zealand
UNHANDLED_COUNTRY(O, M) // Oman
UNHANDLED_COUNTRY(P, A) // Panama
UNHANDLED_COUNTRY(P, E) // Peru
UNHANDLED_COUNTRY(P, H) // Philippines
UNHANDLED_COUNTRY(P, K) // Pakistan
UNHANDLED_COUNTRY(P, L) // Poland
UNHANDLED_COUNTRY(P, R) // Puerto Rico
UNHANDLED_COUNTRY(P, T) // Portugal
UNHANDLED_COUNTRY(P, Y) // Paraguay
UNHANDLED_COUNTRY(Q, A) // Qatar
UNHANDLED_COUNTRY(R, O) // Romania
UNHANDLED_COUNTRY(R, S) // Serbia
UNHANDLED_COUNTRY(R, U) // Russia
UNHANDLED_COUNTRY(R, W) // Rwanda
UNHANDLED_COUNTRY(S, A) // Saudi Arabia
UNHANDLED_COUNTRY(S, E) // Sweden
UNHANDLED_COUNTRY(S, G) // Singapore
UNHANDLED_COUNTRY(S, I) // Slovenia
UNHANDLED_COUNTRY(S, K) // Slovakia
UNHANDLED_COUNTRY(S, V) // El Salvador
UNHANDLED_COUNTRY(S, Y) // Syria
UNHANDLED_COUNTRY(T, H) // Thailand
UNHANDLED_COUNTRY(T, N) // Tunisia
UNHANDLED_COUNTRY(T, R) // Turkey
UNHANDLED_COUNTRY(T, T) // Trinidad and Tobago
UNHANDLED_COUNTRY(T, W) // Taiwan
UNHANDLED_COUNTRY(T, Z) // Tanzania
UNHANDLED_COUNTRY(U, A) // Ukraine
UNHANDLED_COUNTRY(U, S) // United States
UNHANDLED_COUNTRY(U, Y) // Uruguay
UNHANDLED_COUNTRY(V, E) // Venezuela
UNHANDLED_COUNTRY(V, N) // Vietnam
UNHANDLED_COUNTRY(Y, E) // Yemen
UNHANDLED_COUNTRY(Z, A) // South Africa
UNHANDLED_COUNTRY(Z, W) // Zimbabwe
// Countries using the "Australia" engine set.
UNHANDLED_COUNTRY(C, C) // Cocos Islands
UNHANDLED_COUNTRY(C, X) // Christmas Island
UNHANDLED_COUNTRY(H, M) // Heard Island and McDonald Islands
UNHANDLED_COUNTRY(N, F) // Norfolk Island
// Countries using the "China" engine set.
UNHANDLED_COUNTRY(M, O) // Macao
// Countries using the "Denmark" engine set.
UNHANDLED_COUNTRY(G, L) // Greenland
// Countries using the "Spain" engine set.
UNHANDLED_COUNTRY(A, D) // Andorra
// Countries using the "Finland" engine set.
UNHANDLED_COUNTRY(A, X) // Aland Islands
// Countries using the "France" engine set.
UNHANDLED_COUNTRY(B, F) // Burkina Faso
UNHANDLED_COUNTRY(B, J) // Benin
UNHANDLED_COUNTRY(C, D) // Congo - Kinshasa
UNHANDLED_COUNTRY(C, F) // Central African Republic
UNHANDLED_COUNTRY(C, G) // Congo - Brazzaville
UNHANDLED_COUNTRY(C, I) // Ivory Coast
UNHANDLED_COUNTRY(C, M) // Cameroon
UNHANDLED_COUNTRY(D, J) // Djibouti
UNHANDLED_COUNTRY(G, A) // Gabon
UNHANDLED_COUNTRY(G, F) // French Guiana
UNHANDLED_COUNTRY(G, N) // Guinea
UNHANDLED_COUNTRY(G, P) // Guadeloupe
UNHANDLED_COUNTRY(H, T) // Haiti
#if defined(OS_WIN)
    UNHANDLED_COUNTRY(I, P)  // Clipperton Island ('IP' is a WinXP-ism; ISO
// includes it with France)
#endif
UNHANDLED_COUNTRY(M, L) // Mali
UNHANDLED_COUNTRY(M, Q) // Martinique
UNHANDLED_COUNTRY(N, C) // New Caledonia
UNHANDLED_COUNTRY(N, E) // Niger
UNHANDLED_COUNTRY(P, F) // French Polynesia
UNHANDLED_COUNTRY(P, M) // Saint Pierre and Miquelon
UNHANDLED_COUNTRY(R, E) // Reunion
UNHANDLED_COUNTRY(S, N) // Senegal
UNHANDLED_COUNTRY(T, D) // Chad
UNHANDLED_COUNTRY(T, F) // French Southern Territories
UNHANDLED_COUNTRY(T, G) // Togo
UNHANDLED_COUNTRY(W, F) // Wallis and Futuna
UNHANDLED_COUNTRY(Y, T) // Mayotte
// Countries using the "Greece" engine set.
UNHANDLED_COUNTRY(C, Y) // Cyprus
// Countries using the "Italy" engine set.
UNHANDLED_COUNTRY(S, M) // San Marino
UNHANDLED_COUNTRY(V, A) // Vatican
// Countries using the "Morocco" engine set.
UNHANDLED_COUNTRY(E, H) // Western Sahara
// Countries using the "Netherlands" engine set.
UNHANDLED_COUNTRY(A, N) // Netherlands Antilles
UNHANDLED_COUNTRY(A, W) // Aruba
// Countries using the "Norway" engine set.
UNHANDLED_COUNTRY(B, V) // Bouvet Island
UNHANDLED_COUNTRY(S, J) // Svalbard and Jan Mayen
// Countries using the "New Zealand" engine set.
UNHANDLED_COUNTRY(C, K) // Cook Islands
UNHANDLED_COUNTRY(N, U) // Niue
UNHANDLED_COUNTRY(T, K) // Tokelau
// Countries using the "Portugal" engine set.
UNHANDLED_COUNTRY(C, V) // Cape Verde
UNHANDLED_COUNTRY(G, W) // Guinea-Bissau
UNHANDLED_COUNTRY(M, Z) // Mozambique
UNHANDLED_COUNTRY(S, T) // Sao Tome and Principe
UNHANDLED_COUNTRY(T, L) // Timor-Leste
// Countries using the "Russia" engine set.
UNHANDLED_COUNTRY(A, M) // Armenia
UNHANDLED_COUNTRY(A, Z) // Azerbaijan
UNHANDLED_COUNTRY(K, G) // Kyrgyzstan
UNHANDLED_COUNTRY(T, J) // Tajikistan
UNHANDLED_COUNTRY(T, M) // Turkmenistan
UNHANDLED_COUNTRY(U, Z) // Uzbekistan
// Countries using the "Saudi Arabia" engine set.
UNHANDLED_COUNTRY(M, R) // Mauritania
UNHANDLED_COUNTRY(P, S) // Palestinian Territory
UNHANDLED_COUNTRY(S, D) // Sudan
// Countries using the "United Kingdom" engine set.
UNHANDLED_COUNTRY(B, M) // Bermuda
UNHANDLED_COUNTRY(F, K) // Falkland Islands
UNHANDLED_COUNTRY(G, G) // Guernsey
UNHANDLED_COUNTRY(G, I) // Gibraltar
UNHANDLED_COUNTRY(G, S) // South Georgia and the South Sandwich
// Islands
UNHANDLED_COUNTRY(I, M) // Isle of Man
UNHANDLED_COUNTRY(I, O) // British Indian Ocean Territory
UNHANDLED_COUNTRY(J, E) // Jersey
UNHANDLED_COUNTRY(K, Y) // Cayman Islands
UNHANDLED_COUNTRY(M, S) // Montserrat
UNHANDLED_COUNTRY(M, T) // Malta
UNHANDLED_COUNTRY(P, N) // Pitcairn Islands
UNHANDLED_COUNTRY(S, H) // Saint Helena, Ascension Island, and Tristan da
// Cunha
UNHANDLED_COUNTRY(T, C) // Turks and Caicos Islands
UNHANDLED_COUNTRY(V, G) // British Virgin Islands
// Countries using the "United States" engine set.
UNHANDLED_COUNTRY(A, S) // American Samoa
UNHANDLED_COUNTRY(G, U) // Guam
UNHANDLED_COUNTRY(M, P) // Northern Mariana Islands
UNHANDLED_COUNTRY(U, M) // U.S. Minor Outlying Islands
UNHANDLED_COUNTRY(V, I) // U.S. Virgin Islands
// Countries using the "default" engine set.
UNHANDLED_COUNTRY(A, F) // Afghanistan
UNHANDLED_COUNTRY(A, G) // Antigua and Barbuda
UNHANDLED_COUNTRY(A, I) // Anguilla
UNHANDLED_COUNTRY(A, O) // Angola
UNHANDLED_COUNTRY(A, Q) // Antarctica
UNHANDLED_COUNTRY(B, B) // Barbados
UNHANDLED_COUNTRY(B, D) // Bangladesh
UNHANDLED_COUNTRY(B, S) // Bahamas
UNHANDLED_COUNTRY(B, T) // Bhutan
UNHANDLED_COUNTRY(B, W) // Botswana
UNHANDLED_COUNTRY(C, U) // Cuba
UNHANDLED_COUNTRY(D, M) // Dominica
UNHANDLED_COUNTRY(E, R) // Eritrea
UNHANDLED_COUNTRY(E, T) // Ethiopia
UNHANDLED_COUNTRY(F, J) // Fiji
UNHANDLED_COUNTRY(F, M) // Micronesia
UNHANDLED_COUNTRY(G, D) // Grenada
UNHANDLED_COUNTRY(G, E) // Georgia
UNHANDLED_COUNTRY(G, H) // Ghana
UNHANDLED_COUNTRY(G, M) // Gambia
UNHANDLED_COUNTRY(G, Q) // Equatorial Guinea
UNHANDLED_COUNTRY(G, Y) // Guyana
UNHANDLED_COUNTRY(K, H) // Cambodia
UNHANDLED_COUNTRY(K, I) // Kiribati
UNHANDLED_COUNTRY(K, M) // Comoros
UNHANDLED_COUNTRY(K, N) // Saint Kitts and Nevis
UNHANDLED_COUNTRY(K, P) // North Korea
UNHANDLED_COUNTRY(L, A) // Laos
UNHANDLED_COUNTRY(L, C) // Saint Lucia
UNHANDLED_COUNTRY(L, K) // Sri Lanka
UNHANDLED_COUNTRY(L, R) // Liberia
UNHANDLED_COUNTRY(L, S) // Lesotho
UNHANDLED_COUNTRY(M, G) // Madagascar
UNHANDLED_COUNTRY(M, H) // Marshall Islands
UNHANDLED_COUNTRY(M, M) // Myanmar
UNHANDLED_COUNTRY(M, N) // Mongolia
UNHANDLED_COUNTRY(M, U) // Mauritius
UNHANDLED_COUNTRY(M, V) // Maldives
UNHANDLED_COUNTRY(M, W) // Malawi
UNHANDLED_COUNTRY(N, A) // Namibia
UNHANDLED_COUNTRY(N, G) // Nigeria
UNHANDLED_COUNTRY(N, P) // Nepal
UNHANDLED_COUNTRY(N, R) // Nauru
UNHANDLED_COUNTRY(P, G) // Papua New Guinea
UNHANDLED_COUNTRY(P, W) // Palau
UNHANDLED_COUNTRY(S, B) // Solomon Islands
UNHANDLED_COUNTRY(S, C) // Seychelles
UNHANDLED_COUNTRY(S, L) // Sierra Leone
UNHANDLED_COUNTRY(S, O) // Somalia
UNHANDLED_COUNTRY(S, R) // Suriname
UNHANDLED_COUNTRY(S, Z) // Swaziland
UNHANDLED_COUNTRY(T, O) // Tonga
UNHANDLED_COUNTRY(T, V) // Tuvalu
UNHANDLED_COUNTRY(U, G) // Uganda
UNHANDLED_COUNTRY(V, C) // Saint Vincent and the Grenadines
UNHANDLED_COUNTRY(V, U) // Vanuatu
UNHANDLED_COUNTRY(W, S) // Samoa
UNHANDLED_COUNTRY(Z, M) // Zambia
case kCountryIDUnknown:
default: // Unhandled location
END_UNHANDLED_COUNTRIES(def, ault)
}
std::vector<std::unique_ptr<TemplateURLData>> t_urls;
for (size_t i = 0; i < num_engines; ++i)
t_urls.push_back(TemplateURLDataFromPrepopulatedEngine(*engines[i]));
return t_urls;
}
std::vector<std::unique_ptr<TemplateURLData>> GetPrepopulatedTemplateURLData(
PrefService* prefs) {
std::vector<std::unique_ptr<TemplateURLData>> t_urls;
if (!prefs)
return t_urls;
const base::ListValue* list = prefs->GetList(prefs::kSearchProviderOverrides);
if (!list)
return t_urls;
size_t num_engines = list->GetSize();
for (size_t i = 0; i != num_engines; ++i) {
const base::DictionaryValue* engine;
if (list->GetDictionary(i, &engine)) {
auto t_url = TemplateURLDataFromOverrideDictionary(*engine);
if (t_url) {
t_urls.push_back(std::move(t_url));
}
}
}
return t_urls;
}
bool SameDomain(const GURL& given_url, const GURL& prepopulated_url) {
return prepopulated_url.is_valid() &&
net::registry_controlled_domains::SameDomainOrHost(
given_url, prepopulated_url,
net::registry_controlled_domains::INCLUDE_PRIVATE_REGISTRIES);
}
} // namespace
// Global functions -----------------------------------------------------------
void RegisterProfilePrefs(user_prefs::PrefRegistrySyncable* registry) {
registry->RegisterIntegerPref(prefs::kCountryIDAtInstall, kCountryIDUnknown);
registry->RegisterListPref(prefs::kSearchProviderOverrides);
registry->RegisterIntegerPref(prefs::kSearchProviderOverridesVersion, -1);
}
int GetDataVersion(PrefService* prefs) {
// Allow tests to override the local version.
return (prefs && prefs->HasPrefPath(prefs::kSearchProviderOverridesVersion)) ?
prefs->GetInteger(prefs::kSearchProviderOverridesVersion) :
kCurrentDataVersion;
}
std::vector<std::unique_ptr<TemplateURLData>> GetPrepopulatedEngines(
PrefService* prefs,
size_t* default_search_provider_index) {
// If there is a set of search engines in the preferences file, it overrides
// the built-in set.
if (default_search_provider_index)
*default_search_provider_index = 0;
std::vector<std::unique_ptr<TemplateURLData>> t_urls =
GetPrepopulatedTemplateURLData(prefs);
if (!t_urls.empty())
return t_urls;
return GetPrepopulationSetFromCountryID(GetCountryIDFromPrefs(prefs));
}
std::unique_ptr<TemplateURLData> GetPrepopulatedEngine(PrefService* prefs,
int prepopulated_id) {
size_t default_index;
auto engines =
TemplateURLPrepopulateData::GetPrepopulatedEngines(prefs, &default_index);
for (auto& engine : engines) {
if (engine->prepopulate_id == prepopulated_id)
return std::move(engine);
}
return nullptr;
}
#if defined(OS_ANDROID)
std::vector<std::unique_ptr<TemplateURLData>> GetLocalPrepopulatedEngines(
const std::string& locale) {
int country_id = CountryStringToCountryID(locale);
if (country_id == kCountryIDUnknown) {
LOG(ERROR) << "Unknown country code specified: " << locale;
return std::vector<std::unique_ptr<TemplateURLData>>();
}
return GetPrepopulationSetFromCountryID(country_id);
}
#endif
std::vector<const PrepopulatedEngine*> GetAllPrepopulatedEngines() {
return std::vector<const PrepopulatedEngine*>(std::begin(kAllEngines),
std::end(kAllEngines));
}
void ClearPrepopulatedEnginesInPrefs(PrefService* prefs) {
if (!prefs)
return;
prefs->ClearPref(prefs::kSearchProviderOverrides);
prefs->ClearPref(prefs::kSearchProviderOverridesVersion);
}
std::unique_ptr<TemplateURLData> GetPrepopulatedDefaultSearch(
PrefService* prefs) {
size_t default_search_index;
// This could be more efficient. We are loading all the URLs to only keep
// the first one.
std::vector<std::unique_ptr<TemplateURLData>> loaded_urls =
GetPrepopulatedEngines(prefs, &default_search_index);
return (default_search_index < loaded_urls.size())
? std::move(loaded_urls[default_search_index])
: nullptr;
}
SearchEngineType GetEngineType(const GURL& url) {
DCHECK(url.is_valid());
// Check using TLD+1s, in order to more aggressively match search engine types
// for data imported from other browsers.
//
// First special-case Google, because the prepopulate URL for it will not
// convert to a GURL and thus won't have an origin. Instead see if the
// incoming URL's host is "[*.]google.<TLD>".
if (google_util::IsGoogleHostname(url.host(),
google_util::DISALLOW_SUBDOMAIN))
return google.type;
// Now check the rest of the prepopulate data.
for (size_t i = 0; i < arraysize(kAllEngines); ++i) {
// First check the main search URL.
if (SameDomain(url, GURL(kAllEngines[i]->search_url)))
return kAllEngines[i]->type;
// Then check the alternate URLs.
for (size_t j = 0; j < kAllEngines[i]->alternate_urls_size; ++j) {
if (SameDomain(url, GURL(kAllEngines[i]->alternate_urls[j])))
return kAllEngines[i]->type;
}
}
return SEARCH_ENGINE_OTHER;
}
#if defined(OS_WIN)
int GetCurrentCountryID() {
return GeoIDToCountryID(GetUserGeoID(GEOCLASS_NATION));
}
#elif defined(OS_MACOSX)
int GetCurrentCountryID() {
base::ScopedCFTypeRef<CFLocaleRef> locale(CFLocaleCopyCurrent());
CFStringRef country = (CFStringRef)CFLocaleGetValue(locale.get(),
kCFLocaleCountryCode);
if (!country)
return kCountryIDUnknown;
UniChar isobuf[2];
CFRange char_range = CFRangeMake(0, 2);
CFStringGetCharacters(country, char_range, isobuf);
return CountryCharsToCountryIDWithUpdate(static_cast<char>(isobuf[0]),
static_cast<char>(isobuf[1]));
}
#elif defined(OS_ANDROID)
int GetCurrentCountryID() {
return CountryStringToCountryID(base::android::GetDefaultCountryCode());
}
#elif defined(OS_POSIX)
int GetCurrentCountryID() {
const char* locale = setlocale(LC_MESSAGES, nullptr);
if (!locale)
return kCountryIDUnknown;
// The format of a locale name is:
// language[_territory][.codeset][@modifier], where territory is an ISO 3166
// country code, which is what we want.
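  // For example, a locale of "en_US.UTF-8" yields the territory "US".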
// First remove the language portion.
std::string locale_str(locale);
size_t territory_delim = locale_str.find('_');
if (territory_delim == std::string::npos)
return kCountryIDUnknown;
locale_str.erase(0, territory_delim + 1);
// Next remove any codeset/modifier portion and uppercase.
return CountryStringToCountryID(
base::ToUpperASCII(locale_str.substr(0, locale_str.find_first_of(".@"))));
}
#endif // OS_*
} // namespace TemplateURLPrepopulateData
| 10,980 |
10,225 | <reponame>CraigMcDonaldCodes/quarkus
package io.quarkus.deployment.builditem;
import io.quarkus.builder.item.SimpleBuildItem;
/**
 * Marker used only to ensure that the file system resources were properly written in dev-mode
*/
public final class GeneratedFileSystemResourceHandledBuildItem extends SimpleBuildItem {
}
| 93 |
12,278 | /*=============================================================================
Copyright (c) 2019 <NAME>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
=============================================================================*/
#ifdef BOOST_BUILD_PCH_ENABLED
#include <boost/spirit/include/classic.hpp>
#include <boost/core/lightweight_test.hpp>
#include <vector>
#include <string>
#include <iostream>
#endif
| 149 |
383 | """The tests for the ATC ble_parser."""
from ble_monitor.ble_parser import BleParser
class TestATC:
"""Tests for the ATC parser"""
def test_atc_atc1441(self):
"""Test ATC parser for ATC 1441 format."""
data_string = "043e1d02010000f4830238c1a41110161a18a4c1380283f400a22f5f0bf819df"
data = bytes(bytearray.fromhex(data_string))
# pylint: disable=unused-variable
ble_parser = BleParser()
sensor_msg, tracker_msg = ble_parser.parse_data(data)
assert sensor_msg["firmware"] == "ATC (Atc1441)"
assert sensor_msg["type"] == "ATC"
assert sensor_msg["mac"] == "A4C1380283F4"
assert sensor_msg["packet"] == 25
assert sensor_msg["data"]
assert sensor_msg["temperature"] == 16.2
assert sensor_msg["humidity"] == 47
assert sensor_msg["voltage"] == 3.064
assert sensor_msg["battery"] == 95
assert sensor_msg["rssi"] == -33
def test_atc_atc1441_ext(self):
"""Test ATC parser for ATC 1441 format (extended advertisement)."""
data_string = "043E2B0D011300004E7CBC38C1A40100FF7FB90000000000000000001110161A18A4C138BC7C4E0102284F0B6720"
data = bytes(bytearray.fromhex(data_string))
# pylint: disable=unused-variable
ble_parser = BleParser()
sensor_msg, tracker_msg = ble_parser.parse_data(data)
assert sensor_msg["firmware"] == "ATC (Atc1441)"
assert sensor_msg["type"] == "ATC"
assert sensor_msg["mac"] == "A4C138BC7C4E"
assert sensor_msg["packet"] == 32
assert sensor_msg["data"]
assert sensor_msg["temperature"] == 25.8
assert sensor_msg["humidity"] == 40
assert sensor_msg["voltage"] == 2.919
assert sensor_msg["battery"] == 79
assert sensor_msg["rssi"] == -71
def test_atc_custom(self):
"""Test ATC parser for ATC custom format."""
data_string = "043e1f02010000f4830238c1a41312161a18f4830238c1a4a9066911b60b58f70dde"
data = bytes(bytearray.fromhex(data_string))
# pylint: disable=unused-variable
ble_parser = BleParser()
sensor_msg, tracker_msg = ble_parser.parse_data(data)
assert sensor_msg["firmware"] == "ATC (Custom)"
assert sensor_msg["type"] == "ATC"
assert sensor_msg["mac"] == "A4C1380283F4"
assert sensor_msg["packet"] == 247
assert sensor_msg["data"]
assert sensor_msg["temperature"] == 17.05
assert sensor_msg["humidity"] == 44.57
assert sensor_msg["voltage"] == 2.998
assert sensor_msg["battery"] == 88
assert sensor_msg["rssi"] == -34
def test_atc_custom_v2_9(self):
"""Test ATC parser for ATC custom format (firmware version 2.9 and above)."""
data_string = "043E2202010000B2188D38C1A41602010612161A18B2188D38C1A42B089011F70A43200FC2"
data = bytes(bytearray.fromhex(data_string))
# pylint: disable=unused-variable
ble_parser = BleParser()
sensor_msg, tracker_msg = ble_parser.parse_data(data)
assert sensor_msg["firmware"] == "ATC (Custom)"
assert sensor_msg["type"] == "ATC"
assert sensor_msg["mac"] == "A4C1388D18B2"
assert sensor_msg["packet"] == 32
assert sensor_msg["data"]
assert sensor_msg["temperature"] == 20.91
assert sensor_msg["humidity"] == 44.96
assert sensor_msg["voltage"] == 2.807
assert sensor_msg["battery"] == 67
assert sensor_msg["rssi"] == -62
def test_atc_custom_ext(self):
"""Test ATC parser for ATC custom format (extended format)."""
data_string = "043E300D011300008B376338C1A40100FF7FBA0000000000000000001602010612161A188B376338C1A4CE0913107F0B521204"
data = bytes(bytearray.fromhex(data_string))
# pylint: disable=unused-variable
ble_parser = BleParser()
sensor_msg, tracker_msg = ble_parser.parse_data(data)
assert sensor_msg["firmware"] == "ATC (Custom)"
assert sensor_msg["type"] == "ATC"
assert sensor_msg["mac"] == "A4C13863378B"
assert sensor_msg["packet"] == 18
assert sensor_msg["data"]
assert sensor_msg["temperature"] == 25.1
assert sensor_msg["humidity"] == 41.15
assert sensor_msg["voltage"] == 2.943
assert sensor_msg["battery"] == 82
assert sensor_msg["rssi"] == -70
def test_atc_custom_encrypted(self):
"""Test ATC parser for ATC custom format (encrypted)."""
self.aeskeys = {}
data_string = "043e1b02010000b2188d38c1a40f0e161a1811d603fbfa7b6dfb1e26fde2"
data = bytes(bytearray.fromhex(data_string))
aeskey = "b9ea895fac7eea6d30532432a516f3a3"
is_ext_packet = True if data[3] == 0x0D else False
mac = (data[8 if is_ext_packet else 7:14 if is_ext_packet else 13])[::-1]
mac_address = mac.hex()
p_mac = bytes.fromhex(mac_address.replace(":", "").lower())
p_key = bytes.fromhex(aeskey.lower())
self.aeskeys[p_mac] = p_key
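        # Register the device's AES key, keyed by its MAC bytes, before constructing the parser.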
# pylint: disable=unused-variable
ble_parser = BleParser(aeskeys=self.aeskeys)
sensor_msg, tracker_msg = ble_parser.parse_data(data)
assert sensor_msg["firmware"] == "ATC (Custom encrypted)"
assert sensor_msg["type"] == "ATC"
assert sensor_msg["mac"] == "A4C1388D18B2"
assert sensor_msg["packet"] == 17
assert sensor_msg["data"]
assert sensor_msg["temperature"] == 23.45
assert sensor_msg["humidity"] == 41.73
assert sensor_msg["voltage"] == 2.749
assert sensor_msg["battery"] == 61
assert sensor_msg["rssi"] == -30
| 2,551 |
1,037 | package com.nostra13.universalimageloader.core.download;
import org.fest.assertions.api.Assertions;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import com.nostra13.universalimageloader.core.download.ImageDownloader.Scheme;
@RunWith(RobolectricTestRunner.class)
public class BaseImageDownloaderTest {
@Test
public void testSchemeHttp() throws Exception {
String uri = "http://image.com/1.png";
Scheme result = Scheme.ofUri(uri);
Scheme expected = Scheme.HTTP;
Assertions.assertThat(result).isEqualTo(expected);
}
@Test
public void testSchemeHttps() throws Exception {
String uri = "https://image.com/1.png";
Scheme result = Scheme.ofUri(uri);
Scheme expected = Scheme.HTTPS;
Assertions.assertThat(result).isEqualTo(expected);
}
@Test
public void testSchemeContent() throws Exception {
String uri = "content://path/to/content";
Scheme result = Scheme.ofUri(uri);
Scheme expected = Scheme.CONTENT;
Assertions.assertThat(result).isEqualTo(expected);
}
@Test
public void testSchemeAssets() throws Exception {
String uri = "assets://folder/1.png";
Scheme result = Scheme.ofUri(uri);
Scheme expected = Scheme.ASSETS;
Assertions.assertThat(result).isEqualTo(expected);
}
@Test
public void testSchemeDrawables() throws Exception {
String uri = "drawable://123456890";
Scheme result = Scheme.ofUri(uri);
Scheme expected = Scheme.DRAWABLE;
Assertions.assertThat(result).isEqualTo(expected);
}
@Test
public void testSchemeFile() throws Exception {
String uri = "file://path/on/the/device/1.png";
Scheme result = Scheme.ofUri(uri);
Scheme expected = Scheme.FILE;
Assertions.assertThat(result).isEqualTo(expected);
}
@Test
public void testSchemeUnknown() throws Exception {
String uri = "other://image.com/1.png";
Scheme result = Scheme.ofUri(uri);
Scheme expected = Scheme.UNKNOWN;
Assertions.assertThat(result).isEqualTo(expected);
}
}
| 707 |
373 | <gh_stars>100-1000
/*
* #%L
* ACS AEM Commons Bundle
* %%
* Copyright (C) 2015 Adobe
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.adobe.acs.commons.httpcache.config.impl.keys;
import com.adobe.acs.commons.httpcache.config.HttpCacheConfig;
import com.adobe.acs.commons.httpcache.keys.AbstractCacheKey;
import com.adobe.acs.commons.httpcache.keys.CacheKey;
import com.day.cq.commons.PathInfo;
import com.google.common.base.Objects;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.request.RequestPathInfo;
/**
* RequestPathCacheKey. Not used currently.
* <p>
* Generated keys contain resource path, selector and extension.
* </p>
*
* @author <EMAIL>
* @since 2018-05-03
*/
public class RequestPathCacheKey extends AbstractCacheKey implements CacheKey {
private static final long serialVersionUID = 1;
private final String selector;
private final String extension;
public RequestPathCacheKey(SlingHttpServletRequest request, HttpCacheConfig cacheConfig) {
super(request, cacheConfig);
RequestPathInfo pathInfo = request.getRequestPathInfo();
selector = pathInfo.getSelectorString();
extension = pathInfo.getExtension();
}
public RequestPathCacheKey(String uri, HttpCacheConfig cacheConfig) {
super(uri, cacheConfig);
RequestPathInfo pathInfo = new PathInfo(uri);
selector = pathInfo.getSelectorString();
extension = pathInfo.getExtension();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
RequestPathCacheKey that = (RequestPathCacheKey) o;
return Objects.equal(getSelector(), that.getSelector())
&& Objects.equal(getExtension(), that.getExtension())
&& Objects.equal(getResourcePath(), that.getResourcePath());
}
@Override
public int hashCode() {
return Objects.hashCode(getSelector(), getExtension(), getResourcePath());
}
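    // Illustrative example: a request for /content/site/home with selector "print" and
    // extension "html" yields the key "/content/site/home.print.html" (see toString() below).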
@Override
public String toString() {
return resourcePath + "." + selector + "." + extension;
}
public String getSelector() {
return selector;
}
public String getExtension() {
return extension;
}
}
| 996 |
2,063 | package com.example.demo.scene.live.huya;
import android.app.Activity;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.EditText;
import android.widget.PopupWindow;
import android.widget.TextView;
import com.effective.R;
import com.effective.android.panel.PanelSwitchHelper;
import com.effective.android.panel.interfaces.listener.OnPanelChangeListener;
import com.effective.android.panel.view.panel.IPanelView;
import com.effective.android.panel.view.panel.PanelView;
import com.example.demo.scene.chat.emotion.EmotionPagerView;
import com.example.demo.scene.chat.emotion.Emotions;
import com.example.demo.util.DisplayUtils;
public class PcHuyaCommentPopWindow extends PopupWindow {
private Activity activity;
private PanelSwitchHelper mHelper;
public PcHuyaCommentPopWindow(final Activity activity) {
super(activity);
this.activity = activity;
final View view = LayoutInflater.from(activity).inflate(R.layout.pop_huya_live_comment_layout, null, false);
setFocusable(true);
setWidth(WindowManager.LayoutParams.MATCH_PARENT);
setHeight(WindowManager.LayoutParams.MATCH_PARENT);
setOutsideTouchable(true);
ColorDrawable dw = new ColorDrawable(Color.TRANSPARENT);
setBackgroundDrawable(dw);
setInputMethodMode(PopupWindow.INPUT_METHOD_NEEDED);
setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE);
setContentView(view);
view.findViewById(R.id.send).setOnClickListener(v -> ((EditText) view.findViewById(R.id.input)).setText(""));
((EditText) view.findViewById(R.id.input)).addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
}
@Override
public void afterTextChanged(Editable s) {
view.findViewById(R.id.send).setEnabled(s.length() != 0);
}
});
if (mHelper == null) {
mHelper = new PanelSwitchHelper.Builder(activity.getWindow(), getContentView())
.addPanelChangeListener(new OnPanelChangeListener() {
@Override
public void onKeyboard() {
getContentView().findViewById(R.id.emotion_btn).setSelected(false);
}
@Override
public void onNone() {
getContentView().findViewById(R.id.emotion_btn).setSelected(false);
dismiss();
}
@Override
public void onPanel(IPanelView panelView) {
getContentView().findViewById(R.id.emotion_btn).setSelected(true);
}
@Override
public void onPanelSizeChange(IPanelView panelView, boolean portrait, int oldWidth, int oldHeight, int width, int height) {
if (panelView instanceof PanelView) {
switch (((PanelView) panelView).getId()) {
case R.id.panel_emotion: {
EmotionPagerView pagerView = getContentView().findViewById(R.id.view_pager);
int viewPagerSize = height - DisplayUtils.dip2px(activity, 30f);
pagerView.buildEmotionViews(
getContentView().findViewById(R.id.pageIndicatorView),
getContentView().findViewById(R.id.input),
Emotions.getEmotions(), width, viewPagerSize);
break;
}
}
}
}
})
.logTrack(true)
.build(false);
}
}
@Override
public void showAtLocation(View parent, int gravity, int x, int y) {
getContentView().findViewById(R.id.emotion_btn).setSelected(false);
getContentView().findViewById(R.id.send).setSelected(true);
((TextView)getContentView().findViewById(R.id.input)).setText("");
super.showAtLocation(parent, gravity, x, y);
}
public void showKeyboard() {
if (mHelper != null) {
mHelper.toKeyboardState();
}
}
@Override
public void dismiss() {
if (mHelper != null && mHelper.hookSystemBackByPanelSwitcher()) {
return;
}
super.dismiss();
}
}
| 2,510 |
410 | {
"authors": [],
"date_download": "2018-01-01T22:41:28",
"date_modify": null,
"date_publish": "2017-12-26T00:00:00",
"description": "<NAME> of Erie, Pa., received a bill of more than $284 billion. The Erie Times News reports that's more than the combined national debts of Hungary and South Africa.",
"filename": "https%3A%2F%2Fwww.npr.org%2F2017%2F12%2F26%2F573464352%2Fhomeowner-questions-exceedingly-high-electric-bill.json",
"image_url": "https://media.npr.org/include/images/facebook-default-wide.jpg?s=1400",
"language": "en",
"localpath": null,
"source_domain": "www.npr.org",
"text": "Homeowner Questions Exceedingly High Electric Bill\n<NAME> of Erie, Pa., received a bill of more than $284 billion. The Erie Times News reports that's more than the combined national debts of Hungary and South Africa.\nDAVID GREENE, HOST:\nGood morning. I'm <NAME>. You ever get the electric bill and wonder why it's so high this month? Well, <NAME> of Erie, Pa., was in that boat. She thought maybe her Christmas lights had done something wacky because her bill was more than $284 billion. As the Erie Times-News reported, that is more than the combined national debts of Hungary and South Africa. The power company acknowledged the mistake and said they really appreciated the customer reaching out.\nCopyright \u00a9 2017 NPR. All rights reserved. Visit our website terms of use and permissions pages at www.npr.org for further information.\nNPR transcripts are created on a rush deadline by Verb8tm, Inc., an NPR contractor, and produced using a proprietary transcription process developed with NPR. This text may not be in its final form and may be updated or revised in the future. Accuracy and availability may vary. The authoritative record of NPR\u2019s programming is the audio record.",
"title": "Homeowner Questions Exceedingly High Electric Bill",
"title_page": null,
"title_rss": null,
"url": "https://www.npr.org/2017/12/26/573464352/homeowner-questions-exceedingly-high-electric-bill",
"newsCluster": {
"CategoryId": 2,
"Category": "business",
"TopicId": 1,
"Topic": "unspecific",
"EventId": 5,
"Event": "woman_gets_284_billion_electric_bil",
"Url": "https://www.npr.org/2017/12/26/573464352/homeowner-questions-exceedingly-high-electric-bill"
},
"dId": "cce7bc80bd4a3d4275807218daeb7b8b84521e90a9a464ce2fd0795f"
} | 746 |
2,225 | {
"fontId": "dejavu-math",
"fontName": "DejaVu Math",
"subsets": ["latin"],
"weights": [400],
"styles": ["normal"],
"defSubset": "latin",
"variable": false,
"lastModified": "2021-08-19",
"version": "v2.37",
"category": "display",
"source": "https://github.com/dejavu-fonts/dejavu-fonts/tree/master",
"license": "https://github.com/dejavu-fonts/dejavu-fonts/blob/master/LICENSE",
"type": "other"
}
| 186 |
592 | //
// MMBounceButton.h
// LooseLeaf
//
// Created by <NAME> on 9/1/13.
// Copyright (c) 2013 Milestone Made, LLC. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface MMBounceButton : UIButton
@property (nonatomic, assign) CGFloat rotation;
- (UIColor*)borderColor;
- (UIColor*)backgroundColor;
- (CGAffineTransform)rotationTransform;
- (void)bounceButton;
- (BOOL)shouldBounce;
@end
| 156 |
4,028 | <reponame>timgates42/python-prompt-toolkit<gh_stars>1000+
#!/usr/bin/env python
"""
Demonstration of a custom completer class and the possibility of styling
completions independently by passing formatted text objects to the "display"
and "display_meta" arguments of "Completion".
"""
from prompt_toolkit.completion import Completer, Completion
from prompt_toolkit.formatted_text import HTML
from prompt_toolkit.shortcuts import CompleteStyle, prompt
animals = [
"alligator",
"ant",
"ape",
"bat",
"bear",
"beaver",
"bee",
"bison",
"butterfly",
"cat",
"chicken",
"crocodile",
"dinosaur",
"dog",
"dolphin",
"dove",
"duck",
"eagle",
"elephant",
]
animal_family = {
"alligator": "reptile",
"ant": "insect",
"ape": "mammal",
"bat": "mammal",
"bear": "mammal",
"beaver": "mammal",
"bee": "insect",
"bison": "mammal",
"butterfly": "insect",
"cat": "mammal",
"chicken": "bird",
"crocodile": "reptile",
"dinosaur": "reptile",
"dog": "mammal",
"dolphin": "mammal",
"dove": "bird",
"duck": "bird",
"eagle": "bird",
"elephant": "mammal",
}
family_colors = {
"mammal": "ansimagenta",
"insect": "ansigreen",
"reptile": "ansired",
"bird": "ansiyellow",
}
meta = {
"alligator": HTML(
"An <ansired>alligator</ansired> is a <u>crocodilian</u> in the genus Alligator of the family Alligatoridae."
),
"ant": HTML(
"<ansired>Ants</ansired> are eusocial <u>insects</u> of the family Formicidae."
),
"ape": HTML(
"<ansired>Apes</ansired> (Hominoidea) are a branch of Old World tailless anthropoid catarrhine <u>primates</u>."
),
"bat": HTML("<ansired>Bats</ansired> are mammals of the order <u>Chiroptera</u>."),
"bee": HTML(
"<ansired>Bees</ansired> are flying <u>insects</u> closely related to wasps and ants."
),
"beaver": HTML(
"The <ansired>beaver</ansired> (genus Castor) is a large, primarily <u>nocturnal</u>, semiaquatic <u>rodent</u>."
),
"bear": HTML(
"<ansired>Bears</ansired> are carnivoran <u>mammals</u> of the family Ursidae."
),
"butterfly": HTML(
"<ansiblue>Butterflies</ansiblue> are <u>insects</u> in the macrolepidopteran clade Rhopalocera from the order Lepidoptera."
),
# ...
}
class AnimalCompleter(Completer):
def get_completions(self, document, complete_event):
word = document.get_word_before_cursor()
for animal in animals:
if animal.startswith(word):
if animal in animal_family:
family = animal_family[animal]
family_color = family_colors.get(family, "default")
display = HTML(
"%s<b>:</b> <ansired>(<"
+ family_color
+ ">%s</"
+ family_color
+ ">)</ansired>"
) % (animal, family)
else:
display = animal
yield Completion(
animal,
start_position=-len(word),
display=display,
display_meta=meta.get(animal),
)
def main():
# Simple completion menu.
print("(The completion menu displays colors.)")
prompt("Type an animal: ", completer=AnimalCompleter())
# Multi-column menu.
prompt(
"Type an animal: ",
completer=AnimalCompleter(),
complete_style=CompleteStyle.MULTI_COLUMN,
)
# Readline-like
prompt(
"Type an animal: ",
completer=AnimalCompleter(),
complete_style=CompleteStyle.READLINE_LIKE,
)
if __name__ == "__main__":
main()
| 1,815 |
634 | <filename>modules/boot/container-api/src/main/java/consulo/container/plugin/util/PlatformServiceLocator.java
/*
* Copyright 2013-2019 consulo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package consulo.container.plugin.util;
import consulo.container.StartupError;
import consulo.container.plugin.PluginDescriptor;
import consulo.container.plugin.PluginIds;
import consulo.container.plugin.PluginManager;
import javax.annotation.Nonnull;
import java.util.Iterator;
import java.util.ServiceLoader;
/**
* @author VISTALL
* @since 2019-07-25
*/
public class PlatformServiceLocator {
@Nonnull
public static <T> T findImplementation(@Nonnull Class<T> interfaceClass) {
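    // First, try implementations visible to the interface's own class loader.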
for (T value : ServiceLoader.load(interfaceClass, interfaceClass.getClassLoader())) {
return value;
}
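    // Otherwise, fall back to scanning the class loaders of platform-implementation plugins.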
for (PluginDescriptor descriptor : PluginManager.getPlugins()) {
if (PluginIds.isPlatformImplementationPlugin(descriptor.getPluginId())) {
ServiceLoader<T> loader = ServiceLoader.load(interfaceClass, descriptor.getPluginClassLoader());
Iterator<T> iterator = loader.iterator();
if (iterator.hasNext()) {
return iterator.next();
}
}
}
throw new StartupError("Can't find platform implementation: " + interfaceClass);
}
}
| 548 |
4,807 | <reponame>jamespeapen/polybar
#pragma once
#include "common.hpp"
POLYBAR_NS
namespace scope_util {
/**
   * Creates a wrapper that will trigger the given callback when
* leaving the object's scope (i.e, when it gets destroyed)
*
* Example usage:
* @code cpp
* {
* on_exit handler([]{ ... });
* ...
* }
* @endcode
*/
class on_exit {
public:
on_exit(const function<void(void)>& fn) : m_callback(fn) {}
virtual ~on_exit() {
m_callback();
}
protected:
function<void(void)> m_callback;
};
} // namespace scope_util
POLYBAR_NS_END
| 257 |
1,936 | #include <algorithm>
#include <functional>
#include <vector>
#include <Eigen/Core>
#include <maplab-common/test/testing-entrypoint.h>
#include <maplab-common/test/testing-predicates.h>
#include <product-quantization/learn-product-quantization.h>
namespace product_quantization {
TEST(LearnProductQuantizationTest, EigenvalueAllocationWorks) {
Eigen::MatrixXf rotation_matrix;
rotation_matrix.setIdentity(6, 6);
std::vector<float> variances = {3.0, 1.0, 2.0, 4.0, 7.0, 1.5};
Eigen::MatrixXf expected_matrix_one_component(6, 6);
expected_matrix_one_component << 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0;
Eigen::MatrixXf permutated_rotation;
EigenvalueAllocation(rotation_matrix, variances, 1, &permutated_rotation);
EXPECT_NEAR_EIGEN(expected_matrix_one_component, permutated_rotation, 0.0);
Eigen::MatrixXf expected_matrix_two_components(6, 6);
expected_matrix_two_components << 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0;
EigenvalueAllocation(rotation_matrix, variances, 2, &permutated_rotation);
EXPECT_NEAR_EIGEN(expected_matrix_two_components, permutated_rotation, 0.0);
Eigen::MatrixXf expected_matrix_three_components(6, 6);
expected_matrix_three_components << 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0;
EigenvalueAllocation(rotation_matrix, variances, 3, &permutated_rotation);
EXPECT_NEAR_EIGEN(expected_matrix_three_components, permutated_rotation, 0.0);
}
TEST(LearnProductQuantizationTest, ComputePCARotationWorks) {
Eigen::MatrixXf data_points(3, 7);
data_points << 0.16218, 0.52853, 0.26297, 0.74815, 0.22898, 0.82582, 0.078176,
0.79428, 0.16565, 0.65408, 0.45054, 0.91334, 0.53834, 0.44268, 0.31122,
0.60198, 0.68921, 0.083821, 0.15238, 0.99613, 0.10665;
Eigen::MatrixXf expected_rotation(3, 3);
expected_rotation << 0.58313, -0.30308, 0.75372, -0.46414, 0.63716, 0.6153,
0.66673, 0.70864, -0.23088;
std::vector<float> expected_variances = {0.166135221203798, 0.067722866745168,
0.037692582036796};
Eigen::MatrixXf rotation_matrix;
std::vector<float> variances;
ComputePCARotation(data_points, &rotation_matrix, &variances);
// For convenience, we use the EigenvalueAllocation algorithm to sort the
// rotation matrix based on the variances. This simplifies the comparison with
// the expected rotation and variances.
Eigen::MatrixXf sorted_rotation_matrix;
EigenvalueAllocation(rotation_matrix, variances, 1, &sorted_rotation_matrix);
// Correct the signs of the rotation matrix.
for (int i = 0; i < 3; ++i) {
if (sorted_rotation_matrix.row(i).dot(expected_rotation.row(i)) < 0.0) {
sorted_rotation_matrix.row(i) *= -1.0f;
}
}
EXPECT_NEAR_EIGEN(expected_rotation, sorted_rotation_matrix, 1e-4);
// Compares the eigenvalues / variances.
std::sort(variances.begin(), variances.end(), std::greater<float>());
EXPECT_NEAR(expected_variances[0], variances[0], 1e-3);
EXPECT_NEAR(expected_variances[1], variances[1], 1e-3);
EXPECT_NEAR(expected_variances[2], variances[2], 1e-3);
}
} // namespace product_quantization
MAPLAB_UNITTEST_ENTRYPOINT
| 1,674 |
347 | /*
* Copyright oVirt Authors
* SPDX-License-Identifier: Apache-2.0
*/
package org.ovirt.engine.api.restapi.resource.openstack;
import javax.ws.rs.core.Response;
import org.ovirt.engine.api.model.OpenstackVolumeAuthenticationKey;
import org.ovirt.engine.api.resource.openstack.OpenstackVolumeAuthenticationKeyResource;
import org.ovirt.engine.api.restapi.resource.AbstractBackendActionableResource;
import org.ovirt.engine.core.common.businessentities.storage.LibvirtSecret;
public class BackendOpenStackVolumeAuthenticationKeyResource
extends AbstractBackendActionableResource<OpenstackVolumeAuthenticationKey, LibvirtSecret>
implements OpenstackVolumeAuthenticationKeyResource {
private String providerId;
protected BackendOpenStackVolumeAuthenticationKeyResource(String providerId, String id) {
super(id, OpenstackVolumeAuthenticationKey.class, LibvirtSecret.class);
this.providerId = providerId;
}
@Override
public OpenstackVolumeAuthenticationKey get() {
throw new UnsupportedOperationException("Cinder integration replaced by Managed Block Storage.\n"
+ "Please use Managed Block Storage for creating Cinderlib based storage domain.");
}
@Override
public OpenstackVolumeAuthenticationKey update(OpenstackVolumeAuthenticationKey resource) {
throw new UnsupportedOperationException("Cinder integration replaced by Managed Block Storage.\n"
+ "Please use Managed Block Storage for creating Cinderlib based storage domain.");
}
@Override
public Response remove() {
throw new UnsupportedOperationException("Cinder integration replaced by Managed Block Storage.\n"
+ "Please use Managed Block Storage for creating Cinderlib based storage domain.");
}
}
| 556 |
409 | <gh_stars>100-1000
package org.cloud.sonic.controller.feign;
import com.alibaba.fastjson.JSONObject;
import org.cloud.sonic.common.http.RespModel;
import org.cloud.sonic.controller.feign.fallback.TransportFeignClientFallBack;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
/**
* @author ZhouYiXun
 * @des Calls the transport module's interfaces through Feign load balancing
* @date 2021/8/21 16:51
*/
@FeignClient(value = "sonic-server-transport", fallback = TransportFeignClientFallBack.class)
public interface TransportFeignClient {
@PostMapping("/exchange/sendTestData")
RespModel sendTestData(@RequestBody JSONObject jsonObject);
}
| 270 |
478 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#pragma once
#include "AdaptiveImageRenderer.g.h"
#include "Image.h"
namespace winrt::AdaptiveCards::Rendering::Uwp::implementation
{
struct AdaptiveImageRenderer : AdaptiveImageRendererT<AdaptiveImageRenderer>
{
public:
AdaptiveImageRenderer(){};
AdaptiveImageRenderer(winrt::com_ptr<::AdaptiveCards::Rendering::Uwp::XamlBuilder> xamlBuilder);
winrt::UIElement Render(winrt::IAdaptiveCardElement const& cardElement,
winrt::AdaptiveRenderContext const& renderContext,
winrt::AdaptiveRenderArgs const& renderArgs);
private:
winrt::com_ptr<::AdaptiveCards::Rendering::Uwp::XamlBuilder> m_xamlBuilder;
};
}
namespace winrt::AdaptiveCards::Rendering::Uwp::factory_implementation
{
struct AdaptiveImageRenderer : AdaptiveImageRendererT<AdaptiveImageRenderer, implementation::AdaptiveImageRenderer>
{
};
}
| 451 |
14,668 | <reponame>zealoussnow/chromium
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_WEBUI_UTIL_IMAGE_UTIL_H_
#define CHROME_BROWSER_UI_WEBUI_UTIL_IMAGE_UTIL_H_
#include "base/containers/span.h"
#include "base/strings/string_piece_forward.h"
namespace gfx {
class ImageSkia;
} // namespace gfx
namespace webui {
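// Builds a data: URI (e.g. "data:image/png;base64,..." for a "png" subtype)
// from the raw image bytes; the exact encoding is inferred from the name and
// signature rather than stated in this header.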
std::string MakeDataURIForImage(base::span<const uint8_t> image_data,
base::StringPiece mime_subtype);
std::string EncodePNGAndMakeDataURI(gfx::ImageSkia image, float scale_factor);
} // namespace webui
#endif // CHROME_BROWSER_UI_WEBUI_UTIL_IMAGE_UTIL_H_
| 299 |
446 | # Generated by Django 2.0 on 2018-07-29 01:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("events", "0036_auto_20180701_2148")]
operations = [
migrations.AddField(
model_name="organization",
name="cover_img",
field=models.ImageField(
blank=True,
null=True,
upload_to="org_covers",
verbose_name="Cover Image",
),
),
migrations.AddField(
model_name="organization",
name="description",
field=models.TextField(blank=True, null=True),
),
]
| 344 |
3,348 | <reponame>wromansky/incubator-heron<filename>heron/uploaders/src/java/org/apache/heron/uploader/hdfs/HdfsUploader.java
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.heron.uploader.hdfs;
import java.io.File;
import java.net.URI;
import java.util.logging.Logger;
import org.apache.heron.common.basics.TypeUtils;
import org.apache.heron.spi.common.Config;
import org.apache.heron.spi.common.Context;
import org.apache.heron.spi.uploader.IUploader;
import org.apache.heron.spi.uploader.UploaderException;
import org.apache.heron.spi.utils.UploaderUtils;
public class HdfsUploader implements IUploader {
private static final Logger LOG = Logger.getLogger(HdfsUploader.class.getName());
// get the directory containing the file
private String destTopologyDirectoryURI;
private Config config;
private String topologyPackageLocation;
private URI packageURI;
// The controller on HDFS
private HdfsController controller;
@Override
public void initialize(Config ipconfig) {
this.config = ipconfig;
// Instantiate the HDFS controller
this.controller = getHdfsController();
this.destTopologyDirectoryURI = HdfsContext.hdfsTopologiesDirectoryURI(config);
// get the original topology package location
this.topologyPackageLocation = Context.topologyPackageFile(config);
    // the destination file name is generated from the topology name and role
String fileName =
UploaderUtils.generateFilename(
Context.topologyName(config), Context.role(config));
packageURI = TypeUtils.getURI(String.format("%s/%s", destTopologyDirectoryURI, fileName));
}
// Utils method
protected HdfsController getHdfsController() {
return new HdfsController(
HdfsContext.hadoopConfigDirectory(config), Context.verbose(config));
}
// Utils method
protected boolean isLocalFileExists(String file) {
return new File(file).isFile();
}
@Override
public URI uploadPackage() throws UploaderException {
// first, check if the topology package exists
if (!isLocalFileExists(topologyPackageLocation)) {
throw new UploaderException(
String.format("Expected topology package file to be uploaded does not exist at '%s'",
topologyPackageLocation));
}
// if the dest directory does not exist, create it.
if (!controller.exists(destTopologyDirectoryURI)) {
LOG.info(String.format(
"The destination directory does not exist. Creating it now at URI '%s'",
destTopologyDirectoryURI));
if (!controller.mkdirs(destTopologyDirectoryURI)) {
throw new UploaderException(
String.format("Failed to create directory for topology package at URI '%s'",
destTopologyDirectoryURI));
}
} else {
// if the destination file exists, write a log message
LOG.info(String.format("Target topology file already exists at '%s'. Overwriting it now",
packageURI.toString()));
}
// copy the topology package to target working directory
LOG.info(String.format("Uploading topology package at '%s' to target HDFS at '%s'",
topologyPackageLocation, packageURI.toString()));
if (!controller.copyFromLocalFile(topologyPackageLocation, packageURI.toString())) {
throw new UploaderException(
String.format("Failed to upload the topology package at '%s' to: '%s'",
topologyPackageLocation, packageURI.toString()));
}
return packageURI;
}
@Override
public boolean undo() {
return controller.delete(packageURI.toString());
}
@Override
public void close() {
// Nothing to do here
}
}
| 1,427 |
403 | package com.xcompany.xproject.auth.server;
import javax.servlet.http.HttpServletResponse;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.annotation.Order;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter;
@Configuration
@EnableResourceServer
//@EnableWebSecurity
@EnableGlobalMethodSecurity(prePostEnabled = true)
@Order(2)
public class OAuth2ResourceServerConfig extends ResourceServerConfigurerAdapter {
// @Autowired
// @Qualifier("authenticationManagerBean")
// private AuthenticationManager authenticationManager;
/*@Autowired
private DataSource dataSource;*/
/*@Bean
public TokenStore tokenStore() {
return new JdbcTokenStore(dataSource);
}*/
// @Autowired
// private TokenStore tokenStore;
// @Override
// public void configure(ResourceServerSecurityConfigurer resources)
// throws Exception {
// resources.tokenStore(tokenStore());
// }
// @Autowired
// RedisConnectionFactory redisConnectionFactory;
//
// @Bean
// public TokenStore tokenStore() {
// return new RedisTokenStore(redisConnectionFactory);
// }
//
// @Override
// public void configure(ResourceServerSecurityConfigurer resources)
// throws Exception {
// resources.tokenStore(tokenStore());
// }
// @Override
// public void configure(ResourceServerSecurityConfigurer resources) {
// //resources.resourceId(DEMO_RESOURCE_ID).stateless(true);
// //resources.stateless(true);
// resources.authenticationManager(authenticationManager);
// }
@Override
public void configure(HttpSecurity http) throws Exception {
//http.setSharedObject(AuthenticationManager.class, authenticationManager);
// http.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS);
http
//.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.IF_REQUIRED)
//.and()
//.csrf().disable()
//.and()
.exceptionHandling()
.authenticationEntryPoint((request, response, authException) -> response.sendError(HttpServletResponse.SC_UNAUTHORIZED))
// .and()
// .requestMatchers()
// .antMatchers("/user")
.and()
.authorizeRequests()
//.antMatchers("/login").anonymous()
//.antMatchers("/login").permitAll()
.anyRequest().authenticated();
// http
// .authorizeRequests()
// .antMatchers(HttpMethod.GET, "/**").access("#oauth2.hasScope('read')")
// .antMatchers(HttpMethod.POST, "/**").access("#oauth2.hasScope('write')")
// .antMatchers(HttpMethod.PATCH, "/**").access("#oauth2.hasScope('write')")
// .antMatchers(HttpMethod.PUT, "/**").access("#oauth2.hasScope('write')")
// .antMatchers(HttpMethod.DELETE, "/**").access("#oauth2.hasScope('write')")
// .antMatchers(HttpMethod.OPTIONS, "/**").permitAll();
}
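    // Illustrative only (not part of the original configuration): with
    // prePostEnabled = true above, individual controller or service methods can be
    // guarded with SpEL expressions, for example:
    //   @PreAuthorize("hasRole('ADMIN')")
    //   public void deleteAccount(String accountId) { ... }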
/*@Bean
@Primary
public Boolean oauth2StatelessSecurityContext() {
return Boolean.FALSE;
}*/
}
| 1,291 |
328 | /*
* This file is part of GreatFET
*
* GPIO Interrupt support.
*/
#include "gpio_int.h"
#include <libopencm3/lpc43xx/m4/nvic.h>
#include <drivers/arm_vectors.h>
// Maps IRQs to their interrupts.
static const int irq_for_interrupt[] = {
NVIC_PIN_INT0_IRQ,
NVIC_PIN_INT1_IRQ,
NVIC_PIN_INT2_IRQ,
NVIC_PIN_INT3_IRQ,
NVIC_PIN_INT4_IRQ,
NVIC_PIN_INT5_IRQ,
NVIC_PIN_INT6_IRQ,
NVIC_PIN_INT7_IRQ,
};
/**
* Configures a GPIO pin change interrupt in the NVIC, but does not configure
 * it in the SCU or GPIO interrupt block.
*
* @param interrupt_number The GPIO INT#, from 0-7.
* @param isr The interrupt service routine that will be invoked when the IRQ is raised.
* @param interrupt_priority The interrupt priority. Takes the same format as nvic_set_priority.
*/
static void _nvic_set_up_pin_change_interrupt(int interrupt_number,
void *isr, int interrupt_priority)
{
// Determine the NVIC IRQ line tied to our GPIO interrupt.
uint8_t irq = irq_for_interrupt[interrupt_number];
// Set the location of our interrupt handler, and the interrupt priority.
vector_table.irqs[irq] = isr;
nvic_set_priority(irq, interrupt_priority);
}
/**
* Routes a GPIO pin through the SCU to the GPIO interrupt block.
*
* @param interrupt_number The input in the interrupt block to which the GPIO should be routed.
* @param port_number The number of the port whose signal is to be routed.
* @param pin_number The pin number of the signal to be routed.
*/
static void _route_gpio_to_interrupt_block(int interrupt_number,
int port_number, int pin_number)
{
uint32_t offset_into_register;
volatile uint32_t *interrupt_select_register;
uint32_t value_to_write;
// Sanitize the port and pin numbers, ensuring they don't touch
// unintended parts of the register.
port_number &= 0x0F;
pin_number &= 0x0F;
// Determine the location of the SCU_PINTSEL registers that handle routing of
// GPIO signals to the GPIO interrupt block.
interrupt_select_register = (interrupt_number > 3) ? &SCU_PINTSEL1 : &SCU_PINTSEL0;
offset_into_register = (interrupt_number % 4) * 8;
// Figure out the value that will need to be written into the given register...
value_to_write = (port_number << 5) | pin_number;
// ... and write that into the given register section.
*interrupt_select_register &= ~(0xFF << offset_into_register);
*interrupt_select_register |= (value_to_write << offset_into_register);
}
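// Worked example (illustrative): routing GPIO5[9] to pin interrupt 6 selects
// SCU_PINTSEL1 (since 6 > 3), uses bit offset (6 % 4) * 8 = 16, and writes
// (5 << 5) | 9 = 0xA9 into bits [23:16] of that register.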
/**
* Configures a pin change interrupt inside the GPIO interrupt block.
*
* @param interrupt_number The interrupt number to configure.
* @param sensitivity Determines whether the interrupt will be level or edge sensitive.
*/
static void _gpio_set_up_pin_change_interrupt(int interrupt_number,
gpio_interrupt_sensitivity_t sensitivity)
{
// Create a mask for configuration of the GPIO interrupt block.
const uint32_t mask = (1 << interrupt_number);
// Set the interrupt as either level or edge sensitive.
if((sensitivity == LEVEL_SENSITIVE_LOW) || (sensitivity == LEVEL_SENSITIVE_HIGH)) {
GPIO_PIN_INTERRUPT_ISEL |= mask; // level sensitive
} else {
GPIO_PIN_INTERRUPT_ISEL &= ~mask; // edge sensitive
}
	// Enable the various types of interrupt depending on the requested sensitivity.
if(sensitivity == EDGE_SENSITIVE_BOTH) {
GPIO_PIN_INTERRUPT_SIENR = mask; // trigger on rising edges
GPIO_PIN_INTERRUPT_SIENF = mask; // and also on falling edges
}
else if((sensitivity == EDGE_SENSITIVE_RISING) || (sensitivity == LEVEL_SENSITIVE_HIGH)) {
GPIO_PIN_INTERRUPT_SIENR = mask; // trigger on rising edges / high levels
GPIO_PIN_INTERRUPT_CIENF = mask; // don't trigger on falling edges / low levels
}
else {
GPIO_PIN_INTERRUPT_CIENR = mask; // don't trigger on rising edges / high levels
GPIO_PIN_INTERRUPT_SIENF = mask; // trigger on falling edges / low levels
}
}
/**
* Configures a (non-group) GPIO pin-change interrupt. This does not inherently set
* up the SCU to use the given pin as a GPIO, nor does this set the given GPIO to input mode.
 * These should likely be done independently before calling this function. This function leaves
 * the relevant interrupt disabled; if you want to use it, you'll need to call gpio_interrupt_enable.
*
 * @param gpio_int_number The GPIO INT#, from 0-7. This value has no relation to the GPIO pin,
* but instead indicates which of eight equivalent resources will be used.
* @param port_number The number of the port whose signal is to be routed.
* @param pin_number The pin number of the signal to be routed.
* @param sensitivity Determines whether the interrupt will be level or edge sensitive.
* @param isr The interrupt service routine that will be invoked when the IRQ is raised.
* @param interrupt_priority The interrupt priority. Takes the same format as nvic_set_priority.
*/
void gpio_interrupt_configure(int gpio_int_number, int port_number,
int pin_number, gpio_interrupt_sensitivity_t sensitivity,
void *isr, int interrupt_priority)
{
// Ensure the interrupt's not enabled while we change it.
gpio_interrupt_disable(gpio_int_number);
// Set the interrupt priority and masking in the NVIC,
// as well as the interrupt handler.
_nvic_set_up_pin_change_interrupt(gpio_int_number, isr, interrupt_priority);
	// Set the SCU to route pin-change interrupt requests from the
	// relevant pin to the GPIO interrupt block.
_route_gpio_to_interrupt_block(gpio_int_number, port_number, pin_number);
// Set up the interrupt trigger conditions on the GPIO pin itself.
_gpio_set_up_pin_change_interrupt(gpio_int_number, sensitivity);
}
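/*
 * Example usage (illustrative only; the port, pin, priority and handler names
 * below are made up and are not part of this driver):
 *
 *     void button_isr(void);
 *
 *     // Route GPIO2[7] to pin interrupt 0, trigger on rising edges, then enable it.
 *     gpio_interrupt_configure(0, 2, 7, EDGE_SENSITIVE_RISING, button_isr, 32);
 *     gpio_interrupt_enable(0);
 */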
/**
* Enables a given GPIO interrupt previously configured e.g. with gpio_interrupt_configure.
*
* @param interrupt_number The GPIO INT#, from 0-7. This value has no relation to the GPIO pin,
* but instead indicates which of eight equivalent resources will be used.
*/
void gpio_interrupt_enable(int interrupt_number)
{
// Determine the NVIC IRQ line tied to our GPIO interrupt...
uint8_t irq = irq_for_interrupt[interrupt_number];
// ... and enable the interrupt at the requested priority.
nvic_enable_irq(irq);
}
/**
* Disables a given GPIO interrupt previously configured e.g. with gpio_interrupt_configure.
*
* @param interrupt_number The GPIO INT#, from 0-7. This value has no relation to the GPIO pin,
* but instead indicates which of eight equivalent resources will be used.
*/
void gpio_interrupt_disable(int interrupt_number)
{
// Determine the NVIC IRQ line tied to our GPIO interrupt...
uint8_t irq = irq_for_interrupt[interrupt_number];
	// ... and disable the interrupt.
nvic_disable_irq(irq);
}
| 2,239 |
918 | // This is a generated file! Please edit source .ksy file and use
// kaitai-struct-compiler to rebuild
#include "kaitai/elf.h"
namespace veles {
namespace kaitai {
namespace elf {
elf_t::elf_t(kaitai::kstream* p_io, kaitai::kstruct* p_parent, elf_t* p_root)
: kaitai::kstruct(p_io) {
m__io->pushName("_parent");
m__parent = p_parent;
m__io->popName();
m__io->pushName("_root");
m__root = this;
m__io->popName();
veles_obj = m__io->startChunk("elf");
f_program_headers = false;
f_section_headers = false;
f_strings = false;
m__io->pushName("file_header");
m_file_header = new file_header_t(m__io, this, m__root);
m__io->popName();
m__io->endChunk();
}
elf_t::~elf_t() {
delete m_file_header;
delete m__skip_me_program_headers;
for (std::vector<program_header_t*>::iterator it = m_program_headers->begin();
it != m_program_headers->end(); ++it) {
delete *it;
}
delete m_program_headers;
delete m__skip_me_section_headers;
for (std::vector<section_header_t*>::iterator it = m_section_headers->begin();
it != m_section_headers->end(); ++it) {
delete *it;
}
delete m_section_headers;
if (f_strings) {
delete m__io__skip_me_strings;
delete m_strings;
}
}
elf_t::file_header_t::file_header_t(kaitai::kstream* p_io, elf_t* p_parent,
elf_t* p_root)
: kaitai::kstruct(p_io) {
m__io->pushName("_parent");
m__parent = p_parent;
m__io->popName();
m__io->pushName("_root");
m__root = p_root;
m__io->popName();
veles_obj = m__io->startChunk("file_header");
m__io->pushName("m_magic" + 2);
m_magic = m__io->ensure_fixed_contents(std::string("\x7F\x45\x4C\x46", 4));
m__io->popName();
m__io->pushName("bits");
m_bits = static_cast<elf_t::bits_t>(m__io->read_u1());
m__io->popName();
m__io->pushName("endian");
m_endian = static_cast<elf_t::endian_t>(m__io->read_u1());
m__io->popName();
m__io->pushName("ei_version");
m_ei_version = m__io->read_u1();
m__io->popName();
m__io->pushName("abi");
m_abi = static_cast<elf_t::os_abi_t>(m__io->read_u1());
m__io->popName();
m__io->pushName("abi_version");
m_abi_version = m__io->read_u1();
m__io->popName();
m__io->pushName("pad");
m_pad = m__io->read_bytes(7);
m__io->popName();
m__io->pushName("e_type");
m_e_type = static_cast<elf_t::obj_type_t>(m__io->read_u2le());
m__io->popName();
m__io->pushName("machine");
m_machine = static_cast<elf_t::machine_t>(m__io->read_u2le());
m__io->popName();
m__io->pushName("e_version");
m_e_version = m__io->read_u4le();
m__io->popName();
switch (bits()) {
case BITS_B32:
m__io->pushName("entry_point");
m_entry_point = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("entry_point");
m_entry_point = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (bits()) {
case BITS_B32:
m__io->pushName("program_header_offset");
m_program_header_offset = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("program_header_offset");
m_program_header_offset = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (bits()) {
case BITS_B32:
m__io->pushName("section_header_offset");
m_section_header_offset = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("section_header_offset");
m_section_header_offset = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
m__io->pushName("flags");
m_flags = m__io->read_bytes(4);
m__io->popName();
m__io->pushName("e_ehsize");
m_e_ehsize = m__io->read_u2le();
m__io->popName();
m__io->pushName("program_header_entry_size");
m_program_header_entry_size = m__io->read_u2le();
m__io->popName();
m__io->pushName("qty_program_header");
m_qty_program_header = m__io->read_u2le();
m__io->popName();
m__io->pushName("section_header_entry_size");
m_section_header_entry_size = m__io->read_u2le();
m__io->popName();
m__io->pushName("qty_section_header");
m_qty_section_header = m__io->read_u2le();
m__io->popName();
m__io->pushName("section_names_idx");
m_section_names_idx = m__io->read_u2le();
m__io->popName();
m__io->endChunk();
}
elf_t::file_header_t::~file_header_t() {}
elf_t::program_header_t::program_header_t(kaitai::kstream* p_io,
elf_t* p_parent, elf_t* p_root)
: kaitai::kstruct(p_io) {
m__io->pushName("_parent");
m__parent = p_parent;
m__io->popName();
m__io->pushName("_root");
m__root = p_root;
m__io->popName();
veles_obj = m__io->startChunk("program_header");
m__io->pushName("type");
m_type = static_cast<elf_t::ph_type_t>(m__io->read_u4le());
m__io->popName();
n_flags64 = true;
if (_root()->file_header()->bits() == BITS_B64) {
n_flags64 = false;
m__io->pushName("flags64");
m_flags64 = m__io->read_u4le();
m__io->popName();
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("offset");
m_offset = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("offset");
m_offset = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("vaddr");
m_vaddr = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("vaddr");
m_vaddr = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("paddr");
m_paddr = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("paddr");
m_paddr = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("filesz");
m_filesz = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("filesz");
m_filesz = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("memsz");
m_memsz = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("memsz");
m_memsz = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
n_flags32 = true;
if (_root()->file_header()->bits() == BITS_B32) {
n_flags32 = false;
m__io->pushName("flags32");
m_flags32 = m__io->read_u4le();
m__io->popName();
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("align");
m_align = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("align");
m_align = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
m__io->endChunk();
}
elf_t::program_header_t::~program_header_t() {}
elf_t::section_header_t::section_header_t(kaitai::kstream* p_io,
elf_t* p_parent, elf_t* p_root)
: kaitai::kstruct(p_io) {
m__io->pushName("_parent");
m__parent = p_parent;
m__io->popName();
m__io->pushName("_root");
m__root = p_root;
m__io->popName();
veles_obj = m__io->startChunk("section_header");
f_body = false;
f_name = false;
m__io->pushName("name_offset");
m_name_offset = m__io->read_u4le();
m__io->popName();
m__io->pushName("type");
m_type = static_cast<elf_t::sh_type_t>(m__io->read_u4le());
m__io->popName();
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("flags");
m_flags = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("flags");
m_flags = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("addr");
m_addr = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("addr");
m_addr = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("offset");
m_offset = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("offset");
m_offset = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("size");
m_size = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("size");
m_size = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
m__io->pushName("linked_section_idx");
m_linked_section_idx = m__io->read_u4le();
m__io->popName();
m__io->pushName("info");
m_info = m__io->read_bytes(4);
m__io->popName();
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("align");
m_align = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("align");
m_align = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
switch (_root()->file_header()->bits()) {
case BITS_B32:
m__io->pushName("entry_size");
m_entry_size = m__io->read_u4le();
m__io->popName();
break;
case BITS_B64:
m__io->pushName("entry_size");
m_entry_size = m__io->read_u8le();
m__io->popName();
break;
default:
break;
}
m__io->endChunk();
}
elf_t::section_header_t::~section_header_t() {}
std::vector<uint8_t> elf_t::section_header_t::body() {
if (f_body) {
return m_body;
}
m__io->pushName("body");
kaitai::kstream* io = _root()->_io();
auto saved_io = io;
auto saved_veles_obj = veles_obj;
io = new kaitai::kstream(saved_io->blob(), offset(), veles_obj, 0,
saved_io->error());
veles_obj = io->startChunk(saved_io->currentName());
m__io->pushName("body");
m_body = io->read_bytes(size());
m__io->popName();
io->endChunk();
delete io;
veles_obj = saved_veles_obj;
f_body = true;
m__io->popName();
return m_body;
}
std::string elf_t::section_header_t::name() {
if (f_name) {
return m_name;
}
m__io->pushName("name");
kaitai::kstream* io = _root()->strings()->_io();
auto saved_io = io;
auto saved_veles_obj = veles_obj;
io = new kaitai::kstream(saved_io->blob(), name_offset(), veles_obj, 0,
saved_io->error());
veles_obj = io->startChunk(saved_io->currentName());
m__io->pushName("name");
m_name = io->read_strz("ASCII", 0, false, true, true);
m__io->popName();
io->endChunk();
delete io;
veles_obj = saved_veles_obj;
f_name = true;
m__io->popName();
return m_name;
}
elf_t::strings_t::strings_t(kaitai::kstream* p_io, elf_t* p_parent,
elf_t* p_root)
: kaitai::kstruct(p_io) {
m__io->pushName("_parent");
m__parent = p_parent;
m__io->popName();
m__io->pushName("_root");
m__root = p_root;
m__io->popName();
veles_obj = m__io->startChunk("strings");
m_entries = new std::vector<std::string>();
while (!m__io->is_eof()) {
m__io->pushName("entries");
m_entries->push_back(m__io->read_strz("ASCII", 0, false, true, true));
m__io->popName();
}
m__io->endChunk();
}
elf_t::strings_t::~strings_t() { delete m_entries; }
std::vector<elf_t::program_header_t*>* elf_t::program_headers() {
if (f_program_headers) {
return m_program_headers;
}
m__io->pushName("program_headers");
auto saved_io = m__io;
auto saved_veles_obj = veles_obj;
m__io = new kaitai::kstream(saved_io->blob(),
file_header()->program_header_offset(), veles_obj,
0, saved_io->error());
veles_obj = m__io->startChunk(saved_io->currentName());
int l_program_headers = file_header()->qty_program_header();
m__skip_me_program_headers = new std::vector<std::vector<uint8_t>>();
m__skip_me_program_headers->reserve(l_program_headers);
m_program_headers = new std::vector<program_header_t*>();
m_program_headers->reserve(l_program_headers);
for (int i = 0; i < l_program_headers; i++) {
m__io->pushName("_skip_me_program_headers");
m__skip_me_program_headers->push_back(
m__io->read_bytes(file_header()->program_header_entry_size()));
m__io->popName();
m__io->pushName(
"m__skip_me_program_headers->at(m__skip_me_program_headers->size() - "
"1)" +
3);
m__io__skip_me_program_headers = new kaitai::kstream(
m__io->blob(),
m__io->pos() - m__skip_me_program_headers
->at(m__skip_me_program_headers->size() - 1)
.size(),
veles_obj, m__io->pos(), m__io->error());
m__io->popName();
m__io->pushName("program_headers");
m_program_headers->push_back(
new program_header_t(m__io__skip_me_program_headers, this, m__root));
m__io->popName();
}
m__io->endChunk();
delete m__io;
veles_obj = saved_veles_obj;
m__io = saved_io;
f_program_headers = true;
m__io->popName();
return m_program_headers;
}
std::vector<elf_t::section_header_t*>* elf_t::section_headers() {
if (f_section_headers) {
return m_section_headers;
}
m__io->pushName("section_headers");
auto saved_io = m__io;
auto saved_veles_obj = veles_obj;
m__io = new kaitai::kstream(saved_io->blob(),
file_header()->section_header_offset(), veles_obj,
0, saved_io->error());
veles_obj = m__io->startChunk(saved_io->currentName());
int l_section_headers = file_header()->qty_section_header();
m__skip_me_section_headers = new std::vector<std::vector<uint8_t>>();
m__skip_me_section_headers->reserve(l_section_headers);
m_section_headers = new std::vector<section_header_t*>();
m_section_headers->reserve(l_section_headers);
for (int i = 0; i < l_section_headers; i++) {
m__io->pushName("_skip_me_section_headers");
m__skip_me_section_headers->push_back(
m__io->read_bytes(file_header()->section_header_entry_size()));
m__io->popName();
m__io->pushName(
"m__skip_me_section_headers->at(m__skip_me_section_headers->size() - "
"1)" +
3);
m__io__skip_me_section_headers = new kaitai::kstream(
m__io->blob(),
m__io->pos() - m__skip_me_section_headers
->at(m__skip_me_section_headers->size() - 1)
.size(),
veles_obj, m__io->pos(), m__io->error());
m__io->popName();
m__io->pushName("section_headers");
m_section_headers->push_back(
new section_header_t(m__io__skip_me_section_headers, this, m__root));
m__io->popName();
}
m__io->endChunk();
delete m__io;
veles_obj = saved_veles_obj;
m__io = saved_io;
f_section_headers = true;
m__io->popName();
return m_section_headers;
}
elf_t::strings_t* elf_t::strings() {
if (f_strings) {
return m_strings;
}
m__io->pushName("strings");
auto saved_io = m__io;
auto saved_veles_obj = veles_obj;
m__io = new kaitai::kstream(
saved_io->blob(),
section_headers()->at(file_header()->section_names_idx())->offset(),
veles_obj, 0, saved_io->error());
veles_obj = m__io->startChunk(saved_io->currentName());
m__io->pushName("_skip_me_strings");
m__skip_me_strings = m__io->read_bytes(
section_headers()->at(file_header()->section_names_idx())->size());
m__io->popName();
m__io->pushName("m__skip_me_strings" + 3);
m__io__skip_me_strings = new kaitai::kstream(
m__io->blob(), m__io->pos() - m__skip_me_strings.size(), veles_obj,
m__io->pos(), m__io->error());
m__io->popName();
m__io->pushName("strings");
m_strings = new strings_t(m__io__skip_me_strings, this, m__root);
m__io->popName();
m__io->endChunk();
delete m__io;
veles_obj = saved_veles_obj;
m__io = saved_io;
f_strings = true;
m__io->popName();
return m_strings;
}
} // namespace elf
} // namespace kaitai
} // namespace veles
| 7,824 |
631 | <reponame>767248371/octo-rpc
/**
* Autogenerated by Thrift Compiler (0.8.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
#include "echo_constants.h"
namespace echo {
const echoConstants g_echo_constants;
echoConstants::echoConstants() {
}
} // namespace
| 111 |
2,338 | <filename>clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntp.c
// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// REQUIRES: aarch64-registered-target
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -emit-llvm -o - -x c++ %s | FileCheck %s -check-prefix=CPP-CHECK
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -Wall -o /dev/null %s
#include <arm_sve.h>
// CHECK-LABEL: @test_svcntp_b8(
// CHECK-NEXT: entry:
// CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.aarch64.sve.cntp.nxv16i1(<vscale x 16 x i1> [[PG:%.*]], <vscale x 16 x i1> [[OP:%.*]])
// CHECK-NEXT: ret i64 [[TMP0]]
//
// CPP-CHECK-LABEL: @_Z14test_svcntp_b8u10__SVBool_tu10__SVBool_t(
// CPP-CHECK-NEXT: entry:
// CPP-CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.aarch64.sve.cntp.nxv16i1(<vscale x 16 x i1> [[PG:%.*]], <vscale x 16 x i1> [[OP:%.*]])
// CPP-CHECK-NEXT: ret i64 [[TMP0]]
//
uint64_t test_svcntp_b8(svbool_t pg, svbool_t op)
{
return svcntp_b8(pg, op);
}
// CHECK-LABEL: @test_svcntp_b16(
// CHECK-NEXT: entry:
// CHECK-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[PG:%.*]])
// CHECK-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[OP:%.*]])
// CHECK-NEXT: [[TMP2:%.*]] = call i64 @llvm.aarch64.sve.cntp.nxv8i1(<vscale x 8 x i1> [[TMP0]], <vscale x 8 x i1> [[TMP1]])
// CHECK-NEXT: ret i64 [[TMP2]]
//
// CPP-CHECK-LABEL: @_Z15test_svcntp_b16u10__SVBool_tu10__SVBool_t(
// CPP-CHECK-NEXT: entry:
// CPP-CHECK-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[PG:%.*]])
// CPP-CHECK-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[OP:%.*]])
// CPP-CHECK-NEXT: [[TMP2:%.*]] = call i64 @llvm.aarch64.sve.cntp.nxv8i1(<vscale x 8 x i1> [[TMP0]], <vscale x 8 x i1> [[TMP1]])
// CPP-CHECK-NEXT: ret i64 [[TMP2]]
//
uint64_t test_svcntp_b16(svbool_t pg, svbool_t op)
{
return svcntp_b16(pg, op);
}
// CHECK-LABEL: @test_svcntp_b32(
// CHECK-NEXT: entry:
// CHECK-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> [[PG:%.*]])
// CHECK-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> [[OP:%.*]])
// CHECK-NEXT: [[TMP2:%.*]] = call i64 @llvm.aarch64.sve.cntp.nxv4i1(<vscale x 4 x i1> [[TMP0]], <vscale x 4 x i1> [[TMP1]])
// CHECK-NEXT: ret i64 [[TMP2]]
//
// CPP-CHECK-LABEL: @_Z15test_svcntp_b32u10__SVBool_tu10__SVBool_t(
// CPP-CHECK-NEXT: entry:
// CPP-CHECK-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> [[PG:%.*]])
// CPP-CHECK-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> [[OP:%.*]])
// CPP-CHECK-NEXT: [[TMP2:%.*]] = call i64 @llvm.aarch64.sve.cntp.nxv4i1(<vscale x 4 x i1> [[TMP0]], <vscale x 4 x i1> [[TMP1]])
// CPP-CHECK-NEXT: ret i64 [[TMP2]]
//
uint64_t test_svcntp_b32(svbool_t pg, svbool_t op)
{
return svcntp_b32(pg, op);
}
// CHECK-LABEL: @test_svcntp_b64(
// CHECK-NEXT: entry:
// CHECK-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> [[PG:%.*]])
// CHECK-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> [[OP:%.*]])
// CHECK-NEXT: [[TMP2:%.*]] = call i64 @llvm.aarch64.sve.cntp.nxv2i1(<vscale x 2 x i1> [[TMP0]], <vscale x 2 x i1> [[TMP1]])
// CHECK-NEXT: ret i64 [[TMP2]]
//
// CPP-CHECK-LABEL: @_Z15test_svcntp_b64u10__SVBool_tu10__SVBool_t(
// CPP-CHECK-NEXT: entry:
// CPP-CHECK-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> [[PG:%.*]])
// CPP-CHECK-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> [[OP:%.*]])
// CPP-CHECK-NEXT: [[TMP2:%.*]] = call i64 @llvm.aarch64.sve.cntp.nxv2i1(<vscale x 2 x i1> [[TMP0]], <vscale x 2 x i1> [[TMP1]])
// CPP-CHECK-NEXT: ret i64 [[TMP2]]
//
uint64_t test_svcntp_b64(svbool_t pg, svbool_t op)
{
return svcntp_b64(pg, op);
}
| 2,474 |
360 | <reponame>tigertv/Cube-Engine
#pragma once
namespace tzw {
struct Particle;
class ParticleEmitter;
class ParticleEmitterModule
{
public:
virtual ~ParticleEmitterModule() = default;
ParticleEmitterModule();
virtual void process(Particle * particle, ParticleEmitter * emitter);
};
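// Illustrative sketch (not part of the engine): a concrete module customizes
// per-particle behaviour by overriding process(), for example:
//   class FadeOutModule : public ParticleEmitterModule
//   {
//   public:
//       void process(Particle * particle, ParticleEmitter * emitter) override;
//   };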
} // namespace tzw | 101 |
326 | // Boost.Geometry Index
//
// Copyright (c) 2011-2013 <NAME>, <NAME>.
//
// This file was modified by Oracle on 2020.
// Modifications copyright (c) 2020 Oracle and/or its affiliates.
// Contributed and/or modified by <NAME>, on behalf of Oracle
//
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include <type_traits>
#include <boost/swap.hpp>
#ifndef BOOST_GEOMETRY_INDEX_DETAIL_UTILITIES_HPP
#define BOOST_GEOMETRY_INDEX_DETAIL_UTILITIES_HPP
namespace boost { namespace geometry { namespace index { namespace detail {
template<class T>
static inline void assign_cond(T & l, T const& r, std::true_type)
{
l = r;
}
template<class T>
static inline void assign_cond(T &, T const&, std::false_type) {}
template<class T>
static inline void move_cond(T & l, T & r, std::true_type)
{
l = ::boost::move(r);
}
template<class T>
static inline void move_cond(T &, T &, std::false_type) {}
template <typename T> inline
void swap_cond(T & l, T & r, std::true_type)
{
::boost::swap(l, r);
}
template <typename T> inline
void swap_cond(T &, T &, std::false_type) {}
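// Illustrative use (not part of this header): callers pick the overload with a
// compile-time tag, so the operation compiles away when the trait is false, e.g.
//   detail::swap_cond(lhs_alloc, rhs_alloc,
//       typename std::allocator_traits<Allocator>::propagate_on_container_swap());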
}}}} // namespace boost::geometry::index::detail
#endif // BOOST_GEOMETRY_INDEX_DETAIL_UTILITIES_HPP
| 482 |
66,985 | /*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.web.format;
import java.time.format.DateTimeFormatter;
import java.util.function.Consumer;
import java.util.function.Supplier;
import org.springframework.format.datetime.DateFormatter;
import org.springframework.format.datetime.DateFormatterRegistrar;
import org.springframework.format.datetime.standard.DateTimeFormatterRegistrar;
import org.springframework.format.number.NumberFormatAnnotationFormatterFactory;
import org.springframework.format.number.money.CurrencyUnitFormatter;
import org.springframework.format.number.money.Jsr354NumberFormatAnnotationFormatterFactory;
import org.springframework.format.number.money.MonetaryAmountFormatter;
import org.springframework.format.support.DefaultFormattingConversionService;
import org.springframework.util.ClassUtils;
/**
* {@link org.springframework.format.support.FormattingConversionService} dedicated to web
* applications for formatting and converting values to/from the web.
* <p>
* This service replaces the default implementations provided by
* {@link org.springframework.web.servlet.config.annotation.EnableWebMvc @EnableWebMvc}
* and {@link org.springframework.web.reactive.config.EnableWebFlux @EnableWebFlux}.
*
* @author <NAME>
* @since 2.0.0
*/
public class WebConversionService extends DefaultFormattingConversionService {
private static final boolean JSR_354_PRESENT = ClassUtils.isPresent("javax.money.MonetaryAmount",
WebConversionService.class.getClassLoader());
/**
* Create a new WebConversionService that configures formatters with the provided
* date, time, and date-time formats, or registers the default if no custom format is
* provided.
* @param dateTimeFormatters the formatters to use for date, time, and date-time
* formatting
* @since 2.3.0
*/
public WebConversionService(DateTimeFormatters dateTimeFormatters) {
super(false);
if (dateTimeFormatters.isCustomized()) {
addFormatters(dateTimeFormatters);
}
else {
addDefaultFormatters(this);
}
}
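	// Illustrative only: a typical construction with custom patterns; the
	// DateTimeFormatters builder methods shown here are assumed from its API.
	//   WebConversionService conversionService = new WebConversionService(
	//       new DateTimeFormatters().dateFormat("yyyy-MM-dd").timeFormat("HH:mm:ss"));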
private void addFormatters(DateTimeFormatters dateTimeFormatters) {
addFormatterForFieldAnnotation(new NumberFormatAnnotationFormatterFactory());
if (JSR_354_PRESENT) {
addFormatter(new CurrencyUnitFormatter());
addFormatter(new MonetaryAmountFormatter());
addFormatterForFieldAnnotation(new Jsr354NumberFormatAnnotationFormatterFactory());
}
registerJsr310(dateTimeFormatters);
registerJavaDate(dateTimeFormatters);
}
private void registerJsr310(DateTimeFormatters dateTimeFormatters) {
DateTimeFormatterRegistrar dateTime = new DateTimeFormatterRegistrar();
configure(dateTimeFormatters::getDateFormatter, dateTime::setDateFormatter);
configure(dateTimeFormatters::getTimeFormatter, dateTime::setTimeFormatter);
configure(dateTimeFormatters::getDateTimeFormatter, dateTime::setDateTimeFormatter);
dateTime.registerFormatters(this);
}
private void configure(Supplier<DateTimeFormatter> supplier, Consumer<DateTimeFormatter> consumer) {
DateTimeFormatter formatter = supplier.get();
if (formatter != null) {
consumer.accept(formatter);
}
}
private void registerJavaDate(DateTimeFormatters dateTimeFormatters) {
DateFormatterRegistrar dateFormatterRegistrar = new DateFormatterRegistrar();
String datePattern = dateTimeFormatters.getDatePattern();
if (datePattern != null) {
DateFormatter dateFormatter = new DateFormatter(datePattern);
dateFormatterRegistrar.setFormatter(dateFormatter);
}
dateFormatterRegistrar.registerFormatters(this);
}
}
| 1,213 |
3,285 | <reponame>grybd/oneflow<filename>python/oneflow/test/graph/test_graph_pipeline.py<gh_stars>1000+
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
import unittest
import numpy as np
import oneflow as flow
import oneflow.unittest
rank = flow.env.get_rank()
class OFRecordDataLoader(flow.nn.Module):
def __init__(
self,
ofrecord_root: str = "./ofrecord",
mode: str = "train", # "val"
dataset_size: int = 9469,
batch_size: int = 1,
placement=None,
sbp=None,
):
super().__init__()
channel_last = False
output_layout = "NHWC" if channel_last else "NCHW"
self.train_record_reader = flow.nn.OFRecordReader(
ofrecord_root + "/" + mode,
batch_size=batch_size,
data_part_num=40,
part_name_suffix_length=5,
random_shuffle=False,
shuffle_after_epoch=False,
placement=placement,
sbp=sbp,
random_seed=0,
)
self.record_label_decoder = flow.nn.OFRecordRawDecoder(
"class/label", shape=(), dtype=flow.int32
)
color_space = "RGB"
height = 22
width = 22
self.record_image_decoder = flow.nn.OFRecordImageDecoder(
"encoded", color_space=color_space
)
self.resize = flow.nn.image.Resize(target_size=[height, width])
self.batch_size = batch_size
self.dataset_size = dataset_size
def __len__(self):
return self.dataset_size // self.batch_size
def forward(self):
train_record = self.train_record_reader()
label = self.record_label_decoder(train_record)
image_raw_buffer = self.record_image_decoder(train_record)
image = self.resize(image_raw_buffer)[0]
image = flow.flatten(image.to(flow.float32), start_dim=1)
return image, label
def _train_with_graph(iter_num=3):
B = [flow.sbp.broadcast]
P0 = flow.placement("cuda", {0: [0]})
P1 = flow.placement("cuda", {0: [1]})
P2 = flow.placement("cuda", {0: [2]})
P3 = flow.placement("cuda", {0: [3]})
train_data_loader = OFRecordDataLoader(
ofrecord_root="/dataset/ImageNet/ofrecord",
mode="train",
dataset_size=400,
batch_size=4,
placement=P0,
sbp=B,
)
def _get_ppm_and_opt():
class StageModule(flow.nn.Module):
def __init__(self, *linear_args):
super().__init__()
self.linear = flow.nn.Linear(*linear_args)
flow.nn.init.constant_(self.linear.weight, 0.00023)
def forward(self, input):
out = self.linear(input)
return out
class PipelineModule(flow.nn.Module):
def __init__(self):
super().__init__()
                # Initialize each module and move it to the right placement of its pipeline stage.
self.stage_0_m = StageModule(1452, 8, False).to_consistent(
placement=P0, sbp=B
)
self.stage_1_m = StageModule(8, 8, False).to_consistent(
placement=P1, sbp=B
)
self.stage_2_m = StageModule(8, 8, False).to_consistent(
placement=P2, sbp=B
)
self.stage_3_m = StageModule(8, 1, False).to_consistent(
placement=P3, sbp=B
)
def forward(self, image):
out = self.stage_0_m(image)
# Move tensor between different pipeline stages.
out = out.to_consistent(placement=P1, sbp=B)
out = self.stage_1_m(out)
out = out.to_consistent(placement=P2, sbp=B)
out = self.stage_2_m(out)
out = out.to_consistent(placement=P3, sbp=B)
out = self.stage_3_m(out)
return out
pp_m = PipelineModule()
sgd = flow.optim.SGD(pp_m.parameters(), lr=0.0001)
return pp_m, sgd
pp_m, sgd = _get_ppm_and_opt()
class PipelineGraph(flow.nn.Graph):
def __init__(self):
super().__init__()
self.train_data_loader = train_data_loader
self.pp_m = pp_m
            # Set each module's stage id to hint the graph to prepare the right number of buffers for the pipeline.
self.pp_m.stage_0_m.config.stage_id = 0
self.pp_m.stage_1_m.config.stage_id = 1
self.pp_m.stage_2_m.config.stage_id = 2
self.pp_m.stage_3_m.config.stage_id = 3
self.mseloss = flow.nn.MSELoss("sum")
self.add_optimizer(sgd)
            # Let the graph do gradient accumulation; pipeline execution depends on gradient accumulation.
self.config.set_gradient_accumulation_steps(4)
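            # Note (added for clarity): with 4 accumulation steps, each graph call
            # consumes 4 micro-batches (each of data-loader batch size 4) before the
            # optimizer steps once; the eager baseline below mirrors this by scaling
            # each micro-batch loss by 0.25 and stepping every 4th iteration.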
def build(self):
image, label = self.train_data_loader()
# Dataloader's outputs are on host memory, so move it to device 0.
image = image.to_consistent(placement=P0, sbp=B)
pp_m.train()
out = self.pp_m(image)
# Dataloader's outputs are on host memory, so move it to device 3.
label = label.to_consistent(placement=P3, sbp=B)
loss = self.mseloss(out, label.to(dtype=flow.float32))
loss.backward()
# Returning image and label is just for re-using data in eager test
image = image.to_consistent(placement=P3, sbp=B)
return loss, image, label
pp_g = PipelineGraph()
def one_iter(iter_idx):
loss, image, label = pp_g()
if rank == 3:
# loss on other rank are 0-Size tensor
loss = loss.to_local()
loss_np = loss.numpy()
print("loss numpy \n", loss)
image = image.to_local().numpy()
label = label.to_local().numpy()
return loss, image, label
check_list = []
data_list = []
for i in range(iter_num):
out = one_iter(i)
if rank == 3:
check_list.append(out[0])
data_list.append((out[1], out[2]))
return check_list, data_list
def _train_with_module(iter_num=3, data=None):
class DataModule(flow.nn.Module):
def __init__(self, data):
super().__init__()
self.data_list = []
self.idx = 0
for pair in data:
for i in range(4):
s = i * 4
e = s + 4
micro_batch_image = pair[0][s:e]
micro_batch_label = pair[1][s:e]
self.data_list.append(
(
flow.Tensor(micro_batch_image).to("cuda:3"),
flow.Tensor(micro_batch_label).to("cuda:3"),
)
)
def forward(self):
image = self.data_list[self.idx][0]
label = self.data_list[self.idx][1]
self.idx += 1
return image, label
class TrainModule(flow.nn.Module):
def __init__(self):
super().__init__()
self.linear = flow.nn.Linear(1452, 8, False)
flow.nn.init.constant_(self.linear.weight, 0.00023)
self.linear.to("cuda:3")
self.linear1 = flow.nn.Linear(8, 8, False)
flow.nn.init.constant_(self.linear1.weight, 0.00023)
self.linear1.to("cuda:3")
self.linear2 = flow.nn.Linear(8, 8, False)
flow.nn.init.constant_(self.linear2.weight, 0.00023)
self.linear2.to("cuda:3")
self.linear3 = flow.nn.Linear(8, 1, False)
flow.nn.init.constant_(self.linear3.weight, 0.00023)
self.linear3.to("cuda:3")
self.mseloss = flow.nn.MSELoss("sum")
def forward(self, image, label):
out = self.linear(image)
out = self.linear1(out)
out = self.linear2(out)
out = self.linear3(out)
loss = self.mseloss(out, label)
return loss
if rank == 3:
data_m = DataModule(data)
train_m = TrainModule()
sgd = flow.optim.SGD(train_m.parameters(), lr=0.0001)
def one_iter(iter_idx):
if rank == 3:
image, label = data_m()
loss = train_m(image, label)
loss_np = loss.numpy()
print("eager loss numpy \n", loss_np)
loss = loss * 0.25
loss.backward()
if iter_idx % 4 == 3:
print(f"iter index: {iter_idx}")
                # eager gradient accumulation
sgd.step()
sgd.zero_grad()
return loss_np
check_list = []
for i in range(iter_num):
check_list.append(one_iter(i))
return check_list
def _test_graph_pipeline(test_case):
iter_num = 3
graph_check_list, data = _train_with_graph(iter_num)
module_check_list = _train_with_module(iter_num * 4, data)
if rank == 3:
for i in range(iter_num * 4):
# check equal on loss
test_case.assertTrue(
np.array_equal(module_check_list[i], graph_check_list[i // 4][i % 4])
)
@unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
@flow.unittest.skip_unless_1n4d()
class TestGraphPipeline(oneflow.unittest.TestCase):
def test_graph_pipeline(test_case):
_test_graph_pipeline(test_case)
if __name__ == "__main__":
unittest.main()
| 5,203 |
3,933 | <reponame>BennZoll/roboto
typedef struct {
double x;
double y;
char ty;
} spiro_cp;
typedef struct spiro_seg_s spiro_seg;
spiro_seg *
run_spiro(const spiro_cp *src, int n);
void
free_spiro(spiro_seg *s);
void
spiro_to_bpath(const spiro_seg *s, int n, bezctx *bc);
double get_knot_th(const spiro_seg *s, int i);
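/* Illustrative call sequence (not part of the original header); `points`, `n`
 * and the bezier context `bc` are assumed to be set up by the caller:
 *
 *     spiro_seg *segs = run_spiro(points, n);
 *     if (segs) {
 *         spiro_to_bpath(segs, n, bc);
 *         free_spiro(segs);
 *     }
 */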
| 160 |
507 | # tests/test_provider_paultyng_airtable.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:11:05 UTC)
def test_provider_import():
import terrascript.provider.paultyng.airtable
def test_datasource_import():
from terrascript.data.paultyng.airtable import airtable_table
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.paultyng.airtable
#
# t = terrascript.provider.paultyng.airtable.airtable()
# s = str(t)
#
# assert 'https://github.com/paultyng/terraform-provider-airtable' in s
# assert '0.1.0' in s
| 265 |
679 | <reponame>Grosskopf/openoffice
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
package mod._sc;
import java.io.PrintWriter;
import lib.StatusException;
import lib.TestCase;
import lib.TestEnvironment;
import lib.TestParameters;
import util.SOfficeFactory;
import util.XMLTools;
import com.sun.star.container.XIndexAccess;
import com.sun.star.document.XExporter;
import com.sun.star.lang.XComponent;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.sheet.XSpreadsheet;
import com.sun.star.sheet.XSpreadsheetDocument;
import com.sun.star.sheet.XSpreadsheets;
import com.sun.star.table.XCell;
import com.sun.star.uno.Any;
import com.sun.star.uno.AnyConverter;
import com.sun.star.uno.Type;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;
import com.sun.star.xml.sax.XDocumentHandler;
/**
* Test for object which is represented by service
* <code>com.sun.star.comp.Calc.XMLContentExporter</code>. <p>
* Object implements the following interfaces :
* <ul>
* <li><code>com::sun::star::lang::XInitialization</code></li>
* <li><code>com::sun::star::document::ExportFilter</code></li>
* <li><code>com::sun::star::document::XFilter</code></li>
* <li><code>com::sun::star::document::XExporter</code></li>
* <li><code>com::sun::star::beans::XPropertySet</code></li>
* </ul>
* @see com.sun.star.lang.XInitialization
* @see com.sun.star.document.ExportFilter
* @see com.sun.star.document.XFilter
* @see com.sun.star.document.XExporter
* @see com.sun.star.beans.XPropertySet
* @see ifc.lang._XInitialization
* @see ifc.document._ExportFilter
* @see ifc.document._XFilter
* @see ifc.document._XExporter
* @see ifc.beans._XPropertySet
*/
public class XMLContentExporter extends TestCase {
static XComponent xSheetDoc;
static ContentFilterChecker Filter;
/**
* New spreadsheet document created.
*/
protected void initialize( TestParameters tParam, PrintWriter log ) {
SOfficeFactory SOF = SOfficeFactory.getFactory( (XMultiServiceFactory)tParam.getMSF() );
try {
log.println( "creating a calc document" );
xSheetDoc = SOF.openDoc("scalc","_blank");
} catch ( com.sun.star.uno.Exception e ) {
            // Some exception occurred. FAILED
e.printStackTrace( log );
throw new StatusException( "Couldn't create document", e );
}
}
protected void cleanup( TestParameters tParam, PrintWriter log ) {
log.println( " disposing xCalcDoc " );
util.DesktopTools.closeDoc(xSheetDoc);
}
/**
* Creating a Testenvironment for the interfaces to be tested.
* Creates an instance of the service
* <code>com.sun.star.comp.Calc.XMLContentExporter</code> with
* argument which is an implementation of <code>XDocumentHandler</code>
* and which can check if required tags and character data is
* exported. <p>
* The calc document is set as a source document for exporter
* created. A cell in the sheet is set to some value. This made
* for checking if this value is successfully exported within
* the document content.
* Object relations created :
* <ul>
* <li> <code>'MediaDescriptor'</code> for
* {@link ifc.document._XFilter} interface </li>
* <li> <code>'XFilter.Checker'</code> for
* {@link ifc.document._XFilter} interface </li>
* <li> <code>'SourceDocument'</code> for
* {@link ifc.document._XExporter} interface </li>
* </ul>
*/
protected synchronized TestEnvironment createTestEnvironment(TestParameters tParam, PrintWriter log) {
XMultiServiceFactory xMSF = (XMultiServiceFactory)tParam.getMSF() ;
XInterface oObj = null;
final String CELL_TEXT = "XMLContentExporter";
ContentFilterChecker Filter = new ContentFilterChecker(log);
Any arg = new Any(new Type(XDocumentHandler.class), Filter);
try {
oObj = (XInterface) xMSF.createInstanceWithArguments(
"com.sun.star.comp.Calc.XMLContentExporter",
new Object[] {arg} );
XExporter xEx = (XExporter) UnoRuntime.queryInterface
(XExporter.class,oObj);
xEx.setSourceDocument(xSheetDoc);
// Setting some string to a cell
XSpreadsheetDocument xSpreadsheetDoc = (XSpreadsheetDocument)
UnoRuntime.queryInterface(XSpreadsheetDocument.class, xSheetDoc);
XSpreadsheets xSpreadsheets = xSpreadsheetDoc.getSheets();
XIndexAccess xSheetsIndexArray = (XIndexAccess)
UnoRuntime.queryInterface(XIndexAccess.class, xSpreadsheets);
XSpreadsheet xSheet = (XSpreadsheet) AnyConverter.toObject(
new Type(XSpreadsheet.class),xSheetsIndexArray.getByIndex(0));
XCell xCell = xSheet.getCellByPosition(0, 0);
xCell.setFormula(CELL_TEXT);
log.println("fill sheet 1 with contnet...");
util.CalcTools.fillCalcSheetWithContent(xSheetDoc, 1, 1, 1, 5, 5);
} catch (com.sun.star.uno.Exception e) {
e.printStackTrace(log) ;
throw new StatusException("Can't create component.", e) ;
} catch (java.lang.Exception e) {
e.printStackTrace(log);
throw new StatusException("Can't create environment.", e);
}
// adding tags which must be contained in XML output
Filter.addTag("office:document-content");
Filter.addTagEnclosed("office:body", "office:document-content");
Filter.addTagEnclosed("office:script", "office:document-content");
Filter.addTagEnclosed("table:table", "office:body");
Filter.addTagEnclosed("table:table-column", "table:table");
Filter.addTagEnclosed("table:table-row", "table:table");
Filter.addTagEnclosed("table:table-cell", "table:table-row");
Filter.addTagEnclosed("text:p", "table:table-cell");
Filter.addCharactersEnclosed(CELL_TEXT, "text:p");
// create testobject here
log.println( "creating a new environment" );
TestEnvironment tEnv = new TestEnvironment( oObj );
tEnv.addObjRelation("MediaDescriptor", XMLTools.createMediaDescriptor(
new String[] {"FilterName"},
new Object[] {"scalc: StarOffice XML (Calc)"}));
tEnv.addObjRelation("SourceDocument",xSheetDoc);
tEnv.addObjRelation("XFilter.Checker", Filter);
return tEnv;
}
/**
* This class checks the XML for tags and data required and returns
* checking result to <code>XFilter</code> interface test. All
* the information about errors occurred in XML data is written
* to log specified.
* @see ifc.document._XFilter
*/
protected class ContentFilterChecker extends XMLTools.XMLTagsChecker
implements ifc.document._XFilter.FilterChecker {
/**
         * Creates a checker which will write information
         * into the log specified.
*/
public ContentFilterChecker(PrintWriter log) {
super(log) ;
}
/**
* <code>_XFilter.FilterChecker</code> interface method
* which returns the result of XML checking.
* @return <code>true</code> if the XML data exported was
         * valid (i.e. all necessary tags and character data exist),
* <code>false</code> if some errors occurred.
*/
public boolean checkFilter() {
return checkTags();
}
}
}
| 3,153 |
357 | <filename>vmidentity/platform/src/main/java/com/vmware/identity/interop/domainmanager/LinuxDomainTrust.java<gh_stars>100-1000
/*
* Copyright (c) 2012-2015 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.vmware.identity.interop.domainmanager;
import com.sun.jna.Platform;
import com.sun.jna.Pointer;
/**
* Created by IntelliJ IDEA.
* User: wfu
* Date: 04/09/2013
* Time: 10:59 PM
*/
public class LinuxDomainTrust
{
class LsaTrustType
{
public static final int LSA_TRUST_TYPE_DOWNLEVEL = 0x00000001;
public static final int LSA_TRUST_TYPE_UPLEVEL = 0x00000002;
public static final int LSA_TRUST_TYPE_MIT = 0x00000003;
public static final int LSA_TRUST_TYPE_DCE = 0x00000004;
}
class LsaTrustAttribute
{
public static final int LSA_TRUST_ATTRIBUTE_NON_TRANSITIVE = 0x00000001;
public static final int LSA_TRUST_ATTRIBUTE_UPLEVEL_ONLY = 0x00000002;
public static final int LSA_TRUST_ATTRIBUTE_FILTER_SIDS = 0x00000004;
public static final int LSA_TRUST_ATTRIBUTE_FOREST_TRANSITIVE = 0x00000008;
public static final int LSA_TRUST_ATTRIBUTE_CROSS_ORGANIZATION = 0x00000010;
public static final int LSA_TRUST_ATTRIBUTE_WITHIN_FOREST = 0x00000020;
}
class LsaTrustFlag
{
public static final int LSA_TRUST_FLAG_IN_FOREST = 0x00000001;
public static final int LSA_TRUST_FLAG_OUTBOUND = 0x00000002;
public static final int LSA_TRUST_FLAG_TREEROOT = 0x00000004;
public static final int LSA_TRUST_FLAG_PRIMARY = 0x00000008;
public static final int LSA_TRUST_FLAG_NATIVE = 0x00000010;
public static final int LSA_TRUST_FLAG_INBOUND = 0x00000020;
}
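    // Worked example (illustrative): a primary, native-mode domain that is part of
    // the forest carries LSA_TRUST_FLAG_IN_FOREST | LSA_TRUST_FLAG_PRIMARY |
    // LSA_TRUST_FLAG_NATIVE = 0x00000001 | 0x00000008 | 0x00000010 = 0x00000019,
    // which the constructor below decodes into IsInforest, IsPrimary and IsNativeMode.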
public boolean IsInforest;
public boolean IsOutBound;
public boolean IsInBound;
public boolean IsRoot;
public boolean IsPrimary;
public boolean IsNativeMode;
public DomainControllerInfo dcInfo;
public String domainName;
public String domainIpAddress;
public String domainFQDN;
public String domainDnsForestName;
public String pszDnsDomain;
public String pszNetbiosDomain;
public String pszTrusteeDnsDomain;
public String pszDomainSID;
public String pszDomainGUID;
public String pszForestName;
public String pszClientSiteName;
public LinuxDomainTrust(LsaTrustedDomainInfoNative trust)
{
assert Platform.isLinux();
this.IsInforest = (trust.dwTrustFlags & LsaTrustFlag.LSA_TRUST_FLAG_IN_FOREST) != 0 ;
this.IsOutBound = (trust.dwTrustFlags & LsaTrustFlag.LSA_TRUST_FLAG_OUTBOUND) != 0;
this.IsInBound = (trust.dwTrustFlags & LsaTrustFlag.LSA_TRUST_FLAG_INBOUND) != 0;
this.IsRoot = (trust.dwTrustFlags & LsaTrustFlag.LSA_TRUST_FLAG_TREEROOT) != 0;
this.IsPrimary = (trust.dwTrustFlags & LsaTrustFlag.LSA_TRUST_FLAG_PRIMARY) != 0;
this.IsNativeMode = (trust.dwTrustFlags & LsaTrustFlag.LSA_TRUST_FLAG_NATIVE) != 0;
if (trust.pDCInfo != Pointer.NULL)
{
LsaDcInfoNative lsaDcInfo = new LsaDcInfoNative(trust.pDCInfo);
this.dcInfo = new DomainControllerInfo(trust.pszDnsDomain,
trust.pszNetbiosDomain,
lsaDcInfo.pszAddress,
lsaDcInfo.pszName,
trust.pszForestName);
}
else
{
this.dcInfo = new DomainControllerInfo(trust.pszDnsDomain,
trust.pszNetbiosDomain,
"",
"",
trust.pszForestName);
}
}
}
| 1,964 |
628 | from doctr.file_utils import is_torch_available
def test_file_utils():
assert is_torch_available()
| 35 |
465 | #pragma once
#include <lanelet2_io/Exceptions.h>
#include <lanelet2_io/Projection.h>
namespace lanelet {
namespace projection {
class UtmProjector : public Projector {
public:
explicit UtmProjector(Origin origin, bool useOffset = true, bool throwInPaddingArea = false);
BasicPoint3d forward(const GPSPoint& gps) const override;
GPSPoint reverse(const BasicPoint3d& utm) const override;
private:
int zone_{};
bool isInNorthernHemisphere_{true}, useOffset_{}, throwInPaddingArea_{};
double xOffset_{}, yOffset_{};
};
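// Illustrative usage (not part of this header); the origin and coordinates are
// made-up values:
//   projection::UtmProjector projector(Origin({49.0, 8.4}));
//   BasicPoint3d xyz = projector.forward(GPSPoint{49.01, 8.41, 115.0});
//   GPSPoint gps = projector.reverse(xyz);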
} // namespace projection
} // namespace lanelet
| 183 |
1,350 | <reponame>billwert/azure-sdk-for-java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.hybridnetwork.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.hybridnetwork.fluent.models.NetworkFunctionInner;
import com.azure.resourcemanager.hybridnetwork.models.TagsObject;
/** An instance of this class provides access to all the operations defined in NetworkFunctionsClient. */
public interface NetworkFunctionsClient {
/**
* Deletes the specified network function resource. This operation can take up to 1 hour to complete. This is
* expected service behavior.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName The name of the network function.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<Void>, Void> beginDelete(String resourceGroupName, String networkFunctionName);
/**
* Deletes the specified network function resource. This operation can take up to 1 hour to complete. This is
* expected service behavior.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName The name of the network function.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String networkFunctionName, Context context);
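    // Illustrative only (the resource names are made up): callers can either block
    // on delete(...) below or drive the long-running operation themselves, e.g.
    //   SyncPoller<PollResult<Void>, Void> poller =
    //       client.beginDelete("myResourceGroup", "myNetworkFunction");
    //   poller.waitForCompletion();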
/**
* Deletes the specified network function resource. This operation can take up to 1 hour to complete. This is
* expected service behavior.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName The name of the network function.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String networkFunctionName);
/**
* Deletes the specified network function resource. This operation can take up to 1 hour to complete. This is
* expected service behavior.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName The name of the network function.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String networkFunctionName, Context context);
/**
* Gets information about the specified network function resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName The name of the network function resource.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return information about the specified network function resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
NetworkFunctionInner getByResourceGroup(String resourceGroupName, String networkFunctionName);
/**
* Gets information about the specified network function resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName The name of the network function resource.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return information about the specified network function resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<NetworkFunctionInner> getByResourceGroupWithResponse(
String resourceGroupName, String networkFunctionName, Context context);
/**
* Creates or updates a network function resource. This operation can take up to 6 hours to complete. This is
* expected service behavior.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName Resource name for the network function resource.
* @param parameters Parameters supplied in the body to the create or update network function operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return network function resource response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<NetworkFunctionInner>, NetworkFunctionInner> beginCreateOrUpdate(
String resourceGroupName, String networkFunctionName, NetworkFunctionInner parameters);
/**
* Creates or updates a network function resource. This operation can take up to 6 hours to complete. This is
* expected service behavior.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName Resource name for the network function resource.
* @param parameters Parameters supplied in the body to the create or update network function operation.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return network function resource response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<NetworkFunctionInner>, NetworkFunctionInner> beginCreateOrUpdate(
String resourceGroupName, String networkFunctionName, NetworkFunctionInner parameters, Context context);
/**
* Creates or updates a network function resource. This operation can take up to 6 hours to complete. This is
* expected service behavior.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName Resource name for the network function resource.
* @param parameters Parameters supplied in the body to the create or update network function operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return network function resource response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
NetworkFunctionInner createOrUpdate(
String resourceGroupName, String networkFunctionName, NetworkFunctionInner parameters);
/**
* Creates or updates a network function resource. This operation can take up to 6 hours to complete. This is
* expected service behavior.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName Resource name for the network function resource.
* @param parameters Parameters supplied in the body to the create or update network function operation.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return network function resource response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
NetworkFunctionInner createOrUpdate(
String resourceGroupName, String networkFunctionName, NetworkFunctionInner parameters, Context context);
/**
* Updates the tags for the network function resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName Resource name for the network function resource.
* @param parameters Parameters supplied to the update network function tags operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return network function resource response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
NetworkFunctionInner updateTags(String resourceGroupName, String networkFunctionName, TagsObject parameters);
/**
* Updates the tags for the network function resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param networkFunctionName Resource name for the network function resource.
* @param parameters Parameters supplied to the update network function tags operation.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return network function resource response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<NetworkFunctionInner> updateTagsWithResponse(
String resourceGroupName, String networkFunctionName, TagsObject parameters, Context context);
/**
* Lists all the network functions in a subscription.
*
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for network function API service call.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<NetworkFunctionInner> list();
/**
* Lists all the network functions in a subscription.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for network function API service call.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<NetworkFunctionInner> list(Context context);
/**
* Lists all the network function resources in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for network function API service call.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<NetworkFunctionInner> listByResourceGroup(String resourceGroupName);
/**
* Lists all the network function resources in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for network function API service call.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<NetworkFunctionInner> listByResourceGroup(String resourceGroupName, Context context);
}
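// Illustrative usage sketch (not part of the generated interface): it assumes an already-configured
// NetworkFunctionsClient instance named "client" and placeholder resource names, and it only calls
// methods declared above.
//
//   SyncPoller<PollResult<Void>, Void> deletePoller = client.beginDelete("myResourceGroup", "myNetworkFunction");
//   deletePoller.waitForCompletion();
//
//   NetworkFunctionInner networkFunction = client.getByResourceGroup("myResourceGroup", "myNetworkFunction");
//   for (NetworkFunctionInner item : client.listByResourceGroup("myResourceGroup")) {
//       // inspect each network function in the resource group
//   }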
| 3,959 |
583 | <gh_stars>100-1000
#include "transaction_manager.hpp"
#include "commit_context.hpp"
#include "storage/mvcc_data.hpp"
#include "transaction_context.hpp"
#include "utils/assert.hpp"
namespace opossum {
TransactionManager::TransactionManager()
: _next_transaction_id{INITIAL_TRANSACTION_ID},
_last_commit_id{INITIAL_COMMIT_ID},
_last_commit_context{std::make_shared<CommitContext>(INITIAL_COMMIT_ID)} {}
TransactionManager::~TransactionManager() {
Assert(_active_snapshot_commit_ids.empty(),
"Some transactions do not seem to have finished yet as they are still registered as active.");
}
TransactionManager& TransactionManager::operator=(TransactionManager&& transaction_manager) noexcept {
_next_transaction_id = transaction_manager._next_transaction_id.load();
_last_commit_id = transaction_manager._last_commit_id.load();
_last_commit_context = transaction_manager._last_commit_context;
_active_snapshot_commit_ids = transaction_manager._active_snapshot_commit_ids;
return *this;
}
CommitID TransactionManager::last_commit_id() const { return _last_commit_id; }
std::shared_ptr<TransactionContext> TransactionManager::new_transaction_context(const AutoCommit auto_commit) {
const CommitID snapshot_commit_id = _last_commit_id;
return std::make_shared<TransactionContext>(_next_transaction_id++, snapshot_commit_id, auto_commit);
}
void TransactionManager::_register_transaction(const CommitID snapshot_commit_id) {
std::lock_guard<std::mutex> lock(_active_snapshot_commit_ids_mutex);
_active_snapshot_commit_ids.insert(snapshot_commit_id);
}
void TransactionManager::_deregister_transaction(const CommitID snapshot_commit_id) {
std::lock_guard<std::mutex> lock(_active_snapshot_commit_ids_mutex);
auto it = std::find(_active_snapshot_commit_ids.begin(), _active_snapshot_commit_ids.end(), snapshot_commit_id);
if (it != _active_snapshot_commit_ids.end()) {
_active_snapshot_commit_ids.erase(it);
return;
}
// Reaching this point means the id was never registered, which indicates a programming error.
Fail(
"Could not find snapshot_commit_id in TransactionManager's _active_snapshot_commit_ids. Therefore, the removal "
"failed and the function should not have been called.");
}
std::optional<CommitID> TransactionManager::get_lowest_active_snapshot_commit_id() const {
std::lock_guard<std::mutex> lock(_active_snapshot_commit_ids_mutex);
if (_active_snapshot_commit_ids.empty()) {
return std::nullopt;
}
auto it = std::min_element(_active_snapshot_commit_ids.begin(), _active_snapshot_commit_ids.end());
return *it;
}
/**
* Logic of the lock-free algorithm
*
* Let’s say n threads call this method simultaneously. They all enter the main while-loop.
* Eventually they reach the point where they try to set the successor of _last_commit_context
* (pointed to by current_context). Only one of them will succeed and will be able to pass the
* following if statement. The rest continues with the loop and will now try to get the latest
* context, which does not have a successor. As long as the thread that succeeded setting
* the next commit context has not finished updating _last_commit_context, they are stuck in
* the small while-loop. As soon as it is done, _last_commit_context will point to a commit
* context with no successor and they will be able to leave this loop.
*/
std::shared_ptr<CommitContext> TransactionManager::_new_commit_context() {
auto current_context = std::atomic_load(&_last_commit_context);
auto next_context = std::shared_ptr<CommitContext>();
auto success = false;
while (!success) {
while (current_context->has_next()) {
current_context = std::atomic_load(&_last_commit_context);
}
next_context = std::make_shared<CommitContext>(current_context->commit_id() + 1u);
success = current_context->try_set_next(next_context);
if (!success) continue;
/**
* Only one thread at a time can ever reach this code since only one thread
* succeeds to set _last_commit_context’s successor.
*/
success = std::atomic_compare_exchange_strong(&_last_commit_context, ¤t_context, next_context);
Assert(success, "Invariant violated.");
}
return next_context;
}
void TransactionManager::_try_increment_last_commit_id(const std::shared_ptr<CommitContext>& context) {
auto current_context = context;
while (current_context->is_pending()) {
auto expected_last_commit_id = current_context->commit_id() - 1;
if (!_last_commit_id.compare_exchange_strong(expected_last_commit_id, current_context->commit_id())) return;
current_context->fire_callback();
if (!current_context->has_next()) return;
current_context = current_context->next();
}
}
} // namespace opossum
| 1,511 |
1,847 | // Copyright (c) 2020 The Orbit Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <gtest/gtest.h>
#include <stdint.h>
#include <memory>
#include <variant>
#include "PerfEvent.h"
#include "PerfEventQueue.h"
namespace orbit_linux_tracing {
namespace {
// We do the testing with `ForkPerfEvent`s - that is just an arbitrary choice.
PerfEvent MakeTestEventNotOrdered(uint64_t timestamp) {
return ForkPerfEvent{
.timestamp = timestamp,
.ordered_stream = PerfEventOrderedStream::kNone,
};
}
PerfEvent MakeTestEventOrderedInFd(int origin_fd, uint64_t timestamp) {
return ForkPerfEvent{
.timestamp = timestamp,
.ordered_stream = PerfEventOrderedStream::FileDescriptor(origin_fd),
};
}
PerfEvent MakeTestEventOrderedInTid(pid_t tid, uint64_t timestamp) {
return ForkPerfEvent{
.timestamp = timestamp,
.ordered_stream = PerfEventOrderedStream::ThreadId(tid),
};
}
} // namespace
TEST(PerfEventQueue, SingleFd) {
constexpr int kOriginFd = 11;
PerfEventQueue event_queue;
uint64_t current_oldest_timestamp = 0;
EXPECT_FALSE(event_queue.HasEvent());
event_queue.PushEvent(MakeTestEventOrderedInFd(kOriginFd, 100));
event_queue.PushEvent(MakeTestEventOrderedInFd(kOriginFd, 101));
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 100;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
event_queue.PushEvent(MakeTestEventOrderedInFd(kOriginFd, 102));
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 101;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 102;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
EXPECT_FALSE(event_queue.HasEvent());
event_queue.PushEvent(MakeTestEventOrderedInFd(kOriginFd, 103));
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 103;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
EXPECT_FALSE(event_queue.HasEvent());
}
TEST(PerfEventQueue, FdWithDecreasingTimestamps) {
PerfEventQueue event_queue;
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 101));
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 103));
EXPECT_DEATH(event_queue.PushEvent(MakeTestEventOrderedInFd(11, 102)), "");
}
TEST(PerfEventQueue, TidWithDecreasingTimestamps) {
PerfEventQueue event_queue;
event_queue.PushEvent(MakeTestEventOrderedInTid(11, 101));
event_queue.PushEvent(MakeTestEventOrderedInTid(11, 103));
EXPECT_DEATH(event_queue.PushEvent(MakeTestEventOrderedInTid(11, 102)), "");
}
TEST(PerfEventQueue, MultipleFd) {
PerfEventQueue event_queue;
uint64_t current_oldest_timestamp;
EXPECT_FALSE(event_queue.HasEvent());
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 103));
event_queue.PushEvent(MakeTestEventOrderedInFd(22, 101));
event_queue.PushEvent(MakeTestEventOrderedInFd(22, 102));
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 101;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 102;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
event_queue.PushEvent(MakeTestEventOrderedInFd(33, 100));
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 104));
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 100;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 103;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 104;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
EXPECT_FALSE(event_queue.HasEvent());
}
TEST(PerfEventQueue, MultipleTids) {
PerfEventQueue event_queue;
uint64_t current_oldest_timestamp;
EXPECT_FALSE(event_queue.HasEvent());
event_queue.PushEvent(MakeTestEventOrderedInTid(11, 103));
event_queue.PushEvent(MakeTestEventOrderedInTid(22, 101));
event_queue.PushEvent(MakeTestEventOrderedInTid(22, 102));
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 101;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 102;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
event_queue.PushEvent(MakeTestEventOrderedInTid(33, 100));
event_queue.PushEvent(MakeTestEventOrderedInTid(11, 104));
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 100;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 103;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
current_oldest_timestamp = 104;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
EXPECT_FALSE(event_queue.HasEvent());
}
TEST(PerfEventQueue, FdWithOldestAndNewestEvent) {
PerfEventQueue event_queue;
EXPECT_FALSE(event_queue.HasEvent());
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 101));
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 101);
event_queue.PushEvent(MakeTestEventOrderedInFd(22, 102));
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 101);
event_queue.PushEvent(MakeTestEventOrderedInFd(33, 103));
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 101);
event_queue.PushEvent(MakeTestEventOrderedInFd(44, 104));
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 101);
event_queue.PushEvent(MakeTestEventOrderedInFd(55, 105));
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 101);
event_queue.PushEvent(MakeTestEventOrderedInFd(66, 106));
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 101);
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 999));
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 101);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 102);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 103);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 104);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 105);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 106);
event_queue.PopEvent();
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, 999);
event_queue.PopEvent();
EXPECT_FALSE(event_queue.HasEvent());
}
TEST(PerfEventQueue, NoOrder) {
PerfEventQueue event_queue;
uint64_t current_oldest_timestamp = 0;
EXPECT_FALSE(event_queue.HasEvent());
event_queue.PushEvent(MakeTestEventNotOrdered(104));
current_oldest_timestamp = 104;
EXPECT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PushEvent(MakeTestEventNotOrdered(101));
current_oldest_timestamp = 101;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PushEvent(MakeTestEventNotOrdered(102));
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
current_oldest_timestamp = 102;
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
current_oldest_timestamp = 104;
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
ASSERT_TRUE(event_queue.HasEvent());
event_queue.PushEvent(MakeTestEventNotOrdered(103));
current_oldest_timestamp = 103;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
current_oldest_timestamp = 104;
ASSERT_TRUE(event_queue.HasEvent());
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp);
event_queue.PopEvent();
ASSERT_FALSE(event_queue.HasEvent());
EXPECT_DEATH(event_queue.PopEvent(), "");
}
TEST(PerfEventQueue, OrderedInFdAndNoOrderTogether) {
PerfEventQueue event_queue;
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 103));
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 105));
event_queue.PushEvent(MakeTestEventOrderedInFd(22, 102));
event_queue.PushEvent(MakeTestEventNotOrdered(108));
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 107));
event_queue.PushEvent(MakeTestEventOrderedInFd(22, 106));
event_queue.PushEvent(MakeTestEventNotOrdered(101));
event_queue.PushEvent(MakeTestEventNotOrdered(104));
event_queue.PushEvent(MakeTestEventOrderedInFd(22, 109));
uint64_t current_oldest_timestamp = 101;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_FALSE(event_queue.HasEvent());
EXPECT_DEATH(event_queue.PopEvent(), "");
}
TEST(PerfEventQueue, AllOrderTypesTogether) {
PerfEventQueue event_queue;
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 103));
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 105));
event_queue.PushEvent(MakeTestEventOrderedInTid(11, 102));
event_queue.PushEvent(MakeTestEventNotOrdered(108));
event_queue.PushEvent(MakeTestEventOrderedInFd(11, 107));
event_queue.PushEvent(MakeTestEventOrderedInTid(11, 106));
event_queue.PushEvent(MakeTestEventNotOrdered(101));
event_queue.PushEvent(MakeTestEventNotOrdered(104));
event_queue.PushEvent(MakeTestEventOrderedInTid(11, 109));
uint64_t current_oldest_timestamp = 101;
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_EQ(event_queue.TopEvent().timestamp, current_oldest_timestamp++);
event_queue.PopEvent();
EXPECT_FALSE(event_queue.HasEvent());
EXPECT_DEATH(event_queue.PopEvent(), "");
}
TEST(
PerfEventQueue,
TopEventAndPopEventReturnTheSameWhenAnEventOrderedByFdAndAnEventWithNoOrderHaveTheSameTimestamp) {
PerfEventQueue event_queue;
constexpr uint64_t kCommonTimestamp = 100;
event_queue.PushEvent(MakeTestEventOrderedInFd(11, kCommonTimestamp));
event_queue.PushEvent(MakeTestEventNotOrdered(kCommonTimestamp));
const uint64_t top_timestamp = event_queue.TopEvent().timestamp;
const PerfEventOrderedStream top_order = event_queue.TopEvent().ordered_stream;
event_queue.PopEvent();
const uint64_t remaining_timestamp = event_queue.TopEvent().timestamp;
const PerfEventOrderedStream remaining_order = event_queue.TopEvent().ordered_stream;
EXPECT_EQ(top_timestamp, remaining_timestamp);
EXPECT_NE(top_order, remaining_order);
}
TEST(
PerfEventQueue,
TopEventAndPopEventReturnTheSameWhenAnEventOrderedByTidAndAnEventWithNoOrderHaveTheSameTimestamp) {
PerfEventQueue event_queue;
constexpr uint64_t kCommonTimestamp = 100;
event_queue.PushEvent(MakeTestEventOrderedInTid(11, kCommonTimestamp));
event_queue.PushEvent(MakeTestEventNotOrdered(kCommonTimestamp));
const uint64_t top_timestamp = event_queue.TopEvent().timestamp;
const PerfEventOrderedStream top_order = event_queue.TopEvent().ordered_stream;
event_queue.PopEvent();
const uint64_t remaining_timestamp = event_queue.TopEvent().timestamp;
const PerfEventOrderedStream remaining_order = event_queue.TopEvent().ordered_stream;
EXPECT_EQ(top_timestamp, remaining_timestamp);
EXPECT_NE(top_order, remaining_order);
}
TEST(
PerfEventQueue,
TopEventAndPopEventReturnTheSameWhenAnEventOrderedByFdAndAnEventOrderedByTidHaveTheSameTimestamp) {
PerfEventQueue event_queue;
constexpr uint64_t kCommonTimestamp = 100;
event_queue.PushEvent(MakeTestEventOrderedInFd(11, kCommonTimestamp));
event_queue.PushEvent(MakeTestEventOrderedInTid(22, kCommonTimestamp));
const uint64_t top_timestamp = event_queue.TopEvent().timestamp;
const PerfEventOrderedStream top_order = event_queue.TopEvent().ordered_stream;
event_queue.PopEvent();
const uint64_t remaining_timestamp = event_queue.TopEvent().timestamp;
const PerfEventOrderedStream remaining_order = event_queue.TopEvent().ordered_stream;
EXPECT_EQ(top_timestamp, remaining_timestamp);
EXPECT_NE(top_order, remaining_order);
}
} // namespace orbit_linux_tracing
| 5,221 |
407 | <reponame>iuskye/SREWorks
package com.alibaba.tesla.tkgone.backend.service.dto;
import java.util.Date;
import lombok.Builder;
import lombok.Data;
/**
* @author xueyong.zxy
*/
@Builder
@Data
public class BackendDTO {
private Long id;
private Date gmtCreate;
private Date gmtModified;
private String name;
private String type;
private String host;
private Long port;
private String user;
private String password;
// public BackendDTO(BackendVO backendVO) {
// BeanUtils.copyProperties(backendVO, this);
// }
}
| 219 |
1,511 | /* libs/corecg/SkDebug_brew.cpp
**
** Copyright 2009, The Android Open Source Project
** Copyright 2009, Company 100, Inc.
**
** Licensed under the Apache License, Version 2.0 (the "License"); | 56 |
2,326 | <reponame>mengzhisuoliu/librime
//
// Copyright RIME Developers
// Distributed under the BSD License
//
// 2011-12-01 <NAME> <<EMAIL>>
//
#ifndef RIME_DEPLOYER_H_
#define RIME_DEPLOYER_H_
#include <future>
#include <mutex>
#include <queue>
#include <boost/any.hpp>
#include <rime/common.h>
#include <rime/component.h>
#include <rime/messenger.h>
namespace rime {
class Deployer;
using TaskInitializer = boost::any;
class DeploymentTask : public Class<DeploymentTask, TaskInitializer> {
public:
DeploymentTask() = default;
virtual ~DeploymentTask() = default;
virtual bool Run(Deployer* deployer) = 0;
};
class Deployer : public Messenger {
public:
// read-only access after library initialization {
string shared_data_dir;
string user_data_dir;
string prebuilt_data_dir;
string staging_dir;
string sync_dir;
string user_id;
string distribution_name;
string distribution_code_name;
string distribution_version;
// }
Deployer();
~Deployer();
bool RunTask(const string& task_name,
TaskInitializer arg = TaskInitializer());
bool ScheduleTask(const string& task_name,
TaskInitializer arg = TaskInitializer());
void ScheduleTask(an<DeploymentTask> task);
an<DeploymentTask> NextTask();
bool HasPendingTasks();
bool Run();
bool StartWork(bool maintenance_mode = false);
bool StartMaintenance();
bool IsWorking();
bool IsMaintenanceMode();
// the following two methods equally wait until all threads are joined
void JoinWorkThread();
void JoinMaintenanceThread();
string user_data_sync_dir() const;
private:
std::queue<of<DeploymentTask>> pending_tasks_;
std::mutex mutex_;
std::future<void> work_;
bool maintenance_mode_ = false;
};
} // namespace rime
#endif // RIME_DEPLOYER_H_
| 610 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-cp8j-5593-5j34",
"modified": "2022-04-20T00:01:59Z",
"published": "2021-12-23T00:01:08Z",
"aliases": [
"CVE-2021-21906"
],
"details": "Stack-based buffer overflow vulnerability exists in how the CMA readfile function of Garrett Metal Detectors iC Module CMA Version 5.0 is used at various locations. The Garrett iC Module exposes an authenticated CLI over TCP port 6877. This interface is used by a secondary GUI client, called “CMA Connect”, to interact with the iC Module on behalf of the user. Every time a user submits a password to the CLI password prompt, the buffer containing their input is passed as the password parameter to the checkPassword function.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:H/I:H/A:H"
}
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-21906"
},
{
"type": "WEB",
"url": "https://talosintelligence.com/vulnerability_reports/TALOS-2021-1357"
}
],
"database_specific": {
"cwe_ids": [
"CWE-787"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 499 |
465 | <gh_stars>100-1000
# https://www.kaggle.com/c/home-depot-product-search-relevance/forums/t/18967/data-preparation
COLOR_LIST = [
"white",
"black",
"brown",
"gray",
"chrome",
"stainless steel",
"whites",
"red",
"browns / tans",
"bronze",
"silver",
"blacks",
"beige",
"stainless",
"blue",
"nickel",
"metallics",
"clear",
"grays",
"green",
"multi",
"beige / cream",
"tan",
"greens",
"yellow",
"wood",
"blues",
"reds / pinks",
"brushed nickel",
"orange",
"metallic",
"brass",
"yellows / golds",
"oil rubbed bronze",
"polished chrome",
"almond",
"multi-colored",
"dark brown wood",
"primed white",
"beige/bisque",
"biscuit",
"ivory",
"oranges / peaches",
"grey",
"unfinished wood",
"light brown wood",
"wood grain",
"silver metallic",
"copper",
"medium brown wood",
"soft white",
"gold",
"satin nickel",
"cherry",
"bright white",
"red/orange",
"teal",
"natural",
"oak",
"mahogany",
"aluminum",
"espresso",
"unfinished",
"purples / lavenders",
"brown/tan",
"steel",
"venetian bronze",
"slate",
"warm white",
"bone",
"pink",
"stainless look",
"reddish brown wood",
"solid colors",
"off-white",
"walnut",
"chocolate",
"light almond",
"vibrant brushed nickel",
"satin white",
"polished brass",
"linen",
"white primer",
"purple",
"charcoal",
"color",
"oil-rubbed bronze",
"melamine white",
"turquoises / aquas",
"blue/purple",
"primed",
"bisque",
"browns/tans",
"assorted colors",
"java",
"pewter",
"chestnut",
"yellow/gold",
"taupe",
"pacific white",
"cedar",
"monochromatic stainless steel",
"other",
"platinum",
"mocha",
"cream",
"sand",
"daylight",
"brushed stainless steel",
"powder-coat white",
] | 661 |
447 | /*=============================================================================
Copyright (c) 2014-2021 <NAME>. All rights reserved.
Distributed under the MIT License [ https://opensource.org/licenses/MIT ]
=============================================================================*/
#if !defined(CYCFI_Q_BITSTREAM_HPP_MARCH_12_2018)
#define CYCFI_Q_BITSTREAM_HPP_MARCH_12_2018
#include <type_traits>
#include <cstddef>
#include <vector>
#include <algorithm>
#include <cstdint>
#include <q/support/base.hpp>
namespace cycfi::q
{
////////////////////////////////////////////////////////////////////////////
// The bitset class stores bits efficiently using integers <T>. Data is
// stored in a std::vector with a size that is fixed at construction time,
// given the number of bits required.
//
// Member functions are provided for:
//
// 1. Setting individual bits and ranges of bits
// 2. Getting each bit at position i
// 3. Clearing all bits
// 4. Getting the actual integers that store the bits.
////////////////////////////////////////////////////////////////////////////
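// Illustrative usage sketch (not part of the original header); it only exercises the member
// functions declared below:
//
//    cycfi::q::bitset<> bits{128};   // storage for 128 bits, zero-initialized
//    bits.set(3, true);              // set a single bit
//    bits.set(8, 16, true);          // set 16 consecutive bits starting at bit 8
//    bool third = bits.get(3);       // -> true
//    bits.clear();                   // reset every bit back to zero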
template <typename T = natural_uint>
class bitset
{
public:
using value_type = T;
using vector_type = std::vector<T>;
static_assert(std::is_unsigned<T>::value, "T must be unsigned");
static constexpr auto value_size = CHAR_BIT * sizeof(T);
static constexpr auto one = T{1};
bitset(std::size_t num_bits);
bitset(bitset const& rhs) = default;
bitset(bitset&& rhs) = default;
bitset& operator=(bitset const& rhs) = default;
bitset& operator=(bitset&& rhs) = default;
std::size_t size() const;
void clear();
void set(std::size_t i, bool val);
void set(std::size_t i, std::size_t n, bool val);
bool get(std::size_t i) const;
T* data();
T const* data() const;
private:
vector_type _bits;
};
////////////////////////////////////////////////////////////////////////////
// Implementation
////////////////////////////////////////////////////////////////////////////
template <typename T>
inline bitset<T>::bitset(std::size_t num_bits)
{
auto array_size = (num_bits + value_size - 1) / value_size;
_bits.resize(array_size, 0);
}
template <typename T>
inline std::size_t bitset<T>::size() const
{
return _bits.size() * value_size;
}
template <typename T>
inline void bitset<T>::clear()
{
std::fill(_bits.begin(), _bits.end(), 0);
}
template <typename T>
inline void bitset<T>::set(std::size_t i, bool val)
{
// Check that we don't get past the storage
if (i >= size())
return;
auto mask = one << (i % value_size);   // T-wide one keeps the shift well-defined when value_size > 32
auto& ref = _bits[i / value_size];
ref ^= (-T(val) ^ ref) & mask;
}
template <typename T>
inline bool bitset<T>::get(std::size_t i) const
{
// Check that we don't get past the storage
if (i >= size())
return false;
auto mask = one << (i % value_size);
return (_bits[i / value_size] & mask) != 0;
}
template <typename T>
inline void bitset<T>::set(std::size_t i, std::size_t n, bool val)
{
// Check that the index (i) does not get past size
auto size_ = size();
if (i >= size_)
return;
// Check that the n does not get past the size
if ((i+n) > size_)
n = size_-i;
constexpr auto all_ones = int_traits<T>::max;
auto* p = _bits.data();
p += i / value_size; // Adjust the buffer pointer for the current index (i)
// Do the first partial int
auto mod = i & (value_size-1);
if (mod)
{
// mask off the high n bits we want to set
mod = value_size-mod;
// Calculate the mask
T mask = ~(all_ones >> mod);
// Adjust the mask if we're not going to reach the end of this int
if (n < mod)
mask &= (all_ones >> (mod-n));
if (val)
*p |= mask;
else
*p &= ~mask;
// Fast exit if we're done here!
if (n < mod)
return;
n -= mod;
++p;
}
// Write full ints while we can - effectively doing value_size bits at a time
if (n >= value_size)
{
// Store a local value to work with
T val_ = val ? all_ones : 0;
do
{
*p++ = val_;
n -= value_size;
}
while (n >= value_size);
}
// Now do the final partial int, if necessary
if (n)
{
mod = n & (value_size-1);
// Calculate the mask
T mask = (one << mod) - 1;
if (val)
*p |= mask;
else
*p &= ~mask;
}
}
template <typename T>
inline T* bitset<T>::data()
{
return _bits.data();
}
template <typename T>
inline T const* bitset<T>::data() const
{
return _bits.data();
}
}
#endif
| 2,213 |
504 | <reponame>ekoATgithub/dddlib<gh_stars>100-1000
package org.dayatang.db;
import java.util.List;
/**
* @author chencao
*
*/
public interface DBManager {
/**
* Executes an SQL script.
*
* @param sqlFile path of the SQL script file
*/
public void executeSQL(String sqlFile);
/**
* Cleans the database by dropping all tables and views.
*/
public void cleanDatabase();
/**
* Exports all tables (including views) in the database to the specified path.
*
* @param filePath the target file path
*/
public void exportAll(String filePath);
/**
* Exports all tables (including views) whose names do not start with any of the excludedTablePrefixs prefixes to the specified path.
*
* @param filePath the export file path
* @param excludedTablePrefixs table name prefixes to exclude
*/
public void exportExcludes(String filePath, List<String> excludedTablePrefixs);
/**
* Exports all tables (including views) whose names start with any of the includedTablePrefixs prefixes to the specified path.
*
* @param filePath the export file path
* @param includedTablePrefixs table name prefixes to include
*/
public void exportIncludes(String filePath, List<String> includedTablePrefixs);
}
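// Illustrative usage sketch (not part of the original interface): "dbManager" stands for an assumed
// implementation instance, and the script path and table prefixes are placeholders.
//
//   dbManager.executeSQL("sql/schema.sql");                               // run a schema script
//   dbManager.exportExcludes("/tmp/dump.sql", Arrays.asList("tmp_"));     // skip tmp_* tables
//   dbManager.exportIncludes("/tmp/core.sql", Arrays.asList("core_"));    // export only core_* tables
//   dbManager.cleanDatabase();                                            // drop all tables and views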
| 537 |